diff --git a/.backportrc.json b/.backportrc.json new file mode 100644 index 000000000000..1fc808b961be --- /dev/null +++ b/.backportrc.json @@ -0,0 +1,15 @@ +{ + "repoOwner": "saltstack", + "repoName": "salt", + "targetBranchChoices": ["master", "3006.x", "3005.x", "freeze"], + "autoMerge": false, + "autoMergeMethod": "rebase", + "branchLabelMapping": { + "^backport:(.+)$": "$1" + }, + "prTitle": "[BACKPORT] {commitMessages}", + "publishStatusCommentOnSuccess": true, + "sourcePRLabels": [ + "backport:complete" + ] +} diff --git a/.codecov.yml b/.codecov.yml index 5b59b7fcbd4c..f83118cada22 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -1,147 +1,94 @@ codecov: ci: - - jenkins.saltproject.io - github.com - max_report_age: 24 # The age you want coverage reports to expire at, or if you - # want to disable this check. Expired reports will not be processed by codecov. - require_ci_to_pass: yes # Less spammy. Only notify on passing builds. + max_report_age: 72 # The age, in hours, you want coverage reports to expire at, or if you + # want to disable this check. Expired reports will not be processed by codecov. + require_ci_to_pass: false - -# notify: -# after_n_builds: 25 # Only notify after N builds -# # This value is the output of: -# # sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"' -# wait_for_ci: yes # Should Codecov wait for all CI statuses to complete before sending ours. -# # Note: Codecov considers all non-codecov statues to be CI statuses + notify: + wait_for_ci: true # Should Codecov wait for all CI statuses to complete before sending ours. 
+ # Note: Codecov considers all non-codecov statuses to be CI statuses +# manual_trigger: true # We manually tell codecov to merge and process all uploaded coverage reports ignore: - - ^*.py$ # python files at the repo root, ie, setup.py - - doc/.* # ignore any code under doc/ - - salt/ext/.* # ignore any code under salt/ext + - ^*.py$ # python files at the repo root, ie, setup.py + - doc/.* # ignore any code under doc/ + - salt/ext/.* # ignore any code under salt/ext coverage: round: up range: 70..100 precision: 2 - notify: {} - status: false + status: + project: + default: + target: auto # auto compares coverage to the previous base commit + threshold: 5% # adjust accordingly based on how flaky your tests are + # this allows a 5% drop from the previous base commit coverage + flags: + - salt + - tests -# status: -# project: # measuring the overall project coverage -# default: -# informational: true # Use Codecov in informational mode. Default is false. If true is specified the -# # resulting status will pass no matter what the coverage is or what other settings -# # are specified. Informational mode is great to use if you want to expose codecov -# # information to other developers in your pull request without necessarily gating -# # PRs on that information. -# target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against. -# base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used. -# flags: -# - salt -# - tests -# salt: # declare a new status context "salt" -# informational: true -# paths: "!tests/" # remove all files in "tests/" -# target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against. -# base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used. 
-# if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded -# # options: success, error, failure -# if_not_found: success # if parent is not found report status as success, error, or failure -# if_ci_failed: error # if ci fails report status as success, error, or failure -# flags: -# - salt -# tests: # declare a new status context "tests" -# informational: true -# #target: 100% # we always want 100% coverage here -# target: auto # auto while we get this going -# base: auto # will use the pull request base if the commit is on a pull request. If not, the parent commit will be used. -# paths: "!salt/" # only include coverage in "tests/" folder -# if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded -# # options: success, error, failure -# if_not_found: success # if parent is not found report status as success, error, or failure -# if_ci_failed: error # if ci fails report status as success, error, or failure -# flags: -# - tests + salt: # declare a new status context "salt" + paths: + - "!tests/" # remove all files in "tests/" + target: auto # will use the coverage from the base commit (pull request base or parent commit) coverage to compare against. + flags: + - salt -# patch: # pull requests only: this commit status will measure the -# # entire pull requests Coverage Diff. Checking if the lines -# # adjusted are covered at least X%. -# default: -# informational: true # Use Codecov in informational mode. Default is false. If true is specified the -# # resulting status will pass no matter what the coverage is or what other settings -# # are specified. Informational mode is great to use if you want to expose codecov -# # information to other developers in your pull request without necessarily gating -# # PRs on that information. 
-# target: 100% # Newly added lines must have 100% coverage -# if_no_uploads: error # will post commit status of "error" if no coverage reports were uploaded -# # options: success, error, failure -# if_not_found: success -# if_ci_failed: error -# flags: -# - salt -# - tests + tests: # declare a new status context "tests" + paths: + - "!salt/" # only include coverage in "tests/" folder + target: auto # auto while we get this going + flags: + - tests -# changes: # if there are any unexpected changes in coverage -# default: -# informational: true # Use Codecov in informational mode. Default is false. If true is specified the -# # resulting status will pass no matter what the coverage is or what other settings -# # are specified. Informational mode is great to use if you want to expose codecov -# # information to other developers in your pull request without necessarily gating -# # PRs on that information. -# if_no_uploads: error -# if_not_found: success -# if_ci_failed: error -# flags: -# - salt -# - tests + patch: # pull requests only: this commit status will measure the + # entire pull requests Coverage Diff. Checking if the lines + # adjusted are covered at least X%. 
+ default: + target: auto # auto compares coverage to the previous base commit + threshold: 5% # adjust accordingly based on how flaky your tests are + # this allows a 5% drop from the previous base commit coverage flags: salt: paths: - salt/ - carryforward: true # https://docs.codecov.io/docs/carryforward-flags + carryforward: false # https://docs.codecov.io/docs/carryforward-flags tests: paths: - tests/ - carryforward: true + carryforward: false + pkg: + paths: + - pkg/tests + carryforward: false unit: paths: - tests/unit - tests/pytests/unit - carryforward: true + carryforward: false functional: paths: - tests/pytests/functional - carryforward: true + carryforward: false scenarios: paths: - tests/pytests/scenarios - carryforward: true + carryforward: false integration: paths: - tests/integration - tests/pytests/integration - carryforward: true - system: - paths: - - tests/integration - - tests/pytests/integration - carryforward: true + carryforward: false -#comment: -# layout: "reach, diff, flags, files" -# after_n_builds: 46 # Only comment on PRs after N builds -# # This value is the output of: -# # sh -c 'echo "$(ls .ci/ | grep kitchen | wc -l)"' -# -# behavior: new # Comment posting behaviour -# # default: update, if exists. Otherwise post new. -# # once: update, if exists. Otherwise post new. Skip if deleted. -# # new: delete old and post new. -# # spammy: post new (do not delete old comments). -# -# Disable Comments -comment: off +comment: + layout: "reach, diff, flags, files" + behavior: new # Comment posting behaviour + # default: update, if exists. Otherwise post new. + # once: update, if exists. Otherwise post new. Skip if deleted. + # new: delete old and post new. 
diff --git a/.coveragerc b/.coveragerc index 2579c96f9cdd..bdd2587d1143 100644 --- a/.coveragerc +++ b/.coveragerc @@ -7,6 +7,13 @@ relative_files = True omit = setup.py .nox/* +source_pkgs = + pkg.tests + salt + tests + tools +disable_warnings = module-not-imported + [report] # Regexes for lines to exclude from consideration @@ -32,7 +39,7 @@ ignore_errors = True [paths] salt = salt/ - artifacts/salt + artifacts/salt/lib/python3.*/site-packages/salt **/testing/salt/ **\testing\salt tests = diff --git a/.github/actionlint.yaml b/.github/actionlint.yaml index 0cb729af1405..f37fdbea9699 100644 --- a/.github/actionlint.yaml +++ b/.github/actionlint.yaml @@ -11,3 +11,4 @@ self-hosted-runner: - repo-release - medium - large + - macos-13-xlarge diff --git a/.github/actions/build-onedir-deps/action.yml b/.github/actions/build-onedir-deps/action.yml index 42b93a024e27..511fe5a52751 100644 --- a/.github/actions/build-onedir-deps/action.yml +++ b/.github/actions/build-onedir-deps/action.yml @@ -29,6 +29,7 @@ env: COLUMNS: 190 PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ PIP_EXTRA_INDEX_URL: https://pypi.org/simple + RELENV_BUILDENV: 1 runs: @@ -38,42 +39,25 @@ runs: - name: Cache Deps Onedir Package Directory id: onedir-pkg-cache - uses: actions/cache@v3 + uses: actions/cache@v3.3.1 with: path: artifacts/${{ inputs.package-name }} - key: ${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ hashFiles(format('{0}/.relenv/**/*.xz', github.workspace), 'requirements/static/pkg/*/*.txt', '.github/actions/build-onedir-deps/action.yml') }} - - - name: Create Onedir Directory - shell: bash - if: steps.onedir-pkg-cache.outputs.cache-hit != 'true' - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - python3 -m relenv create --arch=${{ inputs.arch }} --python=${{ inputs.python-version }} artifacts/${{ inputs.package-name }} - - - name: Get Ondir Python Version 
- id: get-python-version - if: steps.onedir-pkg-cache.outputs.cache-hit != 'true' - uses: ./.github/actions/get-python-version - with: - python-binary: artifacts/${{ inputs.package-name }}/${{ inputs.platform == 'windows' && 'Scripts/python' || 'bin/python3' }} - - - name: Install Wheel & Upgrade Setuptools & Pip - shell: bash - if: steps.onedir-pkg-cache.outputs.cache-hit != 'true' - run: | - ${{ steps.get-python-version.outputs.binary }} -m pip install -U "wheel" - ${{ steps.get-python-version.outputs.binary }} -m pip install -U "pip>=22.3.1,<23.0" - ${{ steps.get-python-version.outputs.binary }} -m pip install -U "setuptools>=65.6.3,<66" + key: > + ${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ + hashFiles( + format('{0}/.relenv/**/*.xz', github.workspace), + 'requirements/static/pkg/*/*.txt', + '.github/actions/build-onedir-deps/action.yml', + '.github/workflows/build-deps-onedir-*.yml', + 'cicd/shared-gh-workflows-context.yml' + ) + }} - name: Install Salt Onedir Package Dependencies shell: bash if: steps.onedir-pkg-cache.outputs.cache-hit != 'true' run: | - if [ "${{ inputs.platform }}" != "windows" ]; then - ${{ steps.get-python-version.outputs.binary }} -m pip install -r requirements/static/pkg/py${{ steps.get-python-version.outputs.version }}/${{ inputs.platform }}.txt --no-binary=':all:' - else - ${{ steps.get-python-version.outputs.binary }} -m pip install -r requirements/static/pkg/py${{ steps.get-python-version.outputs.version }}/${{ inputs.platform }}.txt - fi + tools pkg build onedir-dependencies --arch ${{ inputs.arch }} --python-version ${{ inputs.python-version }} --package-name artifacts/${{ inputs.package-name }} --platform ${{ inputs.platform }} - name: Cleanup Salt Onedir Directory shell: bash diff --git a/.github/actions/build-onedir-salt/action.yml b/.github/actions/build-onedir-salt/action.yml index 94188eeef34b..50969bb8aae0 100644 --- 
a/.github/actions/build-onedir-salt/action.yml +++ b/.github/actions/build-onedir-salt/action.yml @@ -33,6 +33,7 @@ env: COLUMNS: 190 PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ PIP_EXTRA_INDEX_URL: https://pypi.org/simple + RELENV_BUILDENV: 1 runs: @@ -42,10 +43,19 @@ runs: - name: Download Cached Deps Onedir Package Directory id: onedir-bare-cache - uses: actions/cache@v3 + uses: actions/cache@v3.3.1 with: path: artifacts/${{ inputs.package-name }} - key: ${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ hashFiles(format('{0}/.relenv/**/*.xz', github.workspace), 'requirements/static/pkg/*/*.txt', '.github/actions/build-onedir-deps/action.yml') }} + key: > + ${{ inputs.cache-prefix }}|${{ inputs.python-version }}|deps|${{ inputs.platform }}|${{ inputs.arch }}|${{ inputs.package-name }}|${{ + hashFiles( + format('{0}/.relenv/**/*.xz', github.workspace), + 'requirements/static/pkg/*/*.txt', + '.github/actions/build-onedir-deps/action.yml', + '.github/workflows/build-deps-onedir-*.yml', + 'cicd/shared-gh-workflows-context.yml' + ) + }} - name: Download Source Tarball uses: actions/download-artifact@v3 @@ -53,30 +63,9 @@ runs: name: salt-${{ inputs.salt-version }}.tar.gz - name: Install Salt Into Onedir - if: ${{ inputs.platform != 'windows' }} - env: - USE_STATIC_REQUIREMENTS: "1" - RELENV_PIP_DIR: "1" shell: bash run: | - artifacts/${{ inputs.package-name }}/bin/python3 -m pip install salt-${{ inputs.salt-version }}.tar.gz - if [ ${{ inputs.platform }} == "darwin" ]; then - pkg/macos/prep_salt.sh --build-dir ./artifacts/${{ inputs.package-name }} - rm -rf ./artifacts/${{ inputs.package-name }}/opt - rm -rf ./artifacts/${{ inputs.package-name }}/etc - rm -rf ./artifacts/${{ inputs.package-name }}/Library - fi - - - name: Install Salt Into Onedir (Windows) - if: ${{ inputs.platform == 'windows' }} - env: - USE_STATIC_REQUIREMENTS: "1" - shell: powershell - run: 
| - # install salt - pkg\windows\install_salt.cmd -BuildDir ".\artifacts\${{ inputs.package-name }}" -CICD -SourceTarball salt-${{ inputs.salt-version }}.tar.gz - # prep salt - pkg\windows\prep_salt.cmd -BuildDir ".\artifacts\${{ inputs.package-name }}" -CICD + tools pkg build salt-onedir salt-${{ inputs.salt-version }}.tar.gz --platform ${{ inputs.platform }} --package-name artifacts/${{ inputs.package-name }} - name: Cleanup Salt Onedir Directory shell: bash diff --git a/.github/actions/build-source-tarball/action.yml b/.github/actions/build-source-tarball/action.yml index b1e0c27f496f..f755951fc60b 100644 --- a/.github/actions/build-source-tarball/action.yml +++ b/.github/actions/build-source-tarball/action.yml @@ -25,21 +25,22 @@ runs: steps: - name: Download Release Patch + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} uses: actions/download-artifact@v3 with: name: salt-${{ inputs.salt-version }}.patch - name: Configure Git + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} shell: bash run: | - git config --global user.name "Salt Project Packaging" - git config --global user.email saltproject-packaging@vmware.com + tools pkg configure-git - name: Apply Release Patch + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} shell: bash run: | - git am --committer-date-is-author-date salt-${{ inputs.salt-version }}.patch - rm salt-${{ inputs.salt-version }}.patch + tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete - name: Create Source Tarball shell: bash diff --git a/.github/actions/cached-virtualenv/action.yml b/.github/actions/cached-virtualenv/action.yml index 23ac4a410ff6..f135d9116e58 100644 --- a/.github/actions/cached-virtualenv/action.yml +++ b/.github/actions/cached-virtualenv/action.yml @@ -42,19 +42,29 @@ runs: run: | echo "cache-key=${{ inputs.cache-seed }}|${{ runner.os }}|${{ runner.arch }}|cached-venv|${{ steps.get-python-version.outputs.version }}|${{ inputs.name }}" >> 
"${GITHUB_OUTPUT}" + - name: Define VirtualEnv path + shell: bash + id: virtualenv-path + run: | + cd ${{ github.workspace }} > /dev/null 2>&1 || true + VENVS_PATH=$(echo ".venvs/py${{ steps.get-python-version.outputs.version }}" | python3 -c 'import sys, pathlib; sys.stdout.write(pathlib.Path.cwd().joinpath(sys.stdin.read()).as_posix())') + echo "venvs-path=$VENVS_PATH" | tee -a "$GITHUB_OUTPUT" + VENV_PATH=$(echo ".venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }}" | python3 -c 'import sys, pathlib; sys.stdout.write(pathlib.Path.cwd().joinpath(sys.stdin.read()).as_posix())') + echo "venv-path=$VENV_PATH" | tee -a "$GITHUB_OUTPUT" + - name: Cache VirtualEnv id: cache-virtualenv - uses: actions/cache@v3 + uses: actions/cache@v3.3.1 with: key: ${{ steps.setup-cache-key.outputs.cache-key }} - path: ${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }} + path: ${{ steps.virtualenv-path.outputs.venv-path }} - name: Create Virtualenv shell: bash if: ${{ steps.cache-virtualenv.outputs.cache-hit != 'true' }} run: | - mkdir -p ${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }} - python3 -m venv --upgrade ${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }} + mkdir -p ${{ steps.virtualenv-path.outputs.venvs-path }} + python3 -m venv --upgrade ${{ steps.virtualenv-path.outputs.venv-path }} - name: Define python executable output shell: bash @@ -62,10 +72,22 @@ runs: run: | shopt -s nocasematch if [[ "${{ runner.os }}" =~ "win" ]]; then - BIN_DIR="${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }}/Scripts" + BIN_DIR="${{ steps.virtualenv-path.outputs.venv-path }}/Scripts" + PY_EXE="$BIN_DIR/python.exe" else - BIN_DIR="${{ github.workspace }}/.venvs/py${{ steps.get-python-version.outputs.version }}/${{ inputs.name }}/bin" + BIN_DIR="${{ steps.virtualenv-path.outputs.venv-path }}/bin" 
+ PY_EXE="$BIN_DIR/python3" + if [ ! -f "$PY_EXE" ]; then + echo "The '${PY_EXE}' binary does not exist. Setting it to '$BIN_DIR/python' ..." + PY_EXE="$BIN_DIR/python" + fi + if [ ! -f "$PY_EXE" ]; then + echo "The '${PY_EXE}' binary does not exist. Showing the tree output for '${BIN_DIR}' ..." + tree -a "$BIN_DIR" + exit 1 + fi fi shopt -u nocasematch - echo "python-executable=$BIN_DIR/python" >> "${GITHUB_OUTPUT}" - echo "${BIN_DIR}" >> "${GITHUB_PATH}" + $PY_EXE --version + echo "python-executable=$PY_EXE" | tee -a "${GITHUB_OUTPUT}" + echo "${BIN_DIR}" | tee -a "${GITHUB_PATH}" diff --git a/.github/actions/get-python-version/action.yml b/.github/actions/get-python-version/action.yml index e64d285bca51..f2b045f7ca74 100644 --- a/.github/actions/get-python-version/action.yml +++ b/.github/actions/get-python-version/action.yml @@ -13,6 +13,8 @@ outputs: value: ${{ steps.get-python-version.outputs.version }} full-version: value: ${{ steps.get-python-version.outputs.full-version }} + version-sha256sum: + value: ${{ steps.get-python-version.outputs.version-sha256sum }} runs: @@ -20,12 +22,24 @@ runs: steps: + - name: Install System Packages + if: ${{ runner.os == 'macOS' }} + shell: bash + run: | + brew install coreutils + - name: Get Python Version id: get-python-version shell: bash run: | + echo "Python Binary: ${{ inputs.python-binary }}" echo "binary=${{ inputs.python-binary }}" >> "$GITHUB_OUTPUT" PY_VERSION=$(${{ inputs.python-binary }} -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info))") + echo "PY_VERSION=$PY_VERSION" echo "version=$PY_VERSION" >> "$GITHUB_OUTPUT" PY_FULL_VERSION=$(${{ inputs.python-binary }} -c "import sys; sys.stdout.write('{}.{}.{}'.format(*sys.version_info))") + echo "PY_FULL_VERSION=$PY_FULL_VERSION" echo "full-version=$PY_FULL_VERSION" >> "$GITHUB_OUTPUT" + VERSION_SHA256SUM=$(${{ inputs.python-binary }} --version --version | sha256sum | cut -d ' ' -f 1) + echo "VERSION_SHA256SUM=$VERSION_SHA256SUM" + echo 
"version-sha256sum=$VERSION_SHA256SUM" >> "$GITHUB_OUTPUT" diff --git a/.github/actions/setup-actionlint/action.yml b/.github/actions/setup-actionlint/action.yml index cbf835ea6275..f1a81aaf35ff 100644 --- a/.github/actions/setup-actionlint/action.yml +++ b/.github/actions/setup-actionlint/action.yml @@ -4,23 +4,27 @@ description: Setup actionlint inputs: version: description: The version of actionlint - default: v1.6.23 + default: 1.6.26 + cache-seed: + required: true + type: string + description: Seed used to invalidate caches runs: using: composite steps: - name: Cache actionlint Binary - uses: actions/cache@v3 + uses: actions/cache@v3.3.1 with: path: /usr/local/bin/actionlint - key: ${{ runner.os }}-${{ runner.arch }}-actionlint-${{ inputs.version }} + key: ${{ inputs.cache-seed }}|${{ runner.os }}|${{ runner.arch }}|actionlint|${{ inputs.version }} - name: Setup actionlint shell: bash run: | if ! command -v actionlint; then - bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/${{ inputs.version }}/scripts/download-actionlint.bash) + bash <(curl https://raw.githubusercontent.com/rhysd/actionlint/main/scripts/download-actionlint.bash) ${{ inputs.version }} mv ./actionlint /usr/local/bin/actionlint fi - name: Show actionlint Version diff --git a/.github/actions/setup-pre-commit/action.yml b/.github/actions/setup-pre-commit/action.yml index e7baa0a2aeb5..82b8eef583df 100644 --- a/.github/actions/setup-pre-commit/action.yml +++ b/.github/actions/setup-pre-commit/action.yml @@ -36,7 +36,7 @@ runs: ${{ steps.pre-commit-virtualenv.outputs.python-executable }} -m pip install pre-commit==${{ inputs.version }} - name: Cache Pre-Commit Hooks - uses: actions/cache@v3 + uses: actions/cache@v3.3.1 id: pre-commit-hooks-cache with: key: ${{ steps.pre-commit-virtualenv.outputs.cache-key }}|${{ inputs.version }}|${{ hashFiles('.pre-commit-config.yaml') }} diff --git a/.github/actions/setup-python-tools-scripts/action.yml 
b/.github/actions/setup-python-tools-scripts/action.yml index 7091c104bdb8..9d5ff710346d 100644 --- a/.github/actions/setup-python-tools-scripts/action.yml +++ b/.github/actions/setup-python-tools-scripts/action.yml @@ -2,6 +2,16 @@ name: setup-python-tools-scripts description: Setup 'python-tools-scripts' +inputs: + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches + cwd: + type: string + description: The directory the salt checkout is located in + default: "." + outputs: version: value: ${{ steps.get-version.outputs.version }} @@ -23,14 +33,46 @@ runs: with: python-binary: python3 + - name: Define Cache Hash + id: venv-hash + shell: bash + run: | + VENV_NAME_HASH=$(echo "${{ inputs.cache-prefix }}|${{ github.workflow }}|${{ + steps.get-python-version.outputs.version-sha256sum }}|${{ + hashFiles('requirements/**/*.txt', 'tools/**/*.py') }}" | sha256sum | cut -d ' ' -f 1) + echo "TOOLS_VIRTUALENV_CACHE_SEED=$VENV_NAME_HASH" | tee -a "${GITHUB_ENV}" + echo "venv-hash=$VENV_NAME_HASH" | tee -a "${GITHUB_OUTPUT}" + + - uses: ./.github/actions/cached-virtualenv + id: tools-virtualenv + with: + name: tools.${{ steps.venv-hash.outputs.venv-hash }} + cache-seed: tools|${{ steps.venv-hash.outputs.venv-hash }} + + - name: Restore Python Tools Virtualenvs Cache + uses: actions/cache@v3.3.1 + with: + path: ${{ inputs.cwd }}/.tools-venvs + key: ${{ inputs.cache-prefix }}|${{ steps.venv-hash.outputs.venv-hash }} + - name: Install 'python-tools-scripts' shell: bash + working-directory: ${{ inputs.cwd }} run: | - python3 -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt + PYTHON_EXE=${{ steps.tools-virtualenv.outputs.python-executable }} + (${PYTHON_EXE} -m pip install --help | grep break-system-packages > /dev/null 2>&1) && exitcode=0 || exitcode=1 + if [ $exitcode -eq 0 ]; then + ${PYTHON_EXE} -m pip install --break-system-packages -r requirements/static/ci/py${{ 
steps.get-python-version.outputs.version }}/tools.txt + else + ${PYTHON_EXE} -m pip install -r requirements/static/ci/py${{ steps.get-python-version.outputs.version }}/tools.txt + fi - name: Get 'python-tools-scripts' Version id: get-version shell: bash + working-directory: ${{ inputs.cwd }} run: | - VERSION=$(tools --version) + # The first time `tools` runs with newer virtual environments we need to disregard the output + tools --debug --version + VERSION=$(tools --version | tail -n 1) echo "version=$VERSION" >> "${GITHUB_OUTPUT}" diff --git a/.github/actions/setup-relenv/action.yml b/.github/actions/setup-relenv/action.yml index 99e96686b1d5..c4cfd33f5456 100644 --- a/.github/actions/setup-relenv/action.yml +++ b/.github/actions/setup-relenv/action.yml @@ -22,7 +22,7 @@ inputs: required: false type: string description: The version of relenv to use - default: 0.6.0 + default: 0.13.2 outputs: version: @@ -45,7 +45,7 @@ runs: python3 -m pip install relenv==${{ inputs.version }} - name: Cache Relenv Data Directory - uses: actions/cache@v3 + uses: actions/cache@v3.3.1 with: path: ${{ github.workspace }}/.relenv key: ${{ inputs.cache-seed }}|relenv|${{ inputs.version }}|${{ inputs.python-version }}|${{ inputs.platform }}|${{ inputs.arch }} diff --git a/.github/actions/setup-shellcheck/action.yml b/.github/actions/setup-shellcheck/action.yml index 9471f9b48b96..2c86c98a0726 100644 --- a/.github/actions/setup-shellcheck/action.yml +++ b/.github/actions/setup-shellcheck/action.yml @@ -5,16 +5,20 @@ inputs: version: description: The version of shellcheck default: v0.9.0 + cache-seed: + required: true + type: string + description: Seed used to invalidate caches runs: using: composite steps: - name: Cache shellcheck Binary - uses: actions/cache@v3 + uses: actions/cache@v3.3.1 with: path: /usr/local/bin/shellcheck - key: ${{ runner.os }}-${{ runner.arch }}-shellcheck-${{ inputs.version }} + key: ${{ inputs.cache-seed }}|${{ runner.os }}|${{ runner.arch }}|shellcheck|${{ 
inputs.version }} - name: Setup shellcheck shell: bash diff --git a/.github/config.yml b/.github/config.yml index 633569da991d..6bf3cadf069f 100644 --- a/.github/config.yml +++ b/.github/config.yml @@ -13,7 +13,7 @@ newIssueWelcomeComment: > - [Community Wiki](https://github.com/saltstack/community/wiki) - [Salt’s Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html) - - [Join our Community Slack](https://join.slack.com/t/saltstackcommunity/shared_invite/zt-3av8jjyf-oBQ2M0vhXOhJpNpRkPWBvg) + - [Join our Community Slack](https://via.vmw.com/salt-slack) - [IRC on LiberaChat](https://web.libera.chat/#salt) - [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg) - [Salt Project Twitch channel](https://www.twitch.tv/saltprojectoss) @@ -39,7 +39,7 @@ newPRWelcomeComment: > - [Community Wiki](https://github.com/saltstack/community/wiki) - [Salt’s Contributor Guide](https://docs.saltproject.io/en/master/topics/development/contributing.html) - - [Join our Community Slack](https://join.slack.com/t/saltstackcommunity/shared_invite/zt-3av8jjyf-oBQ2M0vhXOhJpNpRkPWBvg) + - [Join our Community Slack](https://via.vmw.com/salt-slack) - [IRC on LiberaChat](https://web.libera.chat/#salt) - [Salt Project YouTube channel](https://www.youtube.com/channel/UCpveTIucFx9ljGelW63-BWg) - [Salt Project Twitch channel](https://www.twitch.tv/saltprojectoss) diff --git a/.github/workflows/backport.yml b/.github/workflows/backport.yml new file mode 100644 index 000000000000..b70b84df5b3f --- /dev/null +++ b/.github/workflows/backport.yml @@ -0,0 +1,48 @@ +name: Backport PR +run-name: "Backport PR #${{ github.event.number }}" + +on: + pull_request_target: + types: + - "labeled" + - "closed" + +permissions: + contents: write + pull-requests: write + +jobs: + backport: + name: Backport PR + runs-on: + - ubuntu-latest + if: | + github.event.pull_request.merged == true + && ( + contains(github.event.pull_request.labels.*.name, 
'backport:master') || + contains(github.event.pull_request.labels.*.name, 'backport:3006.x') || + contains(github.event.pull_request.labels.*.name, 'backport:3005.x') + ) + && ( + (github.event.action == 'labeled' && ( + contains(github.event.pull_request.labels.*.name, 'backport:master') || + contains(github.event.pull_request.labels.*.name, 'backport:3006.x') || + contains(github.event.pull_request.labels.*.name, 'backport:3005.x') + )) + || (github.event.action == 'closed') + ) + steps: + - name: Backport Action + uses: sqren/backport-github-action@v8.9.7 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + auto_backport_label_prefix: "backport:" + add_original_reviewers: true + + - name: Info log + if: ${{ success() }} + run: jq -C '.' ~/.backport/backport.info.log + + - name: Debug log + if: ${{ failure() }} + run: jq -C '.' ~/.backport/backport.debug.log diff --git a/.github/workflows/build-deb-packages.yml b/.github/workflows/build-deb-packages.yml index b5d5885b98c8..4d7bbdcc8241 100644 --- a/.github/workflows/build-deb-packages.yml +++ b/.github/workflows/build-deb-packages.yml @@ -1,5 +1,5 @@ --- -name: Build Debian Packages +name: Build DEB Packages on: workflow_call: @@ -8,6 +8,29 @@ on: type: string required: true description: The Salt version to set prior to building packages. + relenv-version: + type: string + required: true + description: The relenv version to set prior to building packages. 
+ python-version: + required: true + type: string + description: The version of python to use with relenv + source: + required: true + type: string + description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches + +env: + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple jobs: build: @@ -22,12 +45,19 @@ jobs: arch: - x86_64 - aarch64 + source: + - ${{ inputs.source }} container: - image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-11 + image: ghcr.io/saltstack/salt-ci-containers/packaging:debian-12 steps: - - uses: actions/checkout@v3 + # Checkout here so we can easily use custom actions + - uses: actions/checkout@v4 + + # Checkout here for the build process + - name: Checkout in build directory + uses: actions/checkout@v4 with: path: pkgs/checkout/ @@ -38,47 +68,65 @@ jobs: name: salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz path: pkgs/checkout/artifacts/ - - name: Download System Dependencies - run: | - apt update - apt install -y python3 python3-venv python3-pip build-essential devscripts debhelper bash-completion git - - name: Download Release Patch + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} uses: actions/download-artifact@v3 with: name: salt-${{ inputs.salt-version }}.patch path: pkgs/checkout/ + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cwd: pkgs/checkout/ + cache-prefix: ${{ inputs.cache-prefix }} + + - name: Setup Salt Version + id: setup-salt-version + uses: ./.github/actions/setup-salt-version + with: + salt-version: "${{ inputs.salt-version }}" + cwd: pkgs/checkout/ + - name: Configure Git - shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} + working-directory: pkgs/checkout/ run: | - git config --global 
user.name "Salt Project Packaging" - git config --global user.email saltproject-packaging@vmware.com + tools pkg configure-git - - name: Apply Release Patch - shell: bash + - name: Apply release patch + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} + working-directory: pkgs/checkout/ run: | - cd pkgs/checkout/ - git am --committer-date-is-author-date salt-${{ inputs.salt-version }}.patch - rm salt-${{ inputs.salt-version }}.patch + tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete - name: Build Deb - env: - SALT_ONEDIR_ARCHIVE: "${{ github.workspace }}/pkgs/checkout/artifacts/salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz" + working-directory: pkgs/checkout/ run: | - cd pkgs/checkout/ - echo "${{ inputs.salt-version }}" > salt/_version.txt - ln -sf pkg/debian/ . - debuild -e SALT_ONEDIR_ARCHIVE -uc -us + tools pkg build deb --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{ + inputs.source == 'onedir' && + format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch) + || + format('--arch={0}', matrix.arch) + }} - name: Cleanup run: | rm -rf pkgs/checkout/ + - name: Set Artifact Name + id: set-artifact-name + run: | + if [ "${{ inputs.source }}" != "src" ]; then + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb" >> "$GITHUB_OUTPUT" + else + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb-from-src" >> "$GITHUB_OUTPUT" + fi + - name: Upload DEBs uses: actions/upload-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-deb + name: ${{ steps.set-artifact-name.outputs.artifact-name }} path: ${{ github.workspace }}/pkgs/* retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/build-deps-ci-action-macos.yml b/.github/workflows/build-deps-ci-action-macos.yml new file mode 100644 index 000000000000..125a6713a85e --- /dev/null +++ 
b/.github/workflows/build-deps-ci-action-macos.yml @@ -0,0 +1,135 @@ +--- +name: Install Test Dependencies + +on: + workflow_call: + inputs: + distro-slug: + required: true + type: string + description: The OS slug to run tests against + nox-session: + required: true + type: string + description: The nox session to run + salt-version: + type: string + required: true + description: The Salt version to set prior to running tests. + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches + platform: + required: true + type: string + description: The platform being tested + arch: + required: true + type: string + description: The platform arch being tested + nox-version: + required: true + type: string + description: The nox version to install + python-version: + required: false + type: string + description: The python version to run tests with + default: "3.10" + package-name: + required: false + type: string + description: The onedir package name to use + default: salt + + +env: + COLUMNS: 190 + PIP_INDEX_URL: "https://pypi-proxy.saltstack.net/root/local/+simple/" + PIP_EXTRA_INDEX_URL: "https://pypi.org/simple" + GITHUB_ACTIONS_PIPELINE: "1" + +jobs: + + dependencies: + name: Install + runs-on: ${{ inputs.distro-slug }} + timeout-minutes: 90 + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + + - name: Checkout Source Code + uses: actions/checkout@v4 + + - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + id: nox-dependencies-cache + uses: actions/cache@v3.3.1 + with: + path: nox.${{ inputs.distro-slug }}.tar.* + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + + - name: 
Download Onedir Tarball as an Artifact + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + path: artifacts/ + + - name: Decompress Onedir Tarball + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + + - name: Set up Python ${{ inputs.python-version }} + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + uses: actions/setup-python@v4 + with: + python-version: "${{ inputs.python-version }}" + + - name: Install System Dependencies + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + brew install openssl@3 + + - name: Install Nox + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + python3 -m pip install 'nox==${{ inputs.nox-version }}' + + - name: Install Dependencies + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + env: + PRINT_TEST_SELECTION: "0" + PRINT_SYSTEM_INFO: "0" + run: | + export PYCURL_SSL_LIBRARY=openssl + export LDFLAGS="-L/usr/local/opt/openssl@3/lib" + export CPPFLAGS="-I/usr/local/opt/openssl@3/include" + export PKG_CONFIG_PATH="/usr/local/opt/openssl@3/lib/pkgconfig" + nox --install-only -e ${{ inputs.nox-session }} + + - name: Cleanup .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + nox --force-color -e "pre-archive-cleanup(pkg=False)" + + - name: Compress .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + nox --force-color -e compress-dependencies -- ${{ inputs.distro-slug }} + + - name: Upload Nox Requirements Tarball + uses: actions/upload-artifact@v3 + with: + name: nox-${{ inputs.distro-slug 
}}-${{ inputs.nox-session }} + path: nox.${{ inputs.distro-slug }}.tar.* diff --git a/.github/workflows/build-deps-ci-action.yml b/.github/workflows/build-deps-ci-action.yml new file mode 100644 index 000000000000..0d596c0e1dcf --- /dev/null +++ b/.github/workflows/build-deps-ci-action.yml @@ -0,0 +1,160 @@ +--- +name: Install Test Dependencies + +on: + workflow_call: + inputs: + distro-slug: + required: true + type: string + description: The OS slug to run tests against + nox-session: + required: true + type: string + description: The nox session to run + salt-version: + type: string + required: true + description: The Salt version to set prior to running tests. + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches + platform: + required: true + type: string + description: The platform being tested + arch: + required: true + type: string + description: The platform arch being tested + nox-version: + required: true + type: string + description: The nox version to install + python-version: + required: false + type: string + description: The python version to run tests with + default: "3.10" + package-name: + required: false + type: string + description: The onedir package name to use + default: salt + + +env: + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + +jobs: + + dependencies: + name: Install + runs-on: + - self-hosted + - linux + - bastion + timeout-minutes: 90 + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" + + - name: Checkout Source Code + uses: actions/checkout@v4 + + - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + id: nox-dependencies-cache + uses: actions/cache@v3.3.1 + with: + path: nox.${{ inputs.distro-slug }}.tar.* + key: ${{ inputs.cache-prefix 
}}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + + - name: Download Onedir Tarball as an Artifact + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + path: artifacts/ + + - name: Decompress Onedir Tarball + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + + - name: PyPi Proxy + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + + - name: Setup Python Tools Scripts + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }}-build-deps-ci + + - name: Get Salt Project GitHub Actions Bot Environment + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: Start VM + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + id: spin-up-vm + run: | + tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }} + + - name: List Free Space + if: 
steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true + + - name: Upload Checkout To VM + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm rsync ${{ inputs.distro-slug }} + + - name: Install Dependencies + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm install-dependencies --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} + + - name: Cleanup .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }} + + - name: Compress .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }} + + - name: Download Compressed .nox Directory + if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm download-dependencies ${{ inputs.distro-slug }} + + - name: Destroy VM + if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' + run: | + tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} + + - name: Upload Nox Requirements Tarball + uses: actions/upload-artifact@v3 + with: + name: nox-${{ inputs.distro-slug }}-${{ inputs.nox-session }} + path: nox.${{ inputs.distro-slug }}.tar.* diff --git a/.github/workflows/build-deps-onedir-linux.yml b/.github/workflows/build-deps-onedir-linux.yml new file mode 100644 index 000000000000..8d149c462611 --- /dev/null +++ b/.github/workflows/build-deps-onedir-linux.yml @@ -0,0 +1,83 @@ +--- +name: Build Packaging Dependencies Onedir + +on: + workflow_call: + inputs: + salt-version: + type: string + required: true + description: The Salt version to set prior to building packages. 
+ github-hosted-runners: + type: boolean + required: true + self-hosted-runners: + type: boolean + required: true + cache-seed: + required: true + type: string + description: Seed used to invalidate caches + relenv-version: + required: true + type: string + description: The version of relenv to use + python-version: + required: true + type: string + description: The version of python to use with relenv + +env: + RELENV_DATA: "${{ github.workspace }}/.relenv" + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + +jobs: + + build-deps-linux: + name: Linux + if: ${{ inputs.self-hosted-runners }} + strategy: + fail-fast: false + matrix: + arch: + - x86_64 + - aarch64 + runs-on: + - self-hosted + - linux + - ${{ matrix.arch }} + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + + - uses: actions/checkout@v4 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }}-build-deps-linux-${{ matrix.arch }} + + - name: Setup Relenv + id: setup-relenv + uses: ./.github/actions/setup-relenv + with: + platform: linux + arch: ${{ matrix.arch }} + version: ${{ inputs.relenv-version }} + cache-seed: ${{ inputs.cache-seed }} + python-version: ${{ inputs.python-version }} + + - name: Install Salt Packaging Dependencies into Relenv Onedir + uses: ./.github/actions/build-onedir-deps + with: + platform: linux + arch: ${{ matrix.arch }} + python-version: "${{ inputs.python-version }}" + cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-deps-onedir-macos.yml b/.github/workflows/build-deps-onedir-macos.yml new file mode 100644 index 000000000000..2886c3f993d1 --- 
/dev/null +++ b/.github/workflows/build-deps-onedir-macos.yml @@ -0,0 +1,88 @@ +--- +name: Build Packaging Dependencies Onedir + +on: + workflow_call: + inputs: + salt-version: + type: string + required: true + description: The Salt version to set prior to building packages. + github-hosted-runners: + type: boolean + required: true + self-hosted-runners: + type: boolean + required: true + cache-seed: + required: true + type: string + description: Seed used to invalidate caches + relenv-version: + required: true + type: string + description: The version of relenv to use + python-version: + required: true + type: string + description: The version of python to use with relenv + +env: + RELENV_DATA: "${{ github.workspace }}/.relenv" + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + +jobs: + + build-deps-macos: + name: macOS + if: ${{ inputs.github-hosted-runners }} + strategy: + fail-fast: false + max-parallel: 2 + matrix: + arch: + - x86_64 + - aarch64 + runs-on: + - ${{ matrix.arch == 'aarch64' && 'macos-13-xlarge' || 'macos-12' }} + + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + + - uses: actions/checkout@v4 + + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }}-build-deps-macos + + - name: Setup Relenv + id: setup-relenv + uses: ./.github/actions/setup-relenv + with: + platform: darwin + arch: ${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }} + version: ${{ inputs.relenv-version }} + cache-seed: ${{ inputs.cache-seed }} + python-version: ${{ inputs.python-version }} + + - name: Install Salt Packaging 
Dependencies into Relenv Onedir + uses: ./.github/actions/build-onedir-deps + with: + platform: darwin + arch: ${{ matrix.arch }} + python-version: "${{ inputs.python-version }}" + cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-deps-onedir-windows.yml b/.github/workflows/build-deps-onedir-windows.yml new file mode 100644 index 000000000000..af741e062242 --- /dev/null +++ b/.github/workflows/build-deps-onedir-windows.yml @@ -0,0 +1,86 @@ +--- +name: Build Packaging Dependencies Onedir + +on: + workflow_call: + inputs: + salt-version: + type: string + required: true + description: The Salt version to set prior to building packages. + github-hosted-runners: + type: boolean + required: true + self-hosted-runners: + type: boolean + required: true + cache-seed: + required: true + type: string + description: Seed used to invalidate caches + relenv-version: + required: true + type: string + description: The version of relenv to use + python-version: + required: true + type: string + description: The version of python to use with relenv + +env: + RELENV_DATA: "${{ github.workspace }}/.relenv" + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + +jobs: + + build-deps-windows: + name: Windows + if: ${{ inputs.github-hosted-runners }} + strategy: + fail-fast: false + max-parallel: 2 + matrix: + arch: + - x86 + - amd64 + runs-on: windows-latest + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + + - uses: actions/checkout@v4 + + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: 
${{ inputs.cache-seed }}-build-deps-windows-${{ matrix.arch }} + + - name: Setup Relenv + id: setup-relenv + uses: ./.github/actions/setup-relenv + with: + platform: windows + arch: ${{ matrix.arch }} + version: ${{ inputs.relenv-version }} + cache-seed: ${{ inputs.cache-seed }} + python-version: ${{ inputs.python-version }} + + - name: Install Salt Packaging Dependencies into Relenv Onedir + uses: ./.github/actions/build-onedir-deps + with: + platform: windows + arch: ${{ matrix.arch }} + python-version: "${{ inputs.python-version }}" + cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-deps-onedir.yml b/.github/workflows/build-deps-onedir.yml deleted file mode 100644 index eaf5f0c5aac3..000000000000 --- a/.github/workflows/build-deps-onedir.yml +++ /dev/null @@ -1,162 +0,0 @@ ---- -name: Build Packaging Dependencies Onedir - -on: - workflow_call: - inputs: - salt-version: - type: string - required: true - description: The Salt version to set prior to building packages. 
- github-hosted-runners: - type: boolean - required: true - self-hosted-runners: - type: boolean - required: true - cache-seed: - required: true - type: string - description: Seed used to invalidate caches - relenv-version: - required: false - type: string - default: 0.6.0 - description: The version of relenv to use - python-version-linux: - required: false - type: string - default: 3.10.9 - description: The version of python to use with relenv - python-version-macos: - required: false - type: string - default: 3.10.9 - description: The version of python to use with relenv - python-version-windows: - required: false - type: string - description: The version of python to use with relenv on Windows - default: 3.10.9 - -env: - RELENV_DATA: "${{ github.workspace }}/.relenv" - -jobs: - - build-deps-linux: - name: Linux - if: ${{ inputs.self-hosted-runners }} - strategy: - fail-fast: false - matrix: - arch: - - x86_64 - - aarch64 - runs-on: - - self-hosted - - linux - - ${{ matrix.arch }} - steps: - - uses: actions/checkout@v3 - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: linux - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-linux }} - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Install Salt Packaging Dependencies into Relenv Onedir - uses: ./.github/actions/build-onedir-deps - with: - platform: linux - arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version-linux }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} - - - build-deps-windows: - name: Windows - if: ${{ inputs.github-hosted-runners }} - strategy: - fail-fast: false - max-parallel: 2 - matrix: - arch: - - x86 - - amd64 - runs-on: windows-latest - steps: - - uses: actions/checkout@v3 - - - name: Set up Python 3.10 - uses: 
actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: windows - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-windows }} - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Install Salt Packaging Dependencies into Relenv Onedir - uses: ./.github/actions/build-onedir-deps - with: - platform: windows - arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version-windows }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} - - - build-deps-macos: - name: macOS - if: ${{ inputs.github-hosted-runners }} - strategy: - fail-fast: false - max-parallel: 2 - matrix: - arch: - - x86_64 - runs-on: macos-12 - steps: - - uses: actions/checkout@v3 - - - name: Set up Python 3.10 - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: darwin - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-macos }} - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Install Salt Packaging Dependencies into Relenv Onedir - uses: ./.github/actions/build-onedir-deps - with: - platform: darwin - arch: ${{ matrix.arch }} - python-version: "${{ inputs.python-version-macos }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml index 8546f096337c..2afb5d8ce29d 100644 --- a/.github/workflows/build-docs.yml +++ b/.github/workflows/build-docs.yml @@ -13,6 +13,13 @@ on: type: string description: Seed used to invalidate caches 
+env: + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + jobs: build: name: Build @@ -32,26 +39,14 @@ jobs: # - pdf steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Download Release Patch + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} uses: actions/download-artifact@v3 with: name: salt-${{ inputs.salt-version }}.patch - - name: Configure Git - shell: bash - run: | - git config --global user.name "Salt Project Packaging" - git config --global user.email saltproject-packaging@vmware.com - git config --global --add safe.directory "$(pwd)" - - - name: Apply Release Patch - shell: bash - run: | - git am --committer-date-is-author-date salt-${{ inputs.salt-version }}.patch - rm salt-${{ inputs.salt-version }}.patch - - name: Get Python Version id: get-python-version uses: ./.github/actions/get-python-version @@ -61,10 +56,22 @@ jobs: - name: Setup Python Tools Scripts id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }} + + - name: Configure Git + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} + run: | + tools pkg configure-git + + - name: Apply release patch + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} + run: | + tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete - name: Cache Python Tools Docs Virtualenv id: tools-venvs-dependencies-cache - uses: actions/cache@v3 + uses: actions/cache@v3.3.1 with: path: .tools-venvs/docs key: ${{ inputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} diff --git a/.github/workflows/build-macos-packages.yml b/.github/workflows/build-macos-packages.yml index 
f7278ffe8f8a..9b3324893abd 100644 --- a/.github/workflows/build-macos-packages.yml +++ b/.github/workflows/build-macos-packages.yml @@ -8,26 +8,86 @@ on: type: string required: true description: The Salt version to set prior to building packages. + relenv-version: + type: string + required: true + description: The relenv version to set prior to building packages. + python-version: + required: true + type: string + description: The version of python to use with relenv + sign-packages: + type: boolean + default: false + description: Sign Packages + environment: + type: string + description: The GitHub Environment where this workflow should run + default: ci + source: + required: true + type: string + description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches + +env: + COLUMNS: 190 + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple jobs: build-pkgs: name: macOS + environment: ${{ inputs.environment }} strategy: fail-fast: false matrix: arch: - x86_64 + - aarch64 + source: + - ${{ inputs.source }} + runs-on: - - macos-12 + - ${{ matrix.arch == 'aarch64' && 'macos-13-xlarge' || 'macos-12' }} + steps: - - uses: actions/checkout@v3 + - name: Check Package Signing Enabled + shell: bash + id: check-pkg-sign + run: | + if [ "${{ inputs.sign-packages }}" == "true" ]; then + if [ "${{ (secrets.MAC_SIGN_APPLE_ACCT != '' && contains(fromJSON('["nightly", "staging"]'), inputs.environment)) && 'true' || 'false' }}" != "true" ]; then + MSG="Secrets for signing packages are not available. The packages created will NOT be signed." + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=false" >> "$GITHUB_OUTPUT" + else + MSG="The packages created WILL be signed." 
+ echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=true" >> "$GITHUB_OUTPUT" + fi + else + MSG="The sign-packages input is false. The packages created will NOT be signed." + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=false" >> "$GITHUB_OUTPUT" + fi + + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: - python-version: 3.9 + python-version: 3.11 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Setup Salt Version id: setup-salt-version @@ -39,26 +99,60 @@ jobs: uses: actions/download-artifact@v3 with: name: salt-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch }}.tar.xz + path: artifacts/ + + - name: Prepare Package Signing + if: ${{ steps.check-pkg-sign.outputs.sign-pkgs == 'true' }} + run: | + echo ${{ secrets.MAC_SIGN_DEV_APP_CERT_B64 }} | base64 --decode > app-cert.p12 + echo ${{ secrets.MAC_SIGN_DEV_INSTALL_CERT_B64 }} | base64 --decode > install-cert.p12 + # Create SaltSigning keychain. 
This will contain the certificates for signing + security create-keychain -p "${{ secrets.MAC_SIGN_DEV_PASSWORD }}" "${{ secrets.MAC_SIGN_DEV_KEYCHAIN }}" + # Append SaltSigning keychain to the search list + security list-keychains -d user -s "${{ secrets.MAC_SIGN_DEV_KEYCHAIN }}" "$(security list-keychains -d user | sed s/\"//g)" + # Unlock the keychain so we can import certs + security unlock-keychain -p "${{ secrets.MAC_SIGN_DEV_PASSWORD }}" "${{ secrets.MAC_SIGN_DEV_KEYCHAIN }}" + # Developer Application Certificate + security import "app-cert.p12" -t agg -k "${{ secrets.MAC_SIGN_DEV_KEYCHAIN }}" -P "${{ secrets.MAC_SIGN_DEV_PASSWORD }}" -A + rm app-cert.p12 + # Developer Installer Certificate + security import "install-cert.p12" -t agg -k "${{ secrets.MAC_SIGN_DEV_KEYCHAIN }}" -P "${{ secrets.MAC_SIGN_DEV_PASSWORD }}" -A + rm install-cert.p12 + security set-key-partition-list -S apple-tool:,apple: -k "${{ secrets.MAC_SIGN_DEV_PASSWORD }}" "${{ secrets.MAC_SIGN_DEV_KEYCHAIN }}" &> /dev/null - - name: Decompress Onedir Tarball + - name: Build MacOS Package + env: + DEV_APP_CERT: "${{ secrets.MAC_SIGN_DEV_APP_CERT }}" + DEV_INSTALL_CERT: "${{ secrets.MAC_SIGN_DEV_INSTALL_CERT }}" + APPLE_ACCT: "${{ secrets.MAC_SIGN_APPLE_ACCT }}" + APPLE_TEAM_ID: "${{ secrets.MAC_SIGN_APPLE_TEAM_ID }}" + APP_SPEC_PWD: "${{ secrets.MAC_SIGN_APP_SPEC_PWD }}" run: | - mkdir -p artifacts - cd artifacts - tar xvf ${{ github.workspace }}/salt-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch }}.tar.xz - cd ${{ github.workspace }} - mkdir -p pkg/macos/build/opt - cp -Rp artifacts/salt pkg/macos/build/opt/ + tools pkg build macos --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{ + inputs.source == 'onedir' && + format( + '--onedir salt-{0}-onedir-darwin-{1}.tar.xz --salt-version {0} {2}', + inputs.salt-version, + matrix.arch, + steps.check-pkg-sign.outputs.sign-pkgs == 'true' && '--sign' || '' + ) + || + format('--salt-version {0}', 
inputs.salt-version) + }} - - name: Build Package + - name: Set Artifact Name + id: set-artifact-name run: | - cd pkg/macos - ./prep_salt.sh - sudo ./package.sh -n ${{ inputs.salt-version }} + if [ "${{ inputs.source }}" != "src" ]; then + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos" >> "$GITHUB_OUTPUT" + else + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos-from-src" >> "$GITHUB_OUTPUT" + fi - name: Upload ${{ matrix.arch }} Package uses: actions/upload-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-macos - path: pkg/macos/*unsigned.pkg + name: ${{ steps.set-artifact-name.outputs.artifact-name }} + path: pkg/macos/salt-${{ inputs.salt-version }}-py3-*.pkg retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/build-rpm-packages.yml b/.github/workflows/build-rpm-packages.yml index 941d55495c6d..1b2103700c94 100644 --- a/.github/workflows/build-rpm-packages.yml +++ b/.github/workflows/build-rpm-packages.yml @@ -8,9 +8,29 @@ on: type: string required: true description: The Salt version to set prior to building packages. + relenv-version: + type: string + required: true + description: The relenv version to set prior to building packages. 
+ python-version: + required: true + type: string + description: The version of python to use with relenv + source: + required: true + type: string + description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches env: COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple jobs: build: @@ -25,17 +45,14 @@ jobs: arch: - x86_64 - aarch64 + source: + - ${{ inputs.source }} container: image: ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9 steps: - - uses: actions/checkout@v3 - - - name: Download System Dependencies - run: | - yum -y update - yum -y install python3 python3-pip openssl git rpmdevtools rpmlint systemd-units libxcrypt-compat git + - uses: actions/checkout@v4 - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 @@ -44,34 +61,54 @@ jobs: path: artifacts/ - name: Download Release Patch + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} uses: actions/download-artifact@v3 with: name: salt-${{ inputs.salt-version }}.patch + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} + + - name: Setup Salt Version + id: setup-salt-version + uses: ./.github/actions/setup-salt-version + with: + salt-version: "${{ inputs.salt-version }}" + - name: Configure Git - shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - git config --global user.name "Salt Project Packaging" - git config --global user.email saltproject-packaging@vmware.com - git config --global --add safe.directory "$(pwd)" + tools pkg configure-git - - name: Apply Release Patch - shell: bash + - name: Apply release patch + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - git am --committer-date-is-author-date 
salt-${{ inputs.salt-version }}.patch - rm salt-${{ inputs.salt-version }}.patch + tools pkg apply-release-patch salt-${{ inputs.salt-version }}.patch --delete - name: Build RPM - env: - SALT_ONEDIR_ARCHIVE: "${{ github.workspace }}/artifacts/salt-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch }}.tar.xz" run: | - echo "${{ inputs.salt-version }}" > salt/_version.txt - rpmbuild -bb --define="_salt_src $(pwd)" "$(pwd)/pkg/rpm/salt.spec" + tools pkg build rpm --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{ + inputs.source == 'onedir' && + format('--onedir=salt-{0}-onedir-linux-{1}.tar.xz', inputs.salt-version, matrix.arch) + || + format('--arch={0}', matrix.arch) + }} + + - name: Set Artifact Name + id: set-artifact-name + run: | + if [ "${{ inputs.source }}" != "src" ]; then + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm" >> "$GITHUB_OUTPUT" + else + echo "artifact-name=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm-from-src" >> "$GITHUB_OUTPUT" + fi - name: Upload RPMs uses: actions/upload-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-rpm + name: ${{ steps.set-artifact-name.outputs.artifact-name }} path: ~/rpmbuild/RPMS/${{ matrix.arch }}/*.rpm retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/build-salt-onedir-linux.yml b/.github/workflows/build-salt-onedir-linux.yml new file mode 100644 index 000000000000..a7e197b760c0 --- /dev/null +++ b/.github/workflows/build-salt-onedir-linux.yml @@ -0,0 +1,90 @@ +--- +name: Build Salt Onedir + +on: + workflow_call: + inputs: + salt-version: + type: string + required: true + description: The Salt version to set prior to building packages. 
+ github-hosted-runners: + type: boolean + required: true + self-hosted-runners: + type: boolean + required: true + cache-seed: + required: true + type: string + description: Seed used to invalidate caches + relenv-version: + required: true + type: string + description: The version of relenv to use + python-version: + required: true + type: string + description: The version of python to use with relenv + +env: + RELENV_DATA: "${{ github.workspace }}/.relenv" + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + +jobs: + + build-salt-linux: + name: Linux + if: ${{ inputs.self-hosted-runners }} + strategy: + fail-fast: false + matrix: + arch: + - x86_64 + - aarch64 + runs-on: + - self-hosted + - linux + - ${{ matrix.arch }} + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + + - uses: actions/checkout@v4 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-linux + + - name: Setup Salt Version + id: setup-salt-version + uses: ./.github/actions/setup-salt-version + with: + salt-version: "${{ inputs.salt-version }}" + + - name: Setup Relenv + uses: ./.github/actions/setup-relenv + id: setup-relenv + with: + platform: linux + arch: ${{ matrix.arch }} + version: ${{ inputs.relenv-version }} + cache-seed: ${{ inputs.cache-seed }} + python-version: ${{ inputs.python-version }} + + - name: Install Salt into Relenv Onedir + uses: ./.github/actions/build-onedir-salt + with: + platform: linux + arch: ${{ matrix.arch }} + salt-version: "${{ inputs.salt-version }}" + python-version: "${{ inputs.python-version }}" + cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff
--git a/.github/workflows/build-salt-onedir-macos.yml b/.github/workflows/build-salt-onedir-macos.yml new file mode 100644 index 000000000000..3697e51e3f09 --- /dev/null +++ b/.github/workflows/build-salt-onedir-macos.yml @@ -0,0 +1,93 @@ +--- +name: Build Salt Onedir + +on: + workflow_call: + inputs: + salt-version: + type: string + required: true + description: The Salt version to set prior to building packages. + github-hosted-runners: + type: boolean + required: true + self-hosted-runners: + type: boolean + required: true + cache-seed: + required: true + type: string + description: Seed used to invalidate caches + relenv-version: + required: true + type: string + description: The version of relenv to use + python-version: + required: true + type: string + description: The version of python to use with relenv + +env: + RELENV_DATA: "${{ github.workspace }}/.relenv" + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + +jobs: + + build-salt-macos: + name: macOS + if: ${{ inputs.github-hosted-runners }} + strategy: + fail-fast: false + max-parallel: 2 + matrix: + arch: + - x86_64 + - aarch64 + runs-on: + - ${{ matrix.arch == 'aarch64' && 'macos-13-xlarge' || 'macos-12' }} + + steps: + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + + - uses: actions/checkout@v4 + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Setup Relenv + id: setup-relenv + uses: ./.github/actions/setup-relenv + with: + platform: darwin + arch: ${{ matrix.arch == 'aarch64' && 'arm64' || matrix.arch }} + version: ${{ inputs.relenv-version }} + cache-seed: ${{ inputs.cache-seed }} + python-version: ${{ inputs.python-version }} + + - name: Setup Python Tools Scripts + uses: 
./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-macos + + - name: Setup Salt Version + id: setup-salt-version + uses: ./.github/actions/setup-salt-version + with: + salt-version: "${{ inputs.salt-version }}" + + - name: Install Salt into Relenv Onedir + uses: ./.github/actions/build-onedir-salt + with: + platform: darwin + arch: ${{ matrix.arch }} + salt-version: "${{ inputs.salt-version }}" + python-version: "${{ inputs.python-version }}" + cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-salt-onedir-windows.yml b/.github/workflows/build-salt-onedir-windows.yml new file mode 100644 index 000000000000..aba0b4245530 --- /dev/null +++ b/.github/workflows/build-salt-onedir-windows.yml @@ -0,0 +1,93 @@ +--- +name: Build Salt Onedir + +on: + workflow_call: + inputs: + salt-version: + type: string + required: true + description: The Salt version to set prior to building packages. 
+ github-hosted-runners: + type: boolean + required: true + self-hosted-runners: + type: boolean + required: true + cache-seed: + required: true + type: string + description: Seed used to invalidate caches + relenv-version: + required: true + type: string + description: The version of relenv to use + python-version: + required: true + type: string + description: The version of python to use with relenv + +env: + RELENV_DATA: "${{ github.workspace }}/.relenv" + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + +jobs: + + build-salt-windows: + name: Windows + if: ${{ inputs.github-hosted-runners }} + strategy: + fail-fast: false + max-parallel: 2 + matrix: + arch: + - x86 + - amd64 + runs-on: windows-latest + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + + - uses: actions/checkout@v4 + + - name: Set up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Setup Relenv + id: setup-relenv + uses: ./.github/actions/setup-relenv + with: + platform: windows + arch: ${{ matrix.arch }} + version: ${{ inputs.relenv-version }} + cache-seed: ${{ inputs.cache-seed }} + python-version: ${{ inputs.python-version }} + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-seed }}-build-salt-onedir-windows + + - name: Setup Salt Version + id: setup-salt-version + uses: ./.github/actions/setup-salt-version + with: + salt-version: "${{ inputs.salt-version }}" + + - name: Install Salt into Relenv Onedir + uses: ./.github/actions/build-onedir-salt + with: + platform: windows + arch: ${{ matrix.arch }} + salt-version: "${{ inputs.salt-version }}" + python-version: "${{ inputs.python-version }}" + 
cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-salt-onedir.yml b/.github/workflows/build-salt-onedir.yml deleted file mode 100644 index eacccb04c283..000000000000 --- a/.github/workflows/build-salt-onedir.yml +++ /dev/null @@ -1,182 +0,0 @@ ---- -name: Build Salt Onedir - -on: - workflow_call: - inputs: - salt-version: - type: string - required: true - description: The Salt version to set prior to building packages. - github-hosted-runners: - type: boolean - required: true - self-hosted-runners: - type: boolean - required: true - cache-seed: - required: true - type: string - description: Seed used to invalidate caches - relenv-version: - required: false - type: string - default: 0.6.0 - description: The version of relenv to use - python-version-linux: - required: false - type: string - default: 3.10.9 - description: The version of python to use with relenv - python-version-macos: - required: false - type: string - default: 3.10.9 - description: The version of python to use with relenv - python-version-windows: - required: false - type: string - description: The version of python to use with relenv on Windows - default: 3.10.9 - -env: - RELENV_DATA: "${{ github.workspace }}/.relenv" - -jobs: - - build-salt-linux: - name: Linux - if: ${{ inputs.self-hosted-runners }} - strategy: - fail-fast: false - matrix: - arch: - - x86_64 - - aarch64 - runs-on: - - self-hosted - - linux - - ${{ matrix.arch }} - steps: - - uses: actions/checkout@v3 - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Setup Salt Version - id: setup-salt-version - uses: ./.github/actions/setup-salt-version - with: - salt-version: "${{ inputs.salt-version }}" - - - name: Setup Relenv - uses: ./.github/actions/setup-relenv - id: setup-relenv - with: - platform: linux - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - 
python-version: ${{ inputs.python-version-linux }} - - - name: Install Salt into Relenv Onedir - uses: ./.github/actions/build-onedir-salt - with: - platform: linux - arch: ${{ matrix.arch }} - salt-version: "${{ inputs.salt-version }}" - python-version: "${{ inputs.python-version-linux }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} - - - build-salt-windows: - name: Windows - if: ${{ inputs.github-hosted-runners }} - strategy: - fail-fast: false - max-parallel: 2 - matrix: - arch: - - x86 - - amd64 - runs-on: windows-latest - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.10 - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: windows - arch: ${{ matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-windows }} - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Setup Salt Version - id: setup-salt-version - uses: ./.github/actions/setup-salt-version - with: - salt-version: "${{ inputs.salt-version }}" - - - name: Install Salt into Relenv Onedir - uses: ./.github/actions/build-onedir-salt - with: - platform: windows - arch: ${{ matrix.arch }} - salt-version: "${{ inputs.salt-version }}" - python-version: "${{ inputs.python-version-windows }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} - - - build-salt-macos: - name: macOS - if: ${{ inputs.github-hosted-runners }} - strategy: - fail-fast: false - max-parallel: 2 - matrix: - arch: - - x86_64 - runs-on: macos-12 - steps: - - uses: actions/checkout@v3 - - name: Set up Python 3.10 - uses: actions/setup-python@v4 - with: - python-version: "3.10" - - - name: Setup Relenv - id: setup-relenv - uses: ./.github/actions/setup-relenv - with: - platform: darwin - arch: ${{ 
matrix.arch }} - version: ${{ inputs.relenv-version }} - cache-seed: ${{ inputs.cache-seed }} - python-version: ${{ inputs.python-version-macos }} - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Setup Salt Version - id: setup-salt-version - uses: ./.github/actions/setup-salt-version - with: - salt-version: "${{ inputs.salt-version }}" - - - name: Install Salt into Relenv Onedir - uses: ./.github/actions/build-onedir-salt - with: - platform: darwin - arch: ${{ matrix.arch }} - salt-version: "${{ inputs.salt-version }}" - python-version: "${{ inputs.python-version-macos }}" - cache-prefix: ${{ inputs.cache-seed }}|relenv|${{ steps.setup-relenv.outputs.version }} diff --git a/.github/workflows/build-windows-packages.yml b/.github/workflows/build-windows-packages.yml index 9a480dc86e23..5e8e46635259 100644 --- a/.github/workflows/build-windows-packages.yml +++ b/.github/workflows/build-windows-packages.yml @@ -7,12 +7,44 @@ on: salt-version: type: string required: true - description: The Salt version to set prior to building packages. + description: The Salt version to set prior to building packages + relenv-version: + type: string + required: true + description: The relenv version to set prior to building packages. 
+ python-version: + required: true + type: string + description: The version of python to use with relenv + sign-packages: + type: boolean + default: false + description: Sign Packages + environment: + type: string + description: The GitHub Environment where this workflow should run + default: ci + source: + required: true + type: string + description: The backend to build the packages with + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches + +env: + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple jobs: build-pkgs: name: Windows + environment: ${{ inputs.environment }} strategy: fail-fast: false max-parallel: 2 @@ -20,16 +52,52 @@ jobs: arch: - x86 - amd64 + source: + - ${{ inputs.source }} + runs-on: - windows-latest + env: + SM_HOST: "${{ secrets.WIN_SIGN_HOST_PROD }}" + SM_API_KEY: "${{ secrets.WIN_SIGN_API_KEY }}" + SM_CLIENT_CERT_FILE: "D:\\Certificate_pkcs12.p12" + SM_CLIENT_CERT_PASSWORD: "${{ secrets.WIN_SIGN_CERT_PASSWORD }}" + SM_CLIENT_CERT_FILE_B64: "${{ secrets.WIN_SIGN_CERT_FILE_B64 }}" + WIN_SIGN_CERT_SHA1_HASH: "${{ secrets.WIN_SIGN_CERT_SHA1_HASH }}" + steps: - - uses: actions/checkout@v3 + - name: Check Package Signing Enabled + shell: bash + id: check-pkg-sign + run: | + if [ "${{ inputs.sign-packages }}" == "true" ]; then + if [ "${{ (secrets.WIN_SIGN_API_KEY != '' && env.SM_HOST != '' && inputs.environment == 'staging') && 'true' || 'false' }}" != "true" ]; then + MSG="Secrets for signing packages are not available. The packages created will NOT be signed." + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=false" >> "$GITHUB_OUTPUT" + else + MSG="The packages created WILL be signed." + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=true" >> "$GITHUB_OUTPUT" + fi + else + MSG="The sign-packages input is false. 
The packages created will NOT be signed." + echo "${MSG}" + echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" + echo "sign-pkgs=false" >> "$GITHUB_OUTPUT" + fi + + - uses: actions/checkout@v4 - uses: actions/setup-python@v4 with: - python-version: 3.9 + python-version: 3.11 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Setup Salt Version id: setup-salt-version @@ -41,21 +109,48 @@ jobs: uses: actions/download-artifact@v3 with: name: salt-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.zip + path: artifacts/ + + - name: Code signing with Software Trust Manager + if: ${{ steps.check-pkg-sign.outputs.sign-pkgs == 'true' }} + uses: digicert/ssm-code-signing@v0.0.2 + + - name: Setup Certificate + if: ${{ steps.check-pkg-sign.outputs.sign-pkgs == 'true' }} + shell: bash + run: | + echo "${{ secrets.WIN_SIGN_CERT_FILE_B64 }}" | base64 --decode > /d/Certificate_pkcs12.p12 - - name: Decompress Onedir Zipfile + - name: Build Windows Packages run: | - py -3 -m zipfile -e salt-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.zip pkg/windows/ - mv pkg/windows/salt pkg/windows/buildenv + tools pkg build windows --relenv-version=${{ inputs.relenv-version }} --python-version=${{ inputs.python-version }} ${{ + inputs.source == 'onedir' && + format( + '--onedir salt-{0}-onedir-windows-{1}.zip --salt-version {0} --arch {1} {2}', + inputs.salt-version, + matrix.arch, + steps.check-pkg-sign.outputs.sign-pkgs == 'true' && '--sign' || '' + ) + || + format('--salt-version {0} --arch {1}', inputs.salt-version, matrix.arch) + }} - - name: Build Package - shell: powershell + - name: Set Artifact Name + id: set-artifact-name + shell: bash run: | - & pkg/windows/build.cmd -Architecture ${{ matrix.arch }} -Version ${{ inputs.salt-version }} -CICD -SkipInstall + if [ "${{ inputs.source }}" != "src" ]; then + echo "artifact-name-nsis=salt-${{ inputs.salt-version }}-${{ matrix.arch 
}}-NSIS" >> "$GITHUB_OUTPUT" + echo "artifact-name-msi=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-MSI" >> "$GITHUB_OUTPUT" + else + echo "artifact-name-nsis=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-NSIS-from-src" >> "$GITHUB_OUTPUT" + echo "artifact-name-msi=salt-${{ inputs.salt-version }}-${{ matrix.arch }}-MSI-from-src" >> "$GITHUB_OUTPUT" + fi - - name: Upload ${{ matrix.arch }} Packages + - name: Upload ${{ matrix.arch }} NSIS Packages uses: actions/upload-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-NSIS + name: ${{ steps.set-artifact-name.outputs.artifact-name-nsis }} path: pkg/windows/build/Salt-*.exe retention-days: 7 if-no-files-found: error @@ -63,7 +158,7 @@ jobs: - name: Upload ${{ matrix.arch }} MSI Package uses: actions/upload-artifact@v3 with: - name: salt-${{ inputs.salt-version }}-${{ matrix.arch }}-MSI + name: ${{ steps.set-artifact-name.outputs.artifact-name-msi }} path: pkg/windows/build/Salt-*.msi retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/check-workflow-run.yml b/.github/workflows/check-workflow-run.yml deleted file mode 100644 index cc971b67b9f7..000000000000 --- a/.github/workflows/check-workflow-run.yml +++ /dev/null @@ -1,42 +0,0 @@ ---- -name: Check Workflow Run - -on: - workflow_run: - types: - - completed - workflows: - - CI - - Nightly - - Scheduled - - Stage Release - -permissions: - actions: write - -jobs: - restart-failed-workflow-runs: - name: "Restart Workflow (ID: ${{ github.event.workflow_run.id }}; Attempt: ${{ github.event.workflow_run.run_attempt }})" - if: ${{ github.event.workflow_run.conclusion == 'failure' }} - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Set up Python 3.10 - uses: actions/setup-python@v4 - id: setup-python - with: - python-version: "3.10" - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Pretty Print The GH Actions Event - run: - tools ci 
print-gh-event - - - name: Restart Workflow - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - tools ci rerun-workflow diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 422d6d0e7151..ffeb2dfef10f 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,19 +2,27 @@ # Instead, edit the template '.github/workflows/templates/ci.yml.jinja' --- name: CI -run-name: "CI (${{ github.event_name == 'pull_request' && format('PR: #{0}', github.event.number) || format('Branch: {0}', github.ref_name) }})" +run-name: "CI (${{ github.event_name == 'pull_request' && format('pr: #{0}', github.event.number) || format('{0}: {1}', startsWith(github.event.ref, 'refs/tags') && 'tag' || 'branch', github.ref_name) }})" + on: push: {} - pull_request: {} + pull_request: + types: + - labeled + - unlabeled + - opened + - reopened + - synchronize env: COLUMNS: 190 - CACHE_SEED: SEED-2 # Bump the number to invalidate all caches + CACHE_SEED: SEED-7 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: contents: read # for dorny/paths-filter to fetch a list of changed files pull-requests: read # for dorny/paths-filter to read pull requests + actions: read # for technote-space/workflow-conclusion-action to get the job statuses concurrency: # Concurrency is defined in a way that concurrent builds against branches do @@ -28,16 +36,20 @@ jobs: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }} + pull-labels: ${{ steps.get-pull-labels.outputs.labels }} testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ 
steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} + testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 # Full clone to also get the tags to get the right salt version @@ -83,9 +95,11 @@ jobs: - *doc_requirements workflows: - added|modified: + - cicd/shared-gh-workflows-context.yml - .github/actions/**/action.yml - .github/workflows/*.yml - .github/workflows/templates/*.yml.jinja2 + - tools/precommit/workflows.py salt: - added|modified: &salt_added_modified - setup.py @@ -123,11 +137,18 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: tools ci print-gh-event + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -135,6 +156,14 @@ jobs: salt-version: "" validate-version: true + - name: Get Pull Request Test Labels + id: get-pull-labels + if: ${{ github.event_name == 'pull_request'}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-pr-test-labels --repository ${{ github.repository }} + - name: Write Changed Files To A Local File run: echo '${{ toJSON(steps.changed-files.outputs) }}' > changed-files.json @@ -172,6 +201,26 @@ jobs: run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' 
+ - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Get Latest Salt Releases for Testing + id: get-testing-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + echo '${{ steps.get-testing-releases.outputs.testing-releases }}' | jq -C '.' + - name: Define Testrun id: define-testrun run: | @@ -193,10 +242,7 @@ jobs: name: testrun-changed-files.txt path: testrun-changed-files.txt - - name: Set Cache Seed Output - id: set-cache-seed - run: | - echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -220,35 +266,21 @@ jobs: prepare-release: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version + - name: Set up Python 3.10 + uses: actions/setup-python@v4 with: - python-binary: python3 + python-version: "3.10" - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 - 
with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -256,28 +288,30 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - # TODO: Remove the --salt-version argument post 3006 release. This was to handle versioning - # issues on pre-3006 development versions on deb-based distros. 
- name: Update Debian changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" --draft - tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" + tools changelog update-deb --draft + tools changelog update-deb - name: Update RPM changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-rpm --draft tools changelog update-rpm - name: Update Release Notes shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-release-notes --draft tools changelog update-release-notes - name: Generate MAN Pages shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: LATEST_RELEASE: "${{ needs.prepare-workflow.outputs.salt-version }}" SALT_ON_SALTSTACK: "1" @@ -286,22 +320,26 @@ jobs: - name: Update Changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-changelog-md --draft tools changelog update-changelog-md - name: Show Changes Diff shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git diff --color - name: Configure Git shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git config --global user.name "Salt Project Packaging" git config --global user.email saltproject-packaging@vmware.com - name: Setup Pre-Commit + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} uses: ./.github/actions/setup-pre-commit with: version: "3.0.4" @@ -309,8 +347,10 @@ jobs: - name: Commit Changes shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. 
git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ @@ -318,11 +358,13 @@ jobs: - name: Create release changes patch shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git format-patch --keep-subject --binary --stdout HEAD^ > salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch - name: Upload Changes Diff Artifact uses: actions/upload-artifact@v3 + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch path: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch @@ -346,24 +388,19 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 3.10 uses: actions/setup-python@v4 with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -371,849 +408,2631 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" 
- build-deps-onedir: + build-deps-onedir-linux: + name: Build Dependencies Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + uses: ./.github/workflows/build-deps-onedir-linux.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-deps-onedir-windows: + name: Build Dependencies Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + uses: ./.github/workflows/build-deps-onedir-windows.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-deps-onedir-macos: name: Build Dependencies Onedir if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml + uses: ./.github/workflows/build-deps-onedir-macos.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-salt-onedir-linux: + name: Build Salt Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} + needs: + - prepare-workflow + - build-deps-onedir-linux + - build-source-tarball + uses: ./.github/workflows/build-salt-onedir-linux.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-salt-onedir-windows: + name: Build Salt Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} + needs: + - prepare-workflow + - build-deps-onedir-windows + - build-source-tarball + uses: ./.github/workflows/build-salt-onedir-windows.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.7.0" - python-version-linux: "3.10.10" - python-version-macos: "3.10.10" - python-version-windows: "3.10.10" + relenv-version: "0.14.2" + python-version: "3.10.13" - build-salt-onedir: + build-salt-onedir-macos: name: Build Salt Onedir if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir + - build-deps-onedir-macos - build-source-tarball - uses: ./.github/workflows/build-salt-onedir.yml + uses: ./.github/workflows/build-salt-onedir-macos.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.7.0" - python-version-linux: "3.10.10" - python-version-macos: "3.10.10" - python-version-windows: "3.10.10" + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-rpm-pkgs-onedir: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-rpm-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" - build-rpm-pkgs: - name: Build RPM Packages + build-rpm-pkgs-src: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-linux uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + + build-deb-pkgs-onedir: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deb-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + 
python-version: "3.10.13" + source: "onedir" - build-deb-pkgs: - name: Build DEB Packages + build-deb-pkgs-src: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-linux uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + + build-windows-pkgs-onedir: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: ./.github/workflows/build-windows-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" - build-windows-pkgs: - name: Build Windows Packages + build-windows-pkgs-src: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-windows uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" - build-macos-pkgs: - name: Build macOS Packages + build-macos-pkgs-onedir: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - 
build-salt-onedir + - build-salt-onedir-macos uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" - amazonlinux-2-pkg-tests: - name: Amazon Linux 2 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + build-macos-pkgs-src: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-macos + uses: ./.github/workflows/build-macos-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + + windows-2016-ci-deps: + name: Windows 2016 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + windows-2019-ci-deps: + name: Windows 2019 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml 
+ with: + distro-slug: windows-2019 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + windows-2022-ci-deps: + name: Windows 2022 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: windows-2022 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + macos-12-ci-deps: + name: macOS 12 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml + with: + distro-slug: macos-12 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + macos-13-ci-deps: + name: macOS 13 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml + with: + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version 
}}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + macos-13-xlarge-ci-deps: + name: macOS 13 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-8-ci-deps: + name: Alma Linux 8 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-8-arm64-ci-deps: + name: Alma Linux 8 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-8-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-9-ci-deps: + name: Alma Linux 9 Deps + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-9-arm64-ci-deps: + name: Alma Linux 9 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2-ci-deps: + name: Amazon Linux 2 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: distro-slug: amazonlinux-2 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} - skip-junit-reports: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centos-7-pkg-tests: - name: CentOS 7 
Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + amazonlinux-2-arm64-ci-deps: + name: Amazon Linux 2 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version 
}}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + archlinux-lts-ci-deps: + name: Arch Linux LTS Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: archlinux-lts + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centos-7-ci-deps: + name: CentOS 7 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: distro-slug: centos-7 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} - skip-junit-reports: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centosstream-8-pkg-tests: - name: CentOS Stream 8 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + centos-7-arm64-ci-deps: + name: CentOS 7 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + 
uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centos-7-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-8-ci-deps: + name: CentOS Stream 8 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: distro-slug: centosstream-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-8-arm64-ci-deps: + name: CentOS Stream 8 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-8-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-9-ci-deps: + name: CentOS Stream 9 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 
2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-9-arm64-ci-deps: + name: CentOS Stream 9 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-10-ci-deps: + name: Debian 10 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-11-ci-deps: + name: Debian 11 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + 
debian-11-arm64-ci-deps: + name: Debian 11 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-12-ci-deps: + name: Debian 12 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-12 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-12-arm64-ci-deps: + name: Debian 12 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-37-ci-deps: + name: Fedora 37 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - 
prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-37 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-37-arm64-ci-deps: + name: Fedora 37 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-37-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-38-ci-deps: + name: Fedora 38 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-38 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-38-arm64-ci-deps: + name: Fedora 38 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-38-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + 
nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + opensuse-15-ci-deps: + name: Opensuse 15 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: opensuse-15 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-3-ci-deps: + name: Photon OS 3 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-3 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-3-arm64-ci-deps: + name: Photon OS 3 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + 
photonos-4-ci-deps: + name: Photon OS 4 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-4-arm64-ci-deps: + name: Photon OS 4 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-5-ci-deps: + name: Photon OS 5 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-5-arm64-ci-deps: + name: Photon OS 5 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + 
needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2004-ci-deps: + name: Ubuntu 20.04 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2004-arm64-ci-deps: + name: Ubuntu 20.04 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-20.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2204-ci-deps: + name: Ubuntu 22.04 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir + platform: 
linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2204-arm64-ci-deps: + name: Ubuntu 22.04 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-22.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2-pkg-tests: + name: Amazon Linux 2 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - 
amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: 
aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centos-7-pkg-tests: + name: CentOS 7 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centos-7-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centos-7 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-8-pkg-tests: + name: CentOS Stream 8 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-8-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ 
fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-9-pkg-tests: + name: CentOS Stream 9 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} + + debian-10-pkg-tests: + name: Debian 10 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-10-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-11-pkg-tests: + name: Debian 11 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-11-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-11-arm64-pkg-tests: + name: Debian 11 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + 
needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-11-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-12-pkg-tests: + name: Debian 12 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-12-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-12 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-12-arm64-pkg-tests: + name: Debian 12 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-12-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 
+ salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-3-pkg-tests: + name: Photon OS 3 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-3-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-3 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-3-arm64-pkg-tests: + name: Photon OS 3 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-3-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir platform: linux - arch: x86_64 + arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' 
}} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} - centosstream-9-pkg-tests: - name: CentOS Stream 9 Package Tests + photonos-4-pkg-tests: + name: Photon OS 4 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs + - build-rpm-pkgs-onedir + - photonos-4-ci-deps uses: ./.github/workflows/test-packages-action.yml with: - distro-slug: centosstream-9 + distro-slug: photonos-4 + nox-session: ci-test-onedir platform: linux arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true - debian-10-pkg-tests: - name: Debian 10 Package Tests + photonos-4-arm64-pkg-tests: + name: Photon OS 4 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-rpm-pkgs-onedir + - photonos-4-arm64-ci-deps uses: ./.github/workflows/test-packages-action.yml with: - distro-slug: debian-10 + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir platform: linux 
- arch: x86_64 + arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true - debian-11-pkg-tests: - name: Debian 11 Package Tests + photonos-5-pkg-tests: + name: Photon OS 5 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-rpm-pkgs-onedir + - photonos-5-ci-deps uses: ./.github/workflows/test-packages-action.yml with: - distro-slug: debian-11 + distro-slug: photonos-5 + nox-session: ci-test-onedir platform: linux arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true - debian-11-arm64-pkg-tests: - name: Debian 11 Arm64 Package Tests + photonos-5-arm64-pkg-tests: + name: Photon OS 5 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-rpm-pkgs-onedir + - photonos-5-arm64-ci-deps uses: ./.github/workflows/test-packages-action.yml with: - distro-slug: debian-11-arm64 + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true ubuntu-2004-pkg-tests: - name: Ubuntu 20.04 Package Tests + name: Ubuntu 20.04 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-deb-pkgs-onedir + - ubuntu-2004-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir platform: linux arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases 
}} ubuntu-2004-arm64-pkg-tests: - name: Ubuntu 20.04 Arm64 Package Tests + name: Ubuntu 20.04 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-deb-pkgs-onedir + - ubuntu-2004-arm64-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-20.04-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-pkg-tests: - name: Ubuntu 22.04 Package Tests + name: Ubuntu 22.04 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-deb-pkgs-onedir + - ubuntu-2204-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir platform: linux arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} 
skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-arm64-pkg-tests: - name: Ubuntu 22.04 Arm64 Package Tests + name: Ubuntu 22.04 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-deb-pkgs-onedir + - ubuntu-2204-arm64-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-22.04-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} macos-12-pkg-tests: - name: macOS 12 Package Tests + name: macOS 12 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-macos-pkgs + - build-macos-pkgs-onedir + - macos-12-ci-deps uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-12 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + 
skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + macos-13-pkg-tests: + name: macOS 13 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-macos-pkgs-onedir + - macos-13-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-13 + nox-session: ci-test-onedir platform: darwin arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + macos-13-xlarge-pkg-tests: + name: macOS 13 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-macos-pkgs-onedir + - macos-13-xlarge-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} windows-2016-nsis-pkg-tests: - name: Windows 2016 NSIS Package Tests + name: Windows 2016 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2016-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2016 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016-msi-pkg-tests: - name: Windows 2016 MSI Package Tests + name: Windows 2016 MSI Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2016-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2016 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ 
fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-nsis-pkg-tests: - name: Windows 2019 NSIS Package Tests + name: Windows 2019 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2019 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-msi-pkg-tests: - name: Windows 2019 MSI Package Tests + name: Windows 2019 MSI Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2019 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + 
nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-nsis-pkg-tests: - name: Windows 2022 NSIS Package Tests + name: Windows 2022 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2022 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-msi-pkg-tests: - name: Windows 2022 MSI Package Tests + name: Windows 2022 MSI Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2022 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - 
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016: - name: Windows 2016 + name: Windows 2016 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2016-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2016 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 windows-2019: - name: Windows 2019 + name: Windows 2019 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2019 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 windows-2022: - name: Windows 2022 + name: Windows 2022 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2022 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 macos-12: - name: macOS 12 + name: macOS 12 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - macos-12-ci-deps uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-12 nox-session: ci-test-onedir platform: darwin arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + + macos-13: + name: macOS 13 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - macos-13-ci-deps + uses: ./.github/workflows/test-action-macos.yml + with: + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + + macos-13-xlarge: + name: macOS 13 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - macos-13-xlarge-ci-deps + uses: ./.github/workflows/test-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 almalinux-8: - name: Alma Linux 8 + name: Alma Linux 8 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - almalinux-8-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: almalinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 almalinux-9: - name: Alma Linux 9 + name: Alma Linux 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - almalinux-9-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: almalinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 amazonlinux-2: - name: Amazon Linux 2 + name: Amazon Linux 2 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - amazonlinux-2-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: amazonlinux-2 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 
180 + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 archlinux-lts: - name: Arch Linux LTS + name: Arch Linux LTS Test if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - archlinux-lts-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: archlinux-lts nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 centos-7: - name: CentOS 7 + name: CentOS 7 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centos-7-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: centos-7 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 centosstream-8: - name: CentOS Stream 8 + name: CentOS Stream 8 Test if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centosstream-8-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: centosstream-8 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 centosstream-9: - name: CentOS Stream 9 + name: CentOS Stream 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centosstream-9-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: centosstream-9 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 debian-10: - name: Debian 10 + name: Debian 10 Test if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-10-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-10 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 debian-11: - name: Debian 11 + name: Debian 11 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-11-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-11 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 debian-11-arm64: - name: Debian 11 Arm64 + name: Debian 11 Arm64 Test if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-11-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-11-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 - fedora-36: - name: Fedora 36 + debian-12: + name: Debian 12 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-12-ci-deps uses: ./.github/workflows/test-action.yml with: - distro-slug: fedora-36 + distro-slug: debian-12 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + + debian-12-arm64: + name: Debian 12 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - debian-12-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 fedora-37: - name: Fedora 37 + name: Fedora 37 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - fedora-37-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: fedora-37 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 fedora-38: - name: Fedora 38 + name: Fedora 38 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - fedora-38-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: fedora-38 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 opensuse-15: - name: Opensuse 15 + name: Opensuse 15 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - opensuse-15-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: opensuse-15 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 photonos-3: - name: Photon OS 3 + name: Photon OS 3 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - photonos-3-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: photonos-3 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + + photonos-3-arm64: + name: Photon OS 3 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-3-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 photonos-4: - name: Photon OS 4 + name: Photon OS 4 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - photonos-4-ci-deps uses: 
./.github/workflows/test-action.yml with: distro-slug: photonos-4 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + fips: true + + photonos-4-arm64: + name: Photon OS 4 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-4-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + fips: true + + photonos-5: + name: Photon OS 5 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-5-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} + skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + fips: true + + photonos-5-arm64: + name: Photon OS 5 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-5-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + fips: true ubuntu-2004: - name: Ubuntu 20.04 + name: Ubuntu 20.04 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2004-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-20.04 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - 
skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 ubuntu-2004-arm64: - name: Ubuntu 20.04 Arm64 + name: Ubuntu 20.04 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2004-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-20.04-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 ubuntu-2204: - name: Ubuntu 22.04 + name: Ubuntu 22.04 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2204-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-22.04 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 ubuntu-2204-arm64: - name: Ubuntu 22.04 Arm64 + name: Ubuntu 22.04 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2204-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-22.04-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: ${{ github.event_name == 'pull_request' }} + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }} skip-junit-reports: ${{ github.event_name == 'pull_request' }} + workflow-slug: ci + default-timeout: 180 + + combine-all-code-coverage: + name: Combine Code Coverage + if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + needs: + - prepare-workflow + - windows-2016-ci-deps + - windows-2019-ci-deps + - windows-2022-ci-deps + - macos-12-ci-deps + - macos-13-ci-deps + - macos-13-xlarge-ci-deps + - almalinux-8-ci-deps + - almalinux-8-arm64-ci-deps + - almalinux-9-ci-deps + - almalinux-9-arm64-ci-deps + - 
amazonlinux-2-ci-deps + - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps + - archlinux-lts-ci-deps + - centos-7-ci-deps + - centos-7-arm64-ci-deps + - centosstream-8-ci-deps + - centosstream-8-arm64-ci-deps + - centosstream-9-ci-deps + - centosstream-9-arm64-ci-deps + - debian-10-ci-deps + - debian-11-ci-deps + - debian-11-arm64-ci-deps + - debian-12-ci-deps + - debian-12-arm64-ci-deps + - fedora-37-ci-deps + - fedora-37-arm64-ci-deps + - fedora-38-ci-deps + - fedora-38-arm64-ci-deps + - opensuse-15-ci-deps + - photonos-3-ci-deps + - photonos-3-arm64-ci-deps + - photonos-4-ci-deps + - photonos-4-arm64-ci-deps + - photonos-5-ci-deps + - photonos-5-arm64-ci-deps + - ubuntu-2004-ci-deps + - ubuntu-2004-arm64-ci-deps + - ubuntu-2204-ci-deps + - ubuntu-2204-arm64-ci-deps + - windows-2016 + - windows-2019 + - windows-2022 + - macos-12 + - macos-13 + - macos-13-xlarge + - almalinux-8 + - almalinux-9 + - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 + - archlinux-lts + - centos-7 + - centosstream-8 + - centosstream-9 + - debian-10 + - debian-11 + - debian-11-arm64 + - debian-12 + - debian-12-arm64 + - fedora-37 + - fedora-38 + - opensuse-15 + - photonos-3 + - photonos-3-arm64 + - photonos-4 + - photonos-4-arm64 + - photonos-5 + - photonos-5-arm64 + - ubuntu-2004 + - ubuntu-2004-arm64 + - ubuntu-2204 + - ubuntu-2204-arm64 + steps: + - uses: actions/checkout@v4 + + - name: Set up Python 3.10 + if: ${{ github.event.repository.private == false }} + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Setup Python Tools Scripts + id: python-tools-scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage + + - name: Install Nox + run: | + python3 -m pip install 'nox==2022.8.7' + + + + - name: Get coverage reports + id: get-coverage-reports + uses: actions/download-artifact@v3 + with: + 
name: all-testrun-coverage-artifacts + path: artifacts/coverage/ + + - name: Display structure of downloaded files + run: tree -a artifacts/ + + - name: Install Codecov CLI + run: | + # We can't yet use tokenless uploads with the codecov CLI + # python3 -m pip install codecov-cli + # + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import + curl -Os https://uploader.codecov.io/latest/linux/codecov + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + mv ./codecov /usr/local/bin/ + + - name: Create XML Coverage Reports + run: | + nox --force-color -e create-xml-coverage-reports + + - name: Upload Code Coverage To Codecov + run: | + tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/ + + - name: Combine Code Coverage + run: | + nox --force-color -e combine-coverage + + - name: Report Salt Code Coverage + run: | + nox --force-color -e coverage-report -- salt + + - name: Create Salt Code Coverage HTML Report + run: | + nox --force-color -e create-html-coverage-report -- salt + + - name: Upload Salt Code Coverage HTML Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-salt-html-report + path: artifacts/coverage/html/salt + retention-days: 7 + if-no-files-found: error + + - name: Report Combined Code Coverage + run: | + nox --force-color -e coverage-report + + - name: Create Combined Code Coverage JSON Report + run: | + nox --force-color -e create-json-coverage-reports + + - name: Upload Combined Code Coverage JSON Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-full-json-report + path: 
artifacts/coverage/coverage.json + retention-days: 7 + if-no-files-found: error + + - name: Create Combined Code Coverage HTML Report + run: | + nox --force-color -e create-html-coverage-report + + - name: Upload Combined Code Coverage HTML Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-full-html-report + path: artifacts/coverage/html/full + retention-days: 7 + if-no-files-found: error set-pipeline-exit-status: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow - pre-commit - lint - build-docs - - build-deps-onedir - - build-salt-onedir + - build-deps-onedir-linux + - build-deps-onedir-windows + - build-deps-onedir-macos + - build-salt-onedir-linux + - build-salt-onedir-windows + - build-salt-onedir-macos + - build-rpm-pkgs-src + - build-deb-pkgs-src + - build-windows-pkgs-src + - build-macos-pkgs-src + - combine-all-code-coverage + - windows-2016-ci-deps + - windows-2019-ci-deps + - windows-2022-ci-deps + - macos-12-ci-deps + - macos-13-ci-deps + - macos-13-xlarge-ci-deps + - almalinux-8-ci-deps + - almalinux-8-arm64-ci-deps + - almalinux-9-ci-deps + - almalinux-9-arm64-ci-deps + - amazonlinux-2-ci-deps + - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps + - archlinux-lts-ci-deps + - centos-7-ci-deps + - centos-7-arm64-ci-deps + - centosstream-8-ci-deps + - centosstream-8-arm64-ci-deps + - centosstream-9-ci-deps + - centosstream-9-arm64-ci-deps + - debian-10-ci-deps + - debian-11-ci-deps + - debian-11-arm64-ci-deps + - debian-12-ci-deps + - debian-12-arm64-ci-deps + - fedora-37-ci-deps + - fedora-37-arm64-ci-deps + - fedora-38-ci-deps + - fedora-38-arm64-ci-deps + - opensuse-15-ci-deps + - 
photonos-3-ci-deps + - photonos-3-arm64-ci-deps + - photonos-4-ci-deps + - photonos-4-arm64-ci-deps + - photonos-5-ci-deps + - photonos-5-arm64-ci-deps + - ubuntu-2004-ci-deps + - ubuntu-2004-arm64-ci-deps + - ubuntu-2204-ci-deps + - ubuntu-2204-arm64-ci-deps - windows-2016 - windows-2019 - windows-2022 - macos-12 + - macos-13 + - macos-13-xlarge - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -1221,28 +3040,47 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 + - debian-12 + - debian-12-arm64 - fedora-37 - fedora-38 - opensuse-15 - photonos-3 + - photonos-3-arm64 - photonos-4 + - photonos-4-arm64 + - photonos-5 + - photonos-5-arm64 - ubuntu-2004 - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests + - debian-12-pkg-tests + - debian-12-arm64-pkg-tests + - photonos-3-pkg-tests + - photonos-3-arm64-pkg-tests + - photonos-4-pkg-tests + - photonos-4-arm64-pkg-tests + - photonos-5-pkg-tests + - photonos-5-arm64-pkg-tests - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests - ubuntu-2204-arm64-pkg-tests - macos-12-pkg-tests + - macos-13-pkg-tests + - macos-13-xlarge-pkg-tests - windows-2016-nsis-pkg-tests - windows-2016-msi-pkg-tests - windows-2019-nsis-pkg-tests diff --git a/.github/workflows/lint-action.yml b/.github/workflows/lint-action.yml index 6e0df43e4d07..347ddd291986 100644 --- a/.github/workflows/lint-action.yml +++ b/.github/workflows/lint-action.yml @@ -18,21 +18,23 @@ env: jobs: Salt: name: Lint Salt's Source Code - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && 
fromJSON('["self-hosted", "linux", "large", "x86_64"]') || 'ubuntu-latest' }} if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['salt'] || fromJSON(inputs.changed-files)['lint'] }} container: - image: python:3.8-slim-buster + image: ghcr.io/saltstack/salt-ci-containers/python:3.8 steps: - name: Install System Deps run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list apt-get update - apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - apt-get install -y git/buster-backports + apt-get install -y enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - - uses: actions/checkout@v3 + - name: Add Git Safe Directory + run: | + git config --global --add safe.directory "$(pwd)" + + - uses: actions/checkout@v4 - name: Install Nox run: | @@ -60,21 +62,24 @@ jobs: Tests: name: Lint Salt's Test Suite - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "large", "x86_64"]') || 'ubuntu-latest' }} if: ${{ contains(fromJSON('["push", "schedule", "workflow_dispatch"]'), github.event_name) || fromJSON(inputs.changed-files)['tests'] || fromJSON(inputs.changed-files)['lint'] }} container: - image: python:3.8-slim-buster + image: ghcr.io/saltstack/salt-ci-containers/python:3.8 steps: - name: Install System Deps run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list + echo "deb http://deb.debian.org/debian bookworm-backports main" >> /etc/apt/sources.list apt-get update - apt-get install -y enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - apt-get install -y git/buster-backports + apt-get install -y 
enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev + + - name: Add Git Safe Directory + run: | + git config --global --add safe.directory "$(pwd)" - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install Nox run: | diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index e2fb1546d2b2..62c58619c70d 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -3,17 +3,26 @@ --- name: Nightly -run-name: "Nightly (${{ format('Branch: {0}', github.ref_name) }})" +run-name: "Nightly (branch: ${{ github.ref_name }})" on: - workflow_dispatch: {} + workflow_dispatch: + inputs: + skip-salt-test-suite: + type: boolean + default: false + description: Skip running the Salt test suite. + skip-salt-pkg-test-suite: + type: boolean + default: false + description: Skip running the Salt packages test suite. schedule: # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule - - cron: '0 1 * * *' # Every day at 1AM + - cron: '0 0 * * *' # Every day at 0AM env: COLUMNS: 190 - CACHE_SEED: SEED-2 # Bump the number to invalidate all caches + CACHE_SEED: SEED-7 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: @@ -29,14 +38,14 @@ jobs: workflow-requirements: name: Check Workflow Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: requirements-met: ${{ steps.check-requirements.outputs.requirements-met }} steps: - name: Check Requirements id: check-requirements run: | - if [ "${RUN_SCHEDULED_BUILDS}" = "1" ]; then + if [ "${{ vars.RUN_SCHEDULED_BUILDS }}" = "1" ]; then MSG="Running workflow because RUN_SCHEDULED_BUILDS=1" echo "${MSG}" echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" @@ -55,9 +64,8 @@ jobs: trigger-branch-nightly-builds: name: 
Trigger Branch Workflows - if: ${{ github.event_name == 'schedule' }} - runs-on: - - ubuntu-latest + if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - workflow-requirements steps: @@ -69,7 +77,7 @@ jobs: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} if: ${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }} needs: - workflow-requirements @@ -77,11 +85,15 @@ jobs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }} + pull-labels: ${{ steps.get-pull-labels.outputs.labels }} testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} + testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 # Full clone to also get the tags to get the right salt version @@ -127,9 +139,11 @@ jobs: - *doc_requirements workflows: - added|modified: + - cicd/shared-gh-workflows-context.yml - .github/actions/**/action.yml - .github/workflows/*.yml - .github/workflows/templates/*.yml.jinja2 + - tools/precommit/workflows.py salt: - added|modified: &salt_added_modified - setup.py @@ -167,11 +181,18 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: tools 
ci print-gh-event + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -179,6 +200,14 @@ jobs: salt-version: "" validate-version: true + - name: Get Pull Request Test Labels + id: get-pull-labels + if: ${{ github.event_name == 'pull_request'}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-pr-test-labels --repository ${{ github.repository }} + - name: Write Changed Files To A Local File run: echo '${{ toJSON(steps.changed-files.outputs) }}' > changed-files.json @@ -210,12 +239,32 @@ jobs: - name: Define Jobs id: define-jobs run: | - tools ci define-jobs ${{ github.event_name }} changed-files.json + tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ github.event_name }} changed-files.json - name: Check Defined Jobs run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Get Latest Salt Releases for Testing + id: get-testing-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + echo '${{ steps.get-testing-releases.outputs.testing-releases }}' | jq -C '.' 
+ - name: Define Testrun id: define-testrun run: | @@ -237,10 +286,7 @@ jobs: name: testrun-changed-files.txt path: testrun-changed-files.txt - - name: Set Cache Seed Output - id: set-cache-seed - run: | - echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -264,35 +310,21 @@ jobs: prepare-release: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version + - name: Set up Python 3.10 + uses: actions/setup-python@v4 with: - python-binary: python3 + python-version: "3.10" - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -300,28 +332,35 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - # TODO: Remove the --salt-version argument post 3006 release. This was to handle versioning - # issues on pre-3006 development versions on deb-based distros. - name: Update Debian changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" --draft - tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" + tools changelog update-deb --draft + tools changelog update-deb - name: Update RPM changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-rpm --draft tools changelog update-rpm - name: Update Release Notes shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - tools changelog update-release-notes --draft - tools changelog update-release-notes + if [ "${{ contains(fromJSON('["master"]'), github.ref_name) }}" == "true" ]; then + tools changelog update-release-notes --draft --next-release + tools changelog update-release-notes --next-release + else + tools changelog update-release-notes --draft + tools changelog update-release-notes + fi - name: Generate MAN Pages shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: LATEST_RELEASE: "${{ needs.prepare-workflow.outputs.salt-version }}" SALT_ON_SALTSTACK: "1" @@ -330,22 +369,26 @@ jobs: - name: Update Changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-changelog-md --draft tools changelog update-changelog-md - name: Show Changes Diff shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git diff --color - name: Configure Git shell: bash + if: ${{ 
startsWith(github.event.ref, 'refs/tags') == false }} run: | git config --global user.name "Salt Project Packaging" git config --global user.email saltproject-packaging@vmware.com - name: Setup Pre-Commit + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} uses: ./.github/actions/setup-pre-commit with: version: "3.0.4" @@ -353,8 +396,10 @@ jobs: - name: Commit Changes shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ @@ -362,11 +407,13 @@ jobs: - name: Create release changes patch shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git format-patch --keep-subject --binary --stdout HEAD^ > salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch - name: Upload Changes Diff Artifact uses: actions/upload-artifact@v3 + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch path: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch @@ -390,24 +437,19 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 3.10 uses: actions/setup-python@v4 with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ 
-415,831 +457,2570 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - build-deps-onedir: + build-deps-onedir-linux: + name: Build Dependencies Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + uses: ./.github/workflows/build-deps-onedir-linux.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-deps-onedir-windows: + name: Build Dependencies Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + uses: ./.github/workflows/build-deps-onedir-windows.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: 
"3.10.13" + + build-deps-onedir-macos: name: Build Dependencies Onedir if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml + uses: ./.github/workflows/build-deps-onedir-macos.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-salt-onedir-linux: + name: Build Salt Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} + needs: + - prepare-workflow + - build-deps-onedir-linux + - build-source-tarball + uses: ./.github/workflows/build-salt-onedir-linux.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-salt-onedir-windows: + name: Build Salt Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} + needs: + - prepare-workflow + - build-deps-onedir-windows + - build-source-tarball + uses: ./.github/workflows/build-salt-onedir-windows.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - 
relenv-version: "0.7.0" - python-version-linux: "3.10.10" - python-version-macos: "3.10.10" - python-version-windows: "3.10.10" + relenv-version: "0.14.2" + python-version: "3.10.13" - build-salt-onedir: + build-salt-onedir-macos: name: Build Salt Onedir if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir + - build-deps-onedir-macos - build-source-tarball - uses: ./.github/workflows/build-salt-onedir.yml + uses: ./.github/workflows/build-salt-onedir-macos.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.7.0" - python-version-linux: "3.10.10" - python-version-macos: "3.10.10" - python-version-windows: "3.10.10" + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-rpm-pkgs-onedir: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-rpm-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" - build-rpm-pkgs: - name: Build RPM Packages + build-rpm-pkgs-src: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-linux uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + 
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" - build-deb-pkgs: - name: Build DEB Packages + build-deb-pkgs-onedir: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-linux uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" - build-windows-pkgs: - name: Build Windows Packages + build-deb-pkgs-src: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deb-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + + build-windows-pkgs-onedir: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-windows uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - build-macos-pkgs: - name: Build macOS Packages + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" + environment: nightly + sign-packages: false + secrets: inherit + + build-windows-pkgs-src: + name: Build Packages if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-windows + uses: ./.github/workflows/build-windows-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + environment: nightly + sign-packages: false + secrets: inherit + + build-macos-pkgs-onedir: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-macos + uses: ./.github/workflows/build-macos-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" + environment: nightly + sign-packages: true + secrets: inherit + + build-macos-pkgs-src: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-macos uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + environment: nightly + sign-packages: true + secrets: inherit + + windows-2016-ci-deps: + name: Windows 2016 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: 
./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - amazonlinux-2-pkg-tests: - name: Amazon Linux 2 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + windows-2019-ci-deps: + name: Windows 2019 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: amazonlinux-2 - platform: linux - arch: x86_64 + distro-slug: windows-2019 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centos-7-pkg-tests: - name: CentOS 7 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + windows-2022-ci-deps: + name: Windows 2022 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: 
centos-7 - platform: linux - arch: x86_64 + distro-slug: windows-2022 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centosstream-8-pkg-tests: - name: CentOS Stream 8 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + macos-12-ci-deps: + name: macOS 12 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml with: - distro-slug: centosstream-8 - platform: linux + distro-slug: macos-12 + nox-session: ci-test-onedir + platform: darwin arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centosstream-9-pkg-tests: - name: CentOS Stream 9 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + macos-13-ci-deps: + name: macOS 13 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: 
./.github/workflows/test-packages-action.yml + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml with: - distro-slug: centosstream-9 - platform: linux + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - debian-10-pkg-tests: - name: Debian 10 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + macos-13-xlarge-ci-deps: + name: macOS 13 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml with: - distro-slug: debian-10 - platform: linux - arch: x86_64 + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - debian-11-pkg-tests: - name: Debian 11 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + almalinux-8-ci-deps: + name: Alma Linux 8 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: debian-11 + distro-slug: almalinux-8 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - debian-11-arm64-pkg-tests: - name: Debian 11 Arm64 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + almalinux-8-arm64-ci-deps: + name: Alma Linux 8 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: debian-11-arm64 + distro-slug: almalinux-8-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - ubuntu-2004-pkg-tests: - name: Ubuntu 20.04 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + almalinux-9-ci-deps: + name: Alma Linux 9 Deps + 
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: ubuntu-20.04 + distro-slug: almalinux-9 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - ubuntu-2004-arm64-pkg-tests: - name: Ubuntu 20.04 Arm64 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + almalinux-9-arm64-ci-deps: + name: Alma Linux 9 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: ubuntu-20.04-arm64 + distro-slug: almalinux-9-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - ubuntu-2204-pkg-tests: - name: Ubuntu 22.04 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + amazonlinux-2-ci-deps: + name: Amazon Linux 2 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: ubuntu-22.04 + distro-slug: amazonlinux-2 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - ubuntu-2204-arm64-pkg-tests: - name: Ubuntu 22.04 Arm64 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + amazonlinux-2-arm64-ci-deps: + name: Amazon Linux 2 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: ubuntu-22.04-arm64 + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - macos-12-pkg-tests: - name: macOS 
12 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-macos-pkgs - uses: ./.github/workflows/test-packages-action-macos.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: macos-12 - platform: darwin + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: macos - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - windows-2016-nsis-pkg-tests: - name: Windows 2016 NSIS Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: windows-2016 - platform: windows - arch: amd64 + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed 
}}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - windows-2016-msi-pkg-tests: - name: Windows 2016 MSI Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + archlinux-lts-ci-deps: + name: Arch Linux LTS Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: windows-2016 - platform: windows - arch: amd64 - salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + distro-slug: archlinux-lts + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centos-7-ci-deps: + name: CentOS 7 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centos-7 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centos-7-arm64-ci-deps: + name: CentOS 7 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centos-7-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-8-ci-deps: + name: CentOS Stream 8 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-8-arm64-ci-deps: + name: CentOS Stream 8 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-8-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-9-ci-deps: + name: CentOS Stream 9 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: 
./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-9-arm64-ci-deps: + name: CentOS Stream 9 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-10-ci-deps: + name: Debian 10 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-11-ci-deps: + name: Debian 11 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-11-arm64-ci-deps: + name: Debian 11 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-12-ci-deps: + name: Debian 12 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-12 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-12-arm64-ci-deps: + name: Debian 12 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-37-ci-deps: + name: Fedora 37 Deps + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-37 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-37-arm64-ci-deps: + name: Fedora 37 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-37-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-38-ci-deps: + name: Fedora 38 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-38 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-38-arm64-ci-deps: + name: Fedora 38 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: 
./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-38-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + opensuse-15-ci-deps: + name: Opensuse 15 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: opensuse-15 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-3-ci-deps: + name: Photon OS 3 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-3 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-3-arm64-ci-deps: + name: Photon OS 3 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-4-ci-deps: + name: Photon OS 4 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-4-arm64-ci-deps: + name: Photon OS 4 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-5-ci-deps: + name: Photon OS 5 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-5-arm64-ci-deps: + name: Photon OS 5 Arm64 Deps + if: 
${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2004-ci-deps: + name: Ubuntu 20.04 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2004-arm64-ci-deps: + name: Ubuntu 20.04 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-20.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2204-ci-deps: + name: Ubuntu 22.04 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - 
build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2204-arm64-ci-deps: + name: Ubuntu 22.04 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-22.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2-pkg-tests: + name: Amazon Linux 2 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - 
prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centos-7-pkg-tests: + name: CentOS 7 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centos-7-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centos-7 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-8-pkg-tests: + name: CentOS Stream 8 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-8-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-9-pkg-tests: + name: CentOS Stream 9 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - 
centosstream-9-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-10-pkg-tests: + name: Debian 10 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-10-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: 
false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-11-pkg-tests: + name: Debian 11 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-11-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-11-arm64-pkg-tests: + name: Debian 11 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-11-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-12-pkg-tests: + name: Debian 12 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-12-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-12 + nox-session: 
ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-12-arm64-pkg-tests: + name: Debian 12 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-12-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-3-pkg-tests: + name: Photon OS 3 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-3-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-3 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-3-arm64-pkg-tests: + name: Photon OS 3 Arm64 Package 
Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-3-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-4-pkg-tests: + name: Photon OS 4 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-4-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + photonos-4-arm64-pkg-tests: + name: Photon OS 4 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-4-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + photonos-5-pkg-tests: + name: Photon OS 5 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-5-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + photonos-5-arm64-pkg-tests: + name: Photon OS 5 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-5-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + ubuntu-2004-pkg-tests: + name: Ubuntu 20.04 Package Test + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - ubuntu-2004-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + ubuntu-2004-arm64-pkg-tests: + name: Ubuntu 20.04 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - ubuntu-2004-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-20.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + ubuntu-2204-pkg-tests: + name: Ubuntu 22.04 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - ubuntu-2204-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + 
pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + ubuntu-2204-arm64-pkg-tests: + name: Ubuntu 22.04 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - ubuntu-2204-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-22.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + macos-12-pkg-tests: + name: macOS 12 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-macos-pkgs-onedir + - macos-12-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-12 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + macos-13-pkg-tests: + name: macOS 13 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-macos-pkgs-onedir + - macos-13-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + macos-13-xlarge-pkg-tests: + name: macOS 13 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-macos-pkgs-onedir + - macos-13-xlarge-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + windows-2016-nsis-pkg-tests: + name: Windows 2016 NSIS Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-windows-pkgs-onedir + - windows-2016-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: NSIS + nox-version: 2022.8.7 + 
python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + windows-2016-msi-pkg-tests: + name: Windows 2016 MSI Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-windows-pkgs-onedir + - windows-2016-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: MSI + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-nsis-pkg-tests: - name: Windows 2019 NSIS Package Tests + name: Windows 2019 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2019 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-msi-pkg-tests: - name: Windows 2019 MSI Package Tests + name: Windows 2019 MSI 
Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2019 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-nsis-pkg-tests: - name: Windows 2022 NSIS Package Tests + name: Windows 2022 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2022 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-msi-pkg-tests: - name: Windows 2022 MSI Package Tests + name: Windows 2022 MSI Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - 
build-windows-pkgs-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2022 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016: - name: Windows 2016 + name: Windows 2016 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2016-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2016 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 windows-2019: - name: Windows 2019 + name: Windows 2019 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2019 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version 
}}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 windows-2022: - name: Windows 2022 + name: Windows 2022 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2022 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 macos-12: - name: macOS 12 + name: macOS 12 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - macos-12-ci-deps uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-12 nox-session: ci-test-onedir platform: darwin arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + macos-13: + name: macOS 13 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + 
needs: + - prepare-workflow + - macos-13-ci-deps + uses: ./.github/workflows/test-action-macos.yml + with: + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + macos-13-xlarge: + name: macOS 13 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - macos-13-xlarge-ci-deps + uses: ./.github/workflows/test-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 almalinux-8: - name: Alma Linux 8 + name: Alma Linux 8 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - almalinux-8-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: almalinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 almalinux-9: - name: Alma Linux 9 + name: Alma Linux 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - almalinux-9-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: almalinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + amazonlinux-2: + name: Amazon Linux 2 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: 
./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 - amazonlinux-2: - name: Amazon Linux 2 + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - amazonlinux-2023-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: - distro-slug: amazonlinux-2 + distro-slug: amazonlinux-2023-arm64 nox-session: ci-test-onedir platform: linux - arch: x86_64 + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 archlinux-lts: - name: Arch Linux LTS + name: Arch Linux LTS Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - archlinux-lts-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: archlinux-lts nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 centos-7: - name: CentOS 7 + name: CentOS 7 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centos-7-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: centos-7 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 centosstream-8: - name: CentOS Stream 8 + name: CentOS Stream 8 Test 
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centosstream-8-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: centosstream-8 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 centosstream-9: - name: CentOS Stream 9 + name: CentOS Stream 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centosstream-9-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: centosstream-9 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 debian-10: - name: Debian 10 + name: Debian 10 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-10-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-10 nox-session: ci-test-onedir platform: linux arch: x86_64 + 
nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 debian-11: - name: Debian 11 + name: Debian 11 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-11-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-11 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 debian-11-arm64: - name: Debian 11 Arm64 + name: Debian 11 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-11-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-11-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: 
false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 - fedora-36: - name: Fedora 36 + debian-12: + name: Debian 12 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-12-ci-deps uses: ./.github/workflows/test-action.yml with: - distro-slug: fedora-36 + distro-slug: debian-12 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + debian-12-arm64: + name: Debian 12 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - debian-12-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 fedora-37: - name: Fedora 37 + name: Fedora 37 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - fedora-37-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: 
fedora-37 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 fedora-38: - name: Fedora 38 + name: Fedora 38 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - fedora-38-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: fedora-38 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 opensuse-15: - name: Opensuse 15 + name: Opensuse 15 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - opensuse-15-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: opensuse-15 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 photonos-3: - name: Photon OS 3 + name: Photon OS 3 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - photonos-3-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: photonos-3 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + photonos-3-arm64: + name: Photon OS 3 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-3-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 photonos-4: - name: Photon OS 4 + name: Photon OS 4 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - photonos-4-ci-deps uses: 
./.github/workflows/test-action.yml with: distro-slug: photonos-4 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + fips: true + + photonos-4-arm64: + name: Photon OS 4 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-4-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + fips: true + + photonos-5: + name: Photon OS 5 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-5-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: nightly + default-timeout: 
360 + fips: true + + photonos-5-arm64: + name: Photon OS 5 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-5-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + fips: true ubuntu-2004: - name: Ubuntu 20.04 + name: Ubuntu 20.04 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2004-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-20.04 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 ubuntu-2004-arm64: - name: Ubuntu 20.04 Arm64 + name: Ubuntu 20.04 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2004-arm64-ci-deps uses: 
./.github/workflows/test-action.yml with: distro-slug: ubuntu-20.04-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 ubuntu-2204: - name: Ubuntu 22.04 + name: Ubuntu 22.04 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2204-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-22.04 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 ubuntu-2204-arm64: - name: Ubuntu 22.04 Arm64 + name: Ubuntu 22.04 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2204-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-22.04-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - 
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: nightly + default-timeout: 360 + + combine-all-code-coverage: + name: Combine Code Coverage + if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + needs: + - prepare-workflow + - windows-2016-ci-deps + - windows-2019-ci-deps + - windows-2022-ci-deps + - macos-12-ci-deps + - macos-13-ci-deps + - macos-13-xlarge-ci-deps + - almalinux-8-ci-deps + - almalinux-8-arm64-ci-deps + - almalinux-9-ci-deps + - almalinux-9-arm64-ci-deps + - amazonlinux-2-ci-deps + - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps + - archlinux-lts-ci-deps + - centos-7-ci-deps + - centos-7-arm64-ci-deps + - centosstream-8-ci-deps + - centosstream-8-arm64-ci-deps + - centosstream-9-ci-deps + - centosstream-9-arm64-ci-deps + - debian-10-ci-deps + - debian-11-ci-deps + - debian-11-arm64-ci-deps + - debian-12-ci-deps + - debian-12-arm64-ci-deps + - fedora-37-ci-deps + - fedora-37-arm64-ci-deps + - fedora-38-ci-deps + - fedora-38-arm64-ci-deps + - opensuse-15-ci-deps + - photonos-3-ci-deps + - photonos-3-arm64-ci-deps + - photonos-4-ci-deps + - photonos-4-arm64-ci-deps + - photonos-5-ci-deps + - photonos-5-arm64-ci-deps + - ubuntu-2004-ci-deps + - ubuntu-2004-arm64-ci-deps + - ubuntu-2204-ci-deps + - ubuntu-2204-arm64-ci-deps + - windows-2016 + - windows-2019 + - windows-2022 + - macos-12 + - macos-13 + - macos-13-xlarge + - almalinux-8 + - almalinux-9 + - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 + - archlinux-lts + - centos-7 + - centosstream-8 + - centosstream-9 + - debian-10 + - debian-11 + - debian-11-arm64 + - debian-12 + - debian-12-arm64 + - 
fedora-37 + - fedora-38 + - opensuse-15 + - photonos-3 + - photonos-3-arm64 + - photonos-4 + - photonos-4-arm64 + - photonos-5 + - photonos-5-arm64 + - ubuntu-2004 + - ubuntu-2004-arm64 + - ubuntu-2204 + - ubuntu-2204-arm64 + steps: + - uses: actions/checkout@v4 + + - name: Set up Python 3.10 + if: ${{ github.event.repository.private == false }} + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Setup Python Tools Scripts + id: python-tools-scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage + + - name: Install Nox + run: | + python3 -m pip install 'nox==2022.8.7' + + + + - name: Get coverage reports + id: get-coverage-reports + uses: actions/download-artifact@v3 + with: + name: all-testrun-coverage-artifacts + path: artifacts/coverage/ + + - name: Display structure of downloaded files + run: tree -a artifacts/ + + - name: Install Codecov CLI + run: | + # We can't yet use tokenless uploads with the codecov CLI + # python3 -m pip install codecov-cli + # + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import + curl -Os https://uploader.codecov.io/latest/linux/codecov + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + mv ./codecov /usr/local/bin/ + + - name: Create XML Coverage Reports + run: | + nox --force-color -e create-xml-coverage-reports + + - name: Upload Code Coverage To Codecov + run: | + tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/ + + - name: Combine Code Coverage + run: | + nox --force-color -e combine-coverage + + - name: Report Salt Code Coverage + run: | + nox --force-color -e coverage-report -- salt + + - name: Create Salt Code 
Coverage HTML Report + run: | + nox --force-color -e create-html-coverage-report -- salt + + - name: Create Salt Code Coverage HTML Report + run: | + nox --force-color -e create-html-coverage-report -- salt + + - name: Upload Salt Code Coverage HTML Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-salt-html-report + path: artifacts/coverage/html/salt + retention-days: 7 + if-no-files-found: error + + - name: Report Combined Code Coverage + run: | + nox --force-color -e coverage-report + + - name: Create Combined Code Coverage JSON Report + run: | + nox --force-color -e create-json-coverage-reports + + - name: Upload Combined Code Coverage JSON Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-full-json-report + path: artifacts/coverage/coverage.json + retention-days: 7 + if-no-files-found: error + + - name: Create Combined Code Coverage HTML Report + run: | + nox --force-color -e create-html-coverage-report + + - name: Upload Combined Code Coverage HTML Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-full-html-report + path: artifacts/coverage/html/full + retention-days: 7 + if-no-files-found: error build-src-repo: - name: Build Source Repository + name: Build Repository environment: nightly runs-on: - self-hosted @@ -1247,12 +3028,26 @@ jobs: - repo-nightly needs: - prepare-workflow - - build-salt-onedir + - build-source-tarball + strategy: + fail-fast: false + matrix: + pkg-type: + - src + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" 
http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download Source Tarball uses: actions/download-artifact@v3 @@ -1295,18 +3090,23 @@ jobs: - name: Create Repository run: | - tools pkg repo create src --key-id=64CBBC8173D76B3F --nightly-build \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create src --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo + - name: Copy Files For Source Only Artifact Uploads + run: | + mkdir artifacts/src + find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \; + - name: Upload Standalone Repository As An Artifact uses: actions/upload-artifact@v3 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-nightly-src-repo path: | - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-* + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* + artifacts/src/*-GPG-* retention-days: 7 if-no-files-found: error @@ -1320,7 +3120,7 @@ jobs: archive-name: src-repo build-deb-repo: - name: Build DEB Repository + name: Build Repository environment: nightly runs-on: - self-hosted @@ -1328,38 +3128,54 @@ jobs: - repo-nightly needs: - prepare-workflow - - build-deb-pkgs + - build-deb-pkgs-onedir strategy: fail-fast: false 
matrix: include: - - distro: debian + - pkg-type: deb + distro: debian version: "10" arch: x86_64 - - distro: debian + - pkg-type: deb + distro: debian version: "10" arch: aarch64 - - distro: debian + - pkg-type: deb + distro: debian version: "11" arch: x86_64 - - distro: debian + - pkg-type: deb + distro: debian version: "11" arch: aarch64 - - distro: ubuntu + - pkg-type: deb + distro: debian + version: "12" + arch: x86_64 + - pkg-type: deb + distro: debian + version: "12" + arch: aarch64 + - pkg-type: deb + distro: ubuntu version: "20.04" arch: x86_64 - - distro: ubuntu + - pkg-type: deb + distro: ubuntu version: "20.04" arch: aarch64 - - distro: ubuntu + - pkg-type: deb + distro: ubuntu version: "22.04" arch: x86_64 - - distro: ubuntu + - pkg-type: deb + distro: ubuntu version: "22.04" arch: aarch64 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Download System Dependencies run: | @@ -1368,6 +3184,14 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download DEB Packages uses: actions/download-artifact@v3 @@ -1410,8 +3234,8 @@ jobs: - name: Create Repository run: | - tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} 
--nightly-build-from=${{ github.ref_name }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo @@ -1425,7 +3249,7 @@ jobs: archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo build-rpm-repo: - name: Build RPM Repository + name: Build Repository environment: nightly runs-on: - self-hosted @@ -1433,35 +3257,146 @@ jobs: - repo-nightly needs: - prepare-workflow - - build-rpm-pkgs + - build-rpm-pkgs-onedir strategy: fail-fast: false matrix: include: - - distro: amazon + - pkg-type: rpm + distro: amazon + version: "2" + arch: x86_64 + - pkg-type: rpm + distro: amazon + version: "2" + arch: arm64 + - pkg-type: rpm + distro: amazon version: "2" + arch: aarch64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: x86_64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: arm64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: aarch64 + - pkg-type: rpm + distro: redhat + version: "7" arch: x86_64 - - distro: redhat + - pkg-type: rpm + distro: redhat version: "7" + arch: arm64 + - pkg-type: rpm + distro: redhat + version: "7" + arch: aarch64 + - pkg-type: rpm + distro: redhat + version: "8" arch: x86_64 - - distro: redhat + - pkg-type: rpm + distro: redhat + version: "8" + arch: arm64 + - pkg-type: rpm + distro: redhat version: "8" + arch: aarch64 + - pkg-type: rpm + distro: redhat + version: "9" arch: x86_64 - - distro: redhat + - pkg-type: rpm + distro: redhat version: "9" + arch: arm64 + - pkg-type: rpm + distro: redhat + version: "9" + arch: aarch64 + - pkg-type: rpm + distro: fedora + version: "36" arch: x86_64 - - distro: fedora + - pkg-type: rpm + distro: fedora + version: "36" + arch: arm64 + - pkg-type: rpm + distro: fedora version: "36" + arch: aarch64 + - pkg-type: rpm + distro: fedora + version: "37" arch: x86_64 - - distro: fedora + - pkg-type: rpm + 
distro: fedora version: "37" + arch: arm64 + - pkg-type: rpm + distro: fedora + version: "37" + arch: aarch64 + - pkg-type: rpm + distro: fedora + version: "38" arch: x86_64 - - distro: fedora + - pkg-type: rpm + distro: fedora + version: "38" + arch: arm64 + - pkg-type: rpm + distro: fedora version: "38" + arch: aarch64 + - pkg-type: rpm + distro: photon + version: "3" + arch: x86_64 + - pkg-type: rpm + distro: photon + version: "3" + arch: arm64 + - pkg-type: rpm + distro: photon + version: "3" + arch: aarch64 + - pkg-type: rpm + distro: photon + version: "4" + arch: x86_64 + - pkg-type: rpm + distro: photon + version: "4" + arch: arm64 + - pkg-type: rpm + distro: photon + version: "4" + arch: aarch64 + - pkg-type: rpm + distro: photon + version: "5" arch: x86_64 + - pkg-type: rpm + distro: photon + version: "5" + arch: arm64 + - pkg-type: rpm + distro: photon + version: "5" + arch: aarch64 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Download System Dependencies run: | @@ -1470,11 +3405,19 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download RPM Packages uses: actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-rpm + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}-rpm path: artifacts/pkgs/incoming - name: Setup GnuPG @@ -1511,9 +3454,12 @@ jobs: mkdir -p artifacts/pkgs/repo - name: Create 
Repository + env: + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | - tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create rpm --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} --nightly-build-from=${{ github.ref_name }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo @@ -1527,7 +3473,7 @@ jobs: archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo build-windows-repo: - name: Build Windows Repository + name: Build Repository environment: nightly runs-on: - self-hosted @@ -1535,12 +3481,26 @@ jobs: - repo-nightly needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + strategy: + fail-fast: false + matrix: + pkg-type: + - windows + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download Windows NSIS x86 Packages uses: actions/download-artifact@v3 @@ -1601,8 +3561,8 @@ jobs: - name: Create Repository run: | - tools 
pkg repo create windows --key-id=64CBBC8173D76B3F --nightly-build \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create windows --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ + --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo - name: Upload Repository As An Artifact @@ -1615,7 +3575,7 @@ jobs: archive-name: windows-repo build-macos-repo: - name: Build macOS Repository + name: Build Repository environment: nightly runs-on: - self-hosted @@ -1623,12 +3583,26 @@ jobs: - repo-nightly needs: - prepare-workflow - - build-macos-pkgs + - build-macos-pkgs-onedir + strategy: + fail-fast: false + matrix: + pkg-type: + - macos + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download macOS x86_64 Packages uses: actions/download-artifact@v3 @@ -1636,6 +3610,12 @@ jobs: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-x86_64-macos path: artifacts/pkgs/incoming + - name: Download macOS Arm64 Packages + uses: actions/download-artifact@v3 + with: + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-aarch64-macos + path: artifacts/pkgs/incoming + - name: Setup GnuPG run: | sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg @@ -1671,8 +3651,8 @@
jobs: - name: Create Repository run: | - tools pkg repo create macos --key-id=64CBBC8173D76B3F --nightly-build \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create macos --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo - name: Upload Repository As An Artifact @@ -1685,7 +3665,7 @@ jobs: archive-name: macos-repo build-onedir-repo: - name: Build Onedir Repository + name: Build Repository environment: nightly runs-on: - self-hosted @@ -1693,12 +3673,28 @@ jobs: - repo-nightly needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-linux + - build-salt-onedir-macos + - build-salt-onedir-windows + strategy: + fail-fast: false + matrix: + pkg-type: + - onedir + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download Linux x86_64 Onedir Archive uses: actions/download-artifact@v3 @@ -1718,6 +3714,12 @@ jobs: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-darwin-x86_64.tar.xz path: artifacts/pkgs/incoming + - name: Download macOS arm64 Onedir Archive + uses: actions/download-artifact@v3 + with: + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-darwin-aarch64.tar.xz + 
path: artifacts/pkgs/incoming + - name: Download Windows amd64 Onedir Archive uses: actions/download-artifact@v3 with: @@ -1777,8 +3779,8 @@ jobs: - name: Create Repository run: | - tools pkg repo create onedir --key-id=64CBBC8173D76B3F --nightly-build \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create onedir --key-id=64CBBC8173D76B3F --nightly-build-from=${{ github.ref_name }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo - name: Upload Repository As An Artifact @@ -1792,6 +3794,7 @@ jobs: publish-repositories: name: Publish Repositories + if: ${{ always() && ! failure() && ! cancelled() }} runs-on: - self-hosted - linux @@ -1805,13 +3808,59 @@ jobs: - build-windows-repo - build-macos-repo - build-onedir-repo + - windows-2016-ci-deps + - windows-2019-ci-deps + - windows-2022-ci-deps + - macos-12-ci-deps + - macos-13-ci-deps + - macos-13-xlarge-ci-deps + - almalinux-8-ci-deps + - almalinux-8-arm64-ci-deps + - almalinux-9-ci-deps + - almalinux-9-arm64-ci-deps + - amazonlinux-2-ci-deps + - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps + - archlinux-lts-ci-deps + - centos-7-ci-deps + - centos-7-arm64-ci-deps + - centosstream-8-ci-deps + - centosstream-8-arm64-ci-deps + - centosstream-9-ci-deps + - centosstream-9-arm64-ci-deps + - debian-10-ci-deps + - debian-11-ci-deps + - debian-11-arm64-ci-deps + - debian-12-ci-deps + - debian-12-arm64-ci-deps + - fedora-37-ci-deps + - fedora-37-arm64-ci-deps + - fedora-38-ci-deps + - fedora-38-arm64-ci-deps + - opensuse-15-ci-deps + - photonos-3-ci-deps + - photonos-3-arm64-ci-deps + - photonos-4-ci-deps + - photonos-4-arm64-ci-deps + - photonos-5-ci-deps + - photonos-5-arm64-ci-deps + - ubuntu-2004-ci-deps + - ubuntu-2004-arm64-ci-deps + - ubuntu-2204-ci-deps + - 
ubuntu-2204-arm64-ci-deps - windows-2016 - windows-2019 - windows-2022 - macos-12 + - macos-13 + - macos-13-xlarge - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -1819,19 +3868,24 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 + - debian-12 + - debian-12-arm64 - fedora-37 - fedora-38 - opensuse-15 - photonos-3 + - photonos-3-arm64 - photonos-4 + - photonos-4-arm64 + - photonos-5 + - photonos-5-arm64 - ubuntu-2004 - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -1841,6 +3895,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Repository Artifact uses: actions/download-artifact@v3 @@ -1858,15 +3914,19 @@ jobs: tree -a artifacts/pkgs/repo/ - name: Upload Repository Contents (nightly) + env: + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | - tools pkg repo publish nightly artifacts/pkgs/repo/ + tools pkg repo publish nightly --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/ set-pipeline-exit-status: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + environment: nightly needs: - workflow-requirements - trigger-branch-nightly-builds @@ -1874,21 +3934,44 @@ jobs: - pre-commit - lint - build-docs - - 
build-deps-onedir - - build-salt-onedir + - build-deps-onedir-linux + - build-deps-onedir-windows + - build-deps-onedir-macos + - build-salt-onedir-linux + - build-salt-onedir-windows + - build-salt-onedir-macos + - build-rpm-pkgs-src + - build-deb-pkgs-src + - build-windows-pkgs-src + - build-macos-pkgs-src + - combine-all-code-coverage - publish-repositories - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests + - debian-12-pkg-tests + - debian-12-arm64-pkg-tests + - photonos-3-pkg-tests + - photonos-3-arm64-pkg-tests + - photonos-4-pkg-tests + - photonos-4-arm64-pkg-tests + - photonos-5-pkg-tests + - photonos-5-arm64-pkg-tests - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests - ubuntu-2204-arm64-pkg-tests - macos-12-pkg-tests + - macos-13-pkg-tests + - macos-13-xlarge-pkg-tests - windows-2016-nsis-pkg-tests - windows-2016-msi-pkg-tests - windows-2019-nsis-pkg-tests @@ -1900,6 +3983,65 @@ jobs: id: get-workflow-info uses: technote-space/workflow-conclusion-action@v3 + - name: Notify Slack + id: slack + if: always() + uses: slackapi/slack-github-action@v1.24.0 + with: + payload: | + { + "attachments": [ + { + "fallback": "${{ github.workflow }} Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }}): `${{ steps.get-workflow-info.outputs.conclusion }}`\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}", + "color": "${{ steps.get-workflow-info.outputs.conclusion != 'success' && 'ff3d00' || '00e676' }}", + "fields": [ + { + "title": "Workflow", + "short": true, + "value": "${{ github.workflow }}", + "type": "mrkdwn" + }, + { + "title": "Workflow Run", + "short": true, + "value": "<${{ github.server_url }}/${{ 
github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>", + "type": "mrkdwn" + }, + { + "title": "Branch", + "short": true, + "value": "${{ github.ref_name }}", + "type": "mrkdwn" + }, + { + "title": "Commit", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>", + "type": "mrkdwn" + }, + { + "title": "Attempt", + "short": true, + "value": "${{ github.run_attempt }}", + "type": "mrkdwn" + }, + { + "title": "Status", + "short": true, + "value": "${{ steps.get-workflow-info.outputs.conclusion }}", + "type": "mrkdwn" + } + ], + "author_name": "${{ github.event.sender.login }}", + "author_link": "${{ github.event.sender.html_url }}", + "author_icon": "${{ github.event.sender.avatar_url }}" + } + ] + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK + - name: Set Pipeline Exit Status shell: bash run: | diff --git a/.github/workflows/pre-commit-action.yml b/.github/workflows/pre-commit-action.yml index 00b25136c4ee..4c1a34e2f4ab 100644 --- a/.github/workflows/pre-commit-action.yml +++ b/.github/workflows/pre-commit-action.yml @@ -21,23 +21,32 @@ jobs: Pre-Commit: name: Run Pre-Commit Against Salt - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} container: - image: python:3.10-slim-buster + image: ghcr.io/saltstack/salt-ci-containers/python:3.10 + + env: + PRE_COMMIT_COLOR: always steps: - name: Install System Deps run: | - echo "deb http://deb.debian.org/debian buster-backports main" >> /etc/apt/sources.list apt-get update - apt-get install -y wget curl enchant git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev - apt-get install -y git/buster-backports + apt-get install -y wget curl enchant-2 git gcc make zlib1g-dev libc-dev libffi-dev g++ libxml2 libxml2-dev 
libxslt-dev libcurl4-openssl-dev libssl-dev libgnutls28-dev rustc + + - name: Add Git Safe Directory + run: | + git config --global --add safe.directory "$(pwd)" - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - uses: ./.github/actions/setup-actionlint + with: + cache-seed: ${{ inputs.cache-seed }} - uses: ./.github/actions/setup-shellcheck + with: + cache-seed: ${{ inputs.cache-seed }} - uses: ./.github/actions/setup-pre-commit with: version: ${{ inputs.pre-commit-version }} diff --git a/.github/workflows/release-tag.yml b/.github/workflows/release-tag.yml index b2ec9913fc6f..2908fd96f43c 100644 --- a/.github/workflows/release-tag.yml +++ b/.github/workflows/release-tag.yml @@ -31,7 +31,7 @@ jobs: permissions: contents: write # for dev-drprasad/delete-tag-and-release to delete tags or releases name: Generate Tag and Github Release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} steps: - uses: dev-drprasad/delete-tag-and-release@v0.2.0 if: github.event.inputs.reTag == 'true' @@ -41,7 +41,7 @@ jobs: env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: repository: ${{ github.event.inputs.saltRepo }} ref: ${{ github.event.inputs.saltBranch }} @@ -58,7 +58,7 @@ jobs: with: python-version: 3.8 - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Install pypa/build run: | python -m pip install build --user diff --git a/.github/workflows/release-update-winrepo.yml b/.github/workflows/release-update-winrepo.yml new file mode 100644 index 000000000000..4eac8c2c836c --- /dev/null +++ b/.github/workflows/release-update-winrepo.yml @@ -0,0 +1,71 @@ +--- +name: Update Winrepo + +on: + release: + types: [released] + workflow_dispatch: + inputs: + salt-version: + type: string + required: true + description: > + The Salt version to set prior to building packages and staging the release. 
+ (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0). + +permissions: + contents: read + +jobs: + update-winrepo: + name: Update Winrepo + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + steps: + + - name: Checkout Salt + uses: actions/checkout@v4 + with: + path: salt + + - name: Checkout WinRepo + uses: actions/checkout@v4 + with: + path: winrepo + repository: twangboy/salt-winrepo-ng + + - name: Set Up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Add Version to Minion Definition File + working-directory: salt + run: | + python .github/workflows/scripts/update_winrepo.py \ + --file ../winrepo/salt-minion.sls \ + --version ${{ inputs.salt-version || github.ref_name }} + + - name: Commit Changes + working-directory: winrepo + run: | + git status + git add . + git config user.name 'saltbot-open' + git config user.email 'saltbot-open@users.noreply.github.com' + git commit -m "Adding salt minion ${{ inputs.salt-version && format('v{0}', inputs.salt-version) || github.ref_name }}" + + - name: Create Pull Request + id: cpr + uses: peter-evans/create-pull-request@v4 + with: + path: winrepo + push-to-fork: saltbot-open/salt-winrepo-ng + token: ${{ secrets.SALTBOT_OPEN_SALT_WINREPO_NG }} + +# - name: Enable Pull Request Automerge +# if: steps.cpr.outputs.pull-request-operation == 'created' +# uses: peter-evans/enable-pull-request-automerge@v2 +# with: +# token: ${{ secrets.SALTBOT_OPEN_SALT_WINREPO_NG }} +# pull-request-number: ${{ steps.cpr.outputs.pull-request-number }} +# merge-method: squash diff --git a/.github/workflows/release-upload-virustotal.yml b/.github/workflows/release-upload-virustotal.yml new file mode 100644 index 000000000000..da13d83ca806 --- /dev/null +++ b/.github/workflows/release-upload-virustotal.yml @@ -0,0 +1,53 @@ +--- +name: Upload VirusTotal + +on: + release: + types: [released] + workflow_dispatch: + inputs: + 
salt-version: + type: string + required: true + description: > + The Salt version to get from staging to upload to VirusTotal. + (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0). + +permissions: + contents: read + +env: + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + +jobs: + upload-virustotal: + name: Upload VirusTotal + environment: release + runs-on: + - self-hosted + - linux + - repo-release + steps: + + - name: Checkout Salt + uses: actions/checkout@v4 + + - name: Set Up Python 3.10 + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: virus-total + + - name: Upload to VirusTotal + env: + VIRUSTOTAL_API_KEY: ${{ secrets.VIRUSTOTAL_API_KEY }} + run: | + tools release upload-virustotal ${{ inputs.salt-version || github.ref_name }} diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e42db5aceaac..9b2ce992589b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,7 +3,7 @@ --- name: Release -run-name: "Release (${{ format('Branch: {0} // Version: {1}', github.ref_name, inputs.salt-version) }})" +run-name: "Release (branch: ${{ github.ref_name }}; version: ${{ inputs.salt-version }})" on: workflow_dispatch: @@ -14,12 +14,15 @@ on: description: > The Salt version to get from staging to publish the release. (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0). + skip-salt-pkg-download-test-suite: + type: boolean + default: false + description: Skip running the Salt packages download test suite. 
env: COLUMNS: 190 - CACHE_SEED: SEED-2 # Bump the number to invalidate all caches + CACHE_SEED: SEED-7 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" - REPO_BASE_URL: "https://${{ secrets.SALT_REPO_DOMAIN }}" permissions: contents: write # To be able to publish the release @@ -32,7 +35,7 @@ jobs: check-requirements: name: Check Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} environment: release-check steps: - name: Check For Admin Permission @@ -41,26 +44,22 @@ jobs: require: admin username: ${{ github.triggering_actor }} - - name: Check Branch - run: | - echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: master, 3006.x" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: + - self-hosted + - linux + - repo-release + environment: release needs: - check-requirements outputs: salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} + cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 # Full clone to also get the tags to get the right salt version @@ -71,6 +70,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -84,8 +85,743 @@ jobs: validate-version: true - name: Check Existing Releases + env: + 
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + if [ "${{ github.event.repository.private }}" = "true" ]; then + tools pkg repo confirm-unreleased --repository saltstack/salt ${{ steps.setup-salt-version.outputs.salt-version }} + fi + + - name: Check Release Staged + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools pkg repo confirm-staged --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + + download-onedir-artifact: + name: Download Staging Onedir Artifact + runs-on: + - self-hosted + - linux + - repo-release + environment: release + needs: + - prepare-workflow + strategy: + fail-fast: false + matrix: + include: + - platform: linux + arch: x86_64 + - platform: linux + arch: aarch64 + - platform: windows + arch: amd64 + - platform: windows + arch: x86 + - platform: darwin + arch: x86_64 + - platform: darwin + arch: aarch64 + steps: + - uses: actions/checkout@v4 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo 
"SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: Download Onedir Tarball Artifact + run: | + tools release download-onedir-artifact --platform=${{ matrix.platform }} --arch=${{ matrix.arch }} ${{ inputs.salt-version }} + + - name: Upload Onedir Tarball as an Artifact + uses: actions/upload-artifact@v3 + with: + name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz + path: artifacts/salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz* + retention-days: 7 + if-no-files-found: error + + windows-2016-ci-deps: + name: Windows 2016 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + windows-2019-ci-deps: + name: Windows 2019 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: windows-2019 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + windows-2022-ci-deps: + name: Windows 2022 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: windows-2022 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + macos-12-ci-deps: + name: macOS 12 Deps + needs: + - 
prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action-macos.yml + with: + distro-slug: macos-12 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + macos-13-ci-deps: + name: macOS 13 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action-macos.yml + with: + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + macos-13-xlarge-ci-deps: + name: macOS 13 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-8-ci-deps: + name: Alma Linux 8 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-8-arm64-ci-deps: + name: Alma Linux 8 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-8-arm64 + nox-session: 
ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-9-ci-deps: + name: Alma Linux 9 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-9-arm64-ci-deps: + name: Alma Linux 9 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2-ci-deps: + name: Amazon Linux 2 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2-arm64-ci-deps: + name: Amazon Linux 2 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + archlinux-lts-ci-deps: + name: Arch Linux LTS Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: archlinux-lts + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centos-7-ci-deps: + name: CentOS 7 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centos-7 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + 
centos-7-arm64-ci-deps: + name: CentOS 7 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centos-7-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-8-ci-deps: + name: CentOS Stream 8 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-8-arm64-ci-deps: + name: CentOS Stream 8 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-8-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-9-ci-deps: + name: CentOS Stream 9 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-9-arm64-ci-deps: + name: CentOS Stream 9 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: 
./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-10-ci-deps: + name: Debian 10 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-11-ci-deps: + name: Debian 11 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-11-arm64-ci-deps: + name: Debian 11 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-12-ci-deps: + name: Debian 12 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-12 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: 
"3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-12-arm64-ci-deps: + name: Debian 12 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-37-ci-deps: + name: Fedora 37 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-37 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-37-arm64-ci-deps: + name: Fedora 37 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-37-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-38-ci-deps: + name: Fedora 38 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-38 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-38-arm64-ci-deps: + 
name: Fedora 38 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-38-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + opensuse-15-ci-deps: + name: Opensuse 15 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: opensuse-15 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-3-ci-deps: + name: Photon OS 3 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-3 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-3-arm64-ci-deps: + name: Photon OS 3 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-4-ci-deps: + name: Photon OS 4 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-4 + 
nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-4-arm64-ci-deps: + name: Photon OS 4 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-5-ci-deps: + name: Photon OS 5 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-5-arm64-ci-deps: + name: Photon OS 5 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2004-ci-deps: + name: Ubuntu 20.04 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2004-arm64-ci-deps: + name: Ubuntu 20.04 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-20.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2204-ci-deps: + name: Ubuntu 22.04 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2204-arm64-ci-deps: + name: Ubuntu 22.04 Arm64 Deps + needs: + - prepare-workflow + - download-onedir-artifact + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-22.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 backup: name: Backup @@ -96,15 +832,25 @@ jobs: needs: - prepare-workflow environment: release + outputs: + backup-complete: ${{ steps.backup.outputs.backup-complete }} steps: - name: Clone The Salt Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 + + - name: Setup Rclone + uses: AnimMouse/setup-rclone@v1 + with: + version: v1.61.1 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }} - name: Backup Previous Releases + id: backup run: | tools pkg repo backup-previous-releases @@ -117,11 +863,12 @@ jobs: needs: - prepare-workflow - backup + - download-onedir-artifact environment: release steps: - name: Clone The Salt Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -131,15 +878,75 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Publish Release Repository + env: + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | - tools pkg repo publish release \ - ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ - --key-id=64CBBC8173D76B3F ${{ needs.prepare-workflow.outputs.salt-version }} + tools pkg repo publish release ${{ needs.prepare-workflow.outputs.salt-version }} + + pkg-download-tests: + name: Package Downloads + if: ${{ inputs.skip-salt-pkg-download-test-suite == false }} + needs: + - prepare-workflow + - publish-repositories + - almalinux-8-arm64-ci-deps + - almalinux-8-ci-deps + - almalinux-9-arm64-ci-deps + - almalinux-9-ci-deps + - amazonlinux-2-arm64-ci-deps + - amazonlinux-2-ci-deps + - amazonlinux-2023-arm64-ci-deps + - amazonlinux-2023-ci-deps + - centos-7-arm64-ci-deps + - centos-7-ci-deps + - centosstream-8-arm64-ci-deps + - centosstream-8-ci-deps + - centosstream-9-arm64-ci-deps + - centosstream-9-ci-deps + - debian-10-ci-deps + - debian-11-arm64-ci-deps + - debian-11-ci-deps + - debian-12-arm64-ci-deps + - debian-12-ci-deps + - fedora-37-arm64-ci-deps + - fedora-37-ci-deps + - fedora-38-arm64-ci-deps + - fedora-38-ci-deps + - macos-12-ci-deps + - macos-13-ci-deps + - macos-13-xlarge-ci-deps + - 
photonos-3-arm64-ci-deps + - photonos-3-ci-deps + - photonos-4-arm64-ci-deps + - photonos-4-ci-deps + - photonos-5-arm64-ci-deps + - photonos-5-ci-deps + - ubuntu-2004-arm64-ci-deps + - ubuntu-2004-ci-deps + - ubuntu-2204-arm64-ci-deps + - ubuntu-2204-ci-deps + - windows-2022-ci-deps + - download-onedir-artifact + uses: ./.github/workflows/test-package-downloads-action.yml + with: + nox-session: ci-test-onedir + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: release + nox-version: 2022.8.7 + python-version: "3.10" + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit release: name: Release v${{ needs.prepare-workflow.outputs.salt-version }} + if: ${{ always() && ! failure() && ! cancelled() }} runs-on: - self-hosted - linux @@ -148,15 +955,18 @@ jobs: - prepare-workflow - backup - publish-repositories + - pkg-download-tests environment: release steps: - name: Clone The Salt Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ssh-key: ${{ secrets.GHA_SSH_KEY }} - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | @@ -217,6 +1027,7 @@ jobs: ssh: true tags: true atomic: true + branch: ${{ github.ref }} - name: Create Github Release uses: ncipollo/release-action@v1.12.0 @@ -233,28 +1044,139 @@ jobs: replacesArtifacts: true tag: v${{ needs.prepare-workflow.outputs.salt-version }} + - name: Upload PyPi Artifacts + uses: actions/upload-artifact@v3 + with: + name: pypi-artifacts + path: | + release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc + retention-days: 7 + if-no-files-found: error + + publish-pypi: + name: Publish to PyPi + if: ${{ 
always() && ! failure() && ! cancelled() && github.event.repository.fork != true }} + needs: + - prepare-workflow + - release + environment: release + runs-on: + - self-hosted + - linux + - repo-release + steps: + - uses: actions/checkout@v4 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Setup GnuPG + run: | + sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg + GNUPGHOME="$(mktemp -d -p /run/gpg)" + echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV" + cat <<EOF > "${GNUPGHOME}/gpg.conf" + batch + no-tty + pinentry-mode loopback + EOF + + - name: Get Secrets + id: get-secrets + env: + SECRETS_KEY: ${{ secrets.SECRETS_KEY }} + run: | + SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX) + echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE" + TWINE_PASSWORD=$(aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/publishing/publish-pypi \ + --query SecretString --output text | jq .default_passphrase -r | base64 -d \ + | gpg --passphrase-file "$SECRETS_KEY_FILE" -d -) + echo "::add-mask::$TWINE_PASSWORD" + echo "twine-password=$TWINE_PASSWORD" >> "${GITHUB_OUTPUT}" + + - name: Download PyPi Artifacts + uses: actions/download-artifact@v3 + with: + name: pypi-artifacts + path: artifacts/release + - name: Publish to PyPi env: TWINE_PASSWORD: "${{ steps.get-secrets.outputs.twine-password }}" run: | - tools pkg pypi-upload release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + tools pkg pypi-upload artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz set-pipeline-exit-status: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", 
"x86_64"]') || 'ubuntu-latest' }} needs: - check-requirements - prepare-workflow - publish-repositories + - pkg-download-tests - release + - publish-pypi + - windows-2016-ci-deps + - windows-2019-ci-deps + - windows-2022-ci-deps + - macos-12-ci-deps + - macos-13-ci-deps + - macos-13-xlarge-ci-deps + - almalinux-8-ci-deps + - almalinux-8-arm64-ci-deps + - almalinux-9-ci-deps + - almalinux-9-arm64-ci-deps + - amazonlinux-2-ci-deps + - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps + - archlinux-lts-ci-deps + - centos-7-ci-deps + - centos-7-arm64-ci-deps + - centosstream-8-ci-deps + - centosstream-8-arm64-ci-deps + - centosstream-9-ci-deps + - centosstream-9-arm64-ci-deps + - debian-10-ci-deps + - debian-11-ci-deps + - debian-11-arm64-ci-deps + - debian-12-ci-deps + - debian-12-arm64-ci-deps + - fedora-37-ci-deps + - fedora-37-arm64-ci-deps + - fedora-38-ci-deps + - fedora-38-arm64-ci-deps + - opensuse-15-ci-deps + - photonos-3-ci-deps + - photonos-3-arm64-ci-deps + - photonos-4-ci-deps + - photonos-4-arm64-ci-deps + - photonos-5-ci-deps + - photonos-5-arm64-ci-deps + - ubuntu-2004-ci-deps + - ubuntu-2004-arm64-ci-deps + - ubuntu-2204-ci-deps + - ubuntu-2204-arm64-ci-deps steps: - name: Get workflow information id: get-workflow-info uses: technote-space/workflow-conclusion-action@v3 + - run: | + # shellcheck disable=SC2129 + if [ "${{ steps.get-workflow-info.outputs.conclusion }}" != "success" ]; then + echo 'To restore the release bucket run:' >> "${GITHUB_STEP_SUMMARY}" + echo '```' >> "${GITHUB_STEP_SUMMARY}" + echo 'tools pkg repo restore-previous-releases' >> "${GITHUB_STEP_SUMMARY}" + echo '```' >> "${GITHUB_STEP_SUMMARY}" + fi + - name: Set Pipeline Exit Status shell: bash run: | diff --git a/.github/workflows/scheduled.yml b/.github/workflows/scheduled.yml index 4a735a1e6dae..c03929fc970e 100644 --- a/.github/workflows/scheduled.yml +++ b/.github/workflows/scheduled.yml @@ -3,7 +3,7 @@ --- name: Scheduled -run-name: 
"Scheduled (${{ format('Branch: {0}', github.ref_name) }})" +run-name: "Scheduled (branch: ${{ github.ref_name }})" on: schedule: @@ -12,7 +12,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-2 # Bump the number to invalidate all caches + CACHE_SEED: SEED-7 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" permissions: @@ -28,14 +28,14 @@ jobs: workflow-requirements: name: Check Workflow Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: requirements-met: ${{ steps.check-requirements.outputs.requirements-met }} steps: - name: Check Requirements id: check-requirements run: | - if [ "${RUN_SCHEDULED_BUILDS}" = "1" ]; then + if [ "${{ vars.RUN_SCHEDULED_BUILDS }}" = "1" ]; then MSG="Running workflow because RUN_SCHEDULED_BUILDS=1" echo "${MSG}" echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" @@ -54,9 +54,8 @@ jobs: trigger-branch-scheduled-builds: name: Trigger Branch Workflows - if: ${{ github.event_name == 'schedule' }} - runs-on: - - ubuntu-latest + if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - workflow-requirements steps: @@ -68,7 +67,7 @@ jobs: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} if: ${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }} needs: - workflow-requirements @@ -76,11 +75,15 @@ jobs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }} + pull-labels: ${{ steps.get-pull-labels.outputs.labels }} testrun: ${{ steps.define-testrun.outputs.testrun }} 
salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} + testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 # Full clone to also get the tags to get the right salt version @@ -126,9 +129,11 @@ jobs: - *doc_requirements workflows: - added|modified: + - cicd/shared-gh-workflows-context.yml - .github/actions/**/action.yml - .github/workflows/*.yml - .github/workflows/templates/*.yml.jinja2 + - tools/precommit/workflows.py salt: - added|modified: &salt_added_modified - setup.py @@ -166,11 +171,18 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: tools ci print-gh-event + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -178,6 +190,14 @@ jobs: salt-version: "" validate-version: true + - name: Get Pull Request Test Labels + id: get-pull-labels + if: ${{ github.event_name == 'pull_request'}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-pr-test-labels --repository ${{ github.repository }} + - name: Write Changed Files To A Local File run: echo '${{ toJSON(steps.changed-files.outputs) }}' > changed-files.json @@ -215,6 +235,26 @@ jobs: run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' 
+ - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Get Latest Salt Releases for Testing + id: get-testing-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + echo '${{ steps.get-testing-releases.outputs.testing-releases }}' | jq -C '.' + - name: Define Testrun id: define-testrun run: | @@ -236,10 +276,7 @@ jobs: name: testrun-changed-files.txt path: testrun-changed-files.txt - - name: Set Cache Seed Output - id: set-cache-seed - run: | - echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -263,35 +300,21 @@ jobs: prepare-release: name: "Prepare Release: ${{ needs.prepare-workflow.outputs.salt-version }}" if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['prepare-release'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} needs: - prepare-workflow steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version + - name: Set up Python 3.10 + uses: actions/setup-python@v4 with: - python-binary: python3 + python-version: "3.10" - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 - 
with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -299,28 +322,30 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - # TODO: Remove the --salt-version argument post 3006 release. This was to handle versioning - # issues on pre-3006 development versions on deb-based distros. 
- name: Update Debian changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" --draft - tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" + tools changelog update-deb --draft + tools changelog update-deb - name: Update RPM changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-rpm --draft tools changelog update-rpm - name: Update Release Notes shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-release-notes --draft tools changelog update-release-notes - name: Generate MAN Pages shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: LATEST_RELEASE: "${{ needs.prepare-workflow.outputs.salt-version }}" SALT_ON_SALTSTACK: "1" @@ -329,22 +354,26 @@ jobs: - name: Update Changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-changelog-md --draft tools changelog update-changelog-md - name: Show Changes Diff shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git diff --color - name: Configure Git shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git config --global user.name "Salt Project Packaging" git config --global user.email saltproject-packaging@vmware.com - name: Setup Pre-Commit + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} uses: ./.github/actions/setup-pre-commit with: version: "3.0.4" @@ -352,8 +381,10 @@ jobs: - name: Commit Changes shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. 
git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ @@ -361,11 +392,13 @@ jobs: - name: Create release changes patch shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git format-patch --keep-subject --binary --stdout HEAD^ > salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch - name: Upload Changes Diff Artifact uses: actions/upload-artifact@v3 + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch path: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch @@ -389,24 +422,19 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 3.10 uses: actions/setup-python@v4 with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -414,835 +442,2562 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" 
- build-deps-onedir: + build-deps-onedir-linux: + name: Build Dependencies Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + uses: ./.github/workflows/build-deps-onedir-linux.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-deps-onedir-windows: + name: Build Dependencies Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + uses: ./.github/workflows/build-deps-onedir-windows.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-deps-onedir-macos: name: Build Dependencies Onedir if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml + uses: ./.github/workflows/build-deps-onedir-macos.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-salt-onedir-linux: + name: Build Salt Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} + needs: + - prepare-workflow + - build-deps-onedir-linux + - build-source-tarball + uses: ./.github/workflows/build-salt-onedir-linux.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-salt-onedir-windows: + name: Build Salt Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} + needs: + - prepare-workflow + - build-deps-onedir-windows + - build-source-tarball + uses: ./.github/workflows/build-salt-onedir-windows.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.7.0" - python-version-linux: "3.10.10" - python-version-macos: "3.10.10" - python-version-windows: "3.10.10" + relenv-version: "0.14.2" + python-version: "3.10.13" - build-salt-onedir: + build-salt-onedir-macos: name: Build Salt Onedir if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir + - build-deps-onedir-macos - build-source-tarball - uses: ./.github/workflows/build-salt-onedir.yml + uses: ./.github/workflows/build-salt-onedir-macos.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} 
salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.7.0" - python-version-linux: "3.10.10" - python-version-macos: "3.10.10" - python-version-windows: "3.10.10" + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-rpm-pkgs-onedir: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-rpm-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" - build-rpm-pkgs: - name: Build RPM Packages + build-rpm-pkgs-src: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-linux uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + + build-deb-pkgs-onedir: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deb-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + 
python-version: "3.10.13" + source: "onedir" - build-deb-pkgs: - name: Build DEB Packages + build-deb-pkgs-src: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-linux uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + + build-windows-pkgs-onedir: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: ./.github/workflows/build-windows-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" - build-windows-pkgs: - name: Build Windows Packages + build-windows-pkgs-src: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-windows uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" - build-macos-pkgs: - name: Build macOS Packages + build-macos-pkgs-onedir: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - 
build-salt-onedir + - build-salt-onedir-macos uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" - amazonlinux-2-pkg-tests: - name: Amazon Linux 2 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + build-macos-pkgs-src: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-macos + uses: ./.github/workflows/build-macos-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + + windows-2016-ci-deps: + name: Windows 2016 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + windows-2019-ci-deps: + name: Windows 2019 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml 
+ with: + distro-slug: windows-2019 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + windows-2022-ci-deps: + name: Windows 2022 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: windows-2022 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + macos-12-ci-deps: + name: macOS 12 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml + with: + distro-slug: macos-12 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + macos-13-ci-deps: + name: macOS 13 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml + with: + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version 
}}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + macos-13-xlarge-ci-deps: + name: macOS 13 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-8-ci-deps: + name: Alma Linux 8 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-8-arm64-ci-deps: + name: Alma Linux 8 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-8-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-9-ci-deps: + name: Alma Linux 9 Deps + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + almalinux-9-arm64-ci-deps: + name: Alma Linux 9 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: almalinux-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2-ci-deps: + name: Amazon Linux 2 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: distro-slug: amazonlinux-2 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centos-7-pkg-tests: - name: CentOS 7 Package Tests - if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + amazonlinux-2-arm64-ci-deps: + name: Amazon Linux 2 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: centos-7 + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centosstream-8-pkg-tests: - name: CentOS Stream 8 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] 
}} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: centosstream-8 + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + archlinux-lts-ci-deps: + name: Arch Linux LTS Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: archlinux-lts + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centosstream-9-pkg-tests: - name: CentOS Stream 9 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + centos-7-ci-deps: + name: CentOS 7 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: centosstream-9 + distro-slug: centos-7 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: 
"${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: false - skip-junit-reports: false + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - debian-10-pkg-tests: - name: Debian 10 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + centos-7-arm64-ci-deps: + name: CentOS 7 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: debian-10 + distro-slug: centos-7-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-8-ci-deps: + name: CentOS Stream 8 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-8 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-8-arm64-ci-deps: + name: CentOS Stream 8 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-8-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-9-ci-deps: + name: CentOS Stream 9 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-9-arm64-ci-deps: + name: CentOS Stream 9 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-10-ci-deps: + name: Debian 10 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: 
./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-11-ci-deps: + name: Debian 11 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-11-arm64-ci-deps: + name: Debian 11 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-12-ci-deps: + name: Debian 12 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-12 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-12-arm64-ci-deps: + name: Debian 12 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-37-ci-deps: + name: Fedora 37 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-37 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-37-arm64-ci-deps: + name: Fedora 37 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-37-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-38-ci-deps: + name: Fedora 38 Deps + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-38 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-38-arm64-ci-deps: + name: Fedora 38 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-38-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + opensuse-15-ci-deps: + name: Opensuse 15 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: opensuse-15 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-3-ci-deps: + name: Photon OS 3 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: 
./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-3 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-3-arm64-ci-deps: + name: Photon OS 3 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-4-ci-deps: + name: Photon OS 4 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-4-arm64-ci-deps: + name: Photon OS 4 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: 
"3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-5-ci-deps: + name: Photon OS 5 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-5-arm64-ci-deps: + name: Photon OS 5 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2004-ci-deps: + name: Ubuntu 20.04 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2004-arm64-ci-deps: + name: Ubuntu 20.04 
Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-20.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2204-ci-deps: + name: Ubuntu 22.04 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2204-arm64-ci-deps: + name: Ubuntu 22.04 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-22.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + amazonlinux-2-pkg-tests: + name: Amazon Linux 2 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - 
prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centos-7-pkg-tests: + name: CentOS 7 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centos-7-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centos-7 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-8-pkg-tests: + name: CentOS Stream 8 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - 
build-rpm-pkgs-onedir + - centosstream-8-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-9-pkg-tests: + name: CentOS Stream 9 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + 
skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-10-pkg-tests: + name: Debian 10 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-10-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-11-pkg-tests: + name: Debian 11 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-11-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-11-arm64-pkg-tests: + name: Debian 11 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-11-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + 
distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-12-pkg-tests: + name: Debian 12 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-12-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-12 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-12-arm64-pkg-tests: + name: Debian 12 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-12-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-3-pkg-tests: 
+ name: Photon OS 3 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-3-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-3 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-3-arm64-pkg-tests: + name: Photon OS 3 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-3-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-4-pkg-tests: + name: Photon OS 4 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-4-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + photonos-4-arm64-pkg-tests: + name: Photon OS 4 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-4-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true - debian-11-pkg-tests: - name: Debian 11 Package Tests + photonos-5-pkg-tests: + name: Photon OS 5 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-rpm-pkgs-onedir + - photonos-5-ci-deps uses: ./.github/workflows/test-packages-action.yml with: - distro-slug: debian-11 + distro-slug: photonos-5 + nox-session: ci-test-onedir platform: linux arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + 
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true - debian-11-arm64-pkg-tests: - name: Debian 11 Arm64 Package Tests + photonos-5-arm64-pkg-tests: + name: Photon OS 5 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-rpm-pkgs-onedir + - photonos-5-arm64-ci-deps uses: ./.github/workflows/test-packages-action.yml with: - distro-slug: debian-11-arm64 + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true ubuntu-2004-pkg-tests: - name: Ubuntu 20.04 Package Tests + name: Ubuntu 20.04 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-deb-pkgs-onedir + - ubuntu-2004-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir platform: linux arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2004-arm64-pkg-tests: - name: Ubuntu 20.04 Arm64 
Package Tests + name: Ubuntu 20.04 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-deb-pkgs-onedir + - ubuntu-2004-arm64-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-20.04-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-pkg-tests: - name: Ubuntu 22.04 Package Tests + name: Ubuntu 22.04 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs + - build-deb-pkgs-onedir + - ubuntu-2204-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir platform: linux arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} ubuntu-2204-arm64-pkg-tests: - name: Ubuntu 22.04 Arm64 Package Tests + name: Ubuntu 22.04 Arm64 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - 
build-deb-pkgs + - build-deb-pkgs-onedir + - ubuntu-2204-arm64-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: ubuntu-22.04-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} macos-12-pkg-tests: - name: macOS 12 Package Tests + name: macOS 12 Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-macos-pkgs + - build-macos-pkgs-onedir + - macos-12-ci-deps uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: macos-12 + nox-session: ci-test-onedir platform: darwin arch: x86_64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + macos-13-pkg-tests: + name: macOS 13 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-macos-pkgs-onedir + - macos-13-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + 
pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + macos-13-xlarge-pkg-tests: + name: macOS 13 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-macos-pkgs-onedir + - macos-13-xlarge-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016-nsis-pkg-tests: - name: Windows 2016 NSIS Package Tests + name: Windows 2016 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2016-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2016 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016-msi-pkg-tests: - name: Windows 
2016 MSI Package Tests + name: Windows 2016 MSI Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2016-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2016 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-nsis-pkg-tests: - name: Windows 2019 NSIS Package Tests + name: Windows 2019 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2019 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-msi-pkg-tests: - name: Windows 2019 MSI Package Tests + name: Windows 2019 MSI Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - 
prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2019 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-nsis-pkg-tests: - name: Windows 2022 NSIS Package Tests + name: Windows 2022 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2022 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-msi-pkg-tests: - name: Windows 2022 MSI Package Tests + name: Windows 2022 MSI Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2022 + nox-session: ci-test-onedir 
platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016: - name: Windows 2016 + name: Windows 2016 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2016-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2016 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 windows-2019: - name: Windows 2019 + name: Windows 2019 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2019 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 
skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 windows-2022: - name: Windows 2022 + name: Windows 2022 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2022 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 macos-12: - name: macOS 12 + name: macOS 12 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - macos-12-ci-deps uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-12 nox-session: ci-test-onedir platform: darwin arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + macos-13: + name: macOS 13 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - macos-13-ci-deps + uses: ./.github/workflows/test-action-macos.yml + with: + distro-slug: macos-13 + 
nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + macos-13-xlarge: + name: macOS 13 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - macos-13-xlarge-ci-deps + uses: ./.github/workflows/test-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 almalinux-8: - name: Alma Linux 8 + name: Alma Linux 8 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - almalinux-8-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: almalinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: 
false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 almalinux-9: - name: Alma Linux 9 + name: Alma Linux 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - almalinux-9-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: almalinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 amazonlinux-2: - name: Amazon Linux 2 + name: Amazon Linux 2 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - amazonlinux-2-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: amazonlinux-2 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: 
amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: 
false + workflow-slug: scheduled + default-timeout: 360 archlinux-lts: - name: Arch Linux LTS + name: Arch Linux LTS Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - archlinux-lts-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: archlinux-lts nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 centos-7: - name: CentOS 7 + name: CentOS 7 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centos-7-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: centos-7 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 centosstream-8: - name: CentOS Stream 8 + name: CentOS Stream 8 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centosstream-8-ci-deps uses: 
./.github/workflows/test-action.yml with: distro-slug: centosstream-8 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 centosstream-9: - name: CentOS Stream 9 + name: CentOS Stream 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centosstream-9-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: centosstream-9 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 debian-10: - name: Debian 10 + name: Debian 10 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-10-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-10 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 debian-11: - name: Debian 11 + name: Debian 11 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-11-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-11 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 debian-11-arm64: - name: Debian 11 Arm64 + name: Debian 11 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-11-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-11-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 - fedora-36: - name: Fedora 36 + debian-12: + name: Debian 12 Test if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-12-ci-deps uses: ./.github/workflows/test-action.yml with: - distro-slug: fedora-36 + distro-slug: debian-12 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + debian-12-arm64: + name: Debian 12 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - debian-12-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 fedora-37: - name: Fedora 37 + name: Fedora 37 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - fedora-37-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: fedora-37 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ 
needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 fedora-38: - name: Fedora 38 + name: Fedora 38 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - fedora-38-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: fedora-38 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 opensuse-15: - name: Opensuse 15 + name: Opensuse 15 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - opensuse-15-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: opensuse-15 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 
photonos-3: - name: Photon OS 3 + name: Photon OS 3 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - photonos-3-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: photonos-3 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + photonos-3-arm64: + name: Photon OS 3 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-3-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 photonos-4: - name: Photon OS 4 + name: Photon OS 4 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - photonos-4-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: photonos-4 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + 
gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + fips: true + + photonos-4-arm64: + name: Photon OS 4 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-4-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + fips: true + + photonos-5: + name: Photon OS 5 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-5-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: false + skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + fips: true + + photonos-5-arm64: + name: Photon OS 5 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-5-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + fips: true ubuntu-2004: - name: Ubuntu 20.04 + name: Ubuntu 20.04 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2004-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-20.04 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 ubuntu-2004-arm64: - name: Ubuntu 20.04 Arm64 + name: Ubuntu 20.04 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2004-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-20.04-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + 
gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 ubuntu-2204: - name: Ubuntu 22.04 + name: Ubuntu 22.04 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2204-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-22.04 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 ubuntu-2204-arm64: - name: Ubuntu 22.04 Arm64 + name: Ubuntu 22.04 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2204-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-22.04-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 
skip-code-coverage: false skip-junit-reports: false + workflow-slug: scheduled + default-timeout: 360 + + combine-all-code-coverage: + name: Combine Code Coverage + if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + needs: + - prepare-workflow + - windows-2016-ci-deps + - windows-2019-ci-deps + - windows-2022-ci-deps + - macos-12-ci-deps + - macos-13-ci-deps + - macos-13-xlarge-ci-deps + - almalinux-8-ci-deps + - almalinux-8-arm64-ci-deps + - almalinux-9-ci-deps + - almalinux-9-arm64-ci-deps + - amazonlinux-2-ci-deps + - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps + - archlinux-lts-ci-deps + - centos-7-ci-deps + - centos-7-arm64-ci-deps + - centosstream-8-ci-deps + - centosstream-8-arm64-ci-deps + - centosstream-9-ci-deps + - centosstream-9-arm64-ci-deps + - debian-10-ci-deps + - debian-11-ci-deps + - debian-11-arm64-ci-deps + - debian-12-ci-deps + - debian-12-arm64-ci-deps + - fedora-37-ci-deps + - fedora-37-arm64-ci-deps + - fedora-38-ci-deps + - fedora-38-arm64-ci-deps + - opensuse-15-ci-deps + - photonos-3-ci-deps + - photonos-3-arm64-ci-deps + - photonos-4-ci-deps + - photonos-4-arm64-ci-deps + - photonos-5-ci-deps + - photonos-5-arm64-ci-deps + - ubuntu-2004-ci-deps + - ubuntu-2004-arm64-ci-deps + - ubuntu-2204-ci-deps + - ubuntu-2204-arm64-ci-deps + - windows-2016 + - windows-2019 + - windows-2022 + - macos-12 + - macos-13 + - macos-13-xlarge + - almalinux-8 + - almalinux-9 + - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 + - archlinux-lts + - centos-7 + - centosstream-8 + - centosstream-9 + - debian-10 + - debian-11 + - debian-11-arm64 + - debian-12 + - debian-12-arm64 + - fedora-37 + - fedora-38 + - opensuse-15 + - photonos-3 + - photonos-3-arm64 + - photonos-4 + - photonos-4-arm64 + - photonos-5 + - 
photonos-5-arm64 + - ubuntu-2004 + - ubuntu-2004-arm64 + - ubuntu-2204 + - ubuntu-2204-arm64 + steps: + - uses: actions/checkout@v4 + + - name: Set up Python 3.10 + if: ${{ github.event.repository.private == false }} + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Setup Python Tools Scripts + id: python-tools-scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage + + - name: Install Nox + run: | + python3 -m pip install 'nox==2022.8.7' + + + + - name: Get coverage reports + id: get-coverage-reports + uses: actions/download-artifact@v3 + with: + name: all-testrun-coverage-artifacts + path: artifacts/coverage/ + + - name: Display structure of downloaded files + run: tree -a artifacts/ + + - name: Install Codecov CLI + run: | + # We can't yet use tokenless uploads with the codecov CLI + # python3 -m pip install codecov-cli + # + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import + curl -Os https://uploader.codecov.io/latest/linux/codecov + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + mv ./codecov /usr/local/bin/ + + - name: Create XML Coverage Reports + run: | + nox --force-color -e create-xml-coverage-reports + + - name: Upload Code Coverage To Codecov + run: | + tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/ + + - name: Combine Code Coverage + run: | + nox --force-color -e combine-coverage + + - name: Report Salt Code Coverage + run: | + nox --force-color -e coverage-report -- salt + + - name: Create Salt Code Coverage HTML Report + run: | + nox --force-color -e create-html-coverage-report -- salt
+ + - name: Upload Salt Code Coverage HTML Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-salt-html-report + path: artifacts/coverage/html/salt + retention-days: 7 + if-no-files-found: error + + - name: Report Combined Code Coverage + run: | + nox --force-color -e coverage-report + + - name: Create Combined Code Coverage JSON Report + run: | + nox --force-color -e create-json-coverage-reports + + - name: Upload Combined Code Coverage JSON Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-full-json-report + path: artifacts/coverage/coverage.json + retention-days: 7 + if-no-files-found: error + + - name: Create Combined Code Coverage HTML Report + run: | + nox --force-color -e create-html-coverage-report + + - name: Upload Combined Code Coverage HTML Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-full-html-report + path: artifacts/coverage/html/full + retention-days: 7 + if-no-files-found: error set-pipeline-exit-status: # This step is just so we can make github require this step, to pass checks # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - workflow-requirements - trigger-branch-scheduled-builds @@ -1250,15 +3005,70 @@ jobs: - pre-commit - lint - build-docs - - build-deps-onedir - - build-salt-onedir + - build-deps-onedir-linux + - build-deps-onedir-windows + - build-deps-onedir-macos + - build-salt-onedir-linux + - build-salt-onedir-windows + - build-salt-onedir-macos + - build-rpm-pkgs-src + - build-deb-pkgs-src + - build-windows-pkgs-src + - build-macos-pkgs-src + - combine-all-code-coverage + - windows-2016-ci-deps + - windows-2019-ci-deps + - windows-2022-ci-deps + - macos-12-ci-deps + - 
macos-13-ci-deps + - macos-13-xlarge-ci-deps + - almalinux-8-ci-deps + - almalinux-8-arm64-ci-deps + - almalinux-9-ci-deps + - almalinux-9-arm64-ci-deps + - amazonlinux-2-ci-deps + - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps + - archlinux-lts-ci-deps + - centos-7-ci-deps + - centos-7-arm64-ci-deps + - centosstream-8-ci-deps + - centosstream-8-arm64-ci-deps + - centosstream-9-ci-deps + - centosstream-9-arm64-ci-deps + - debian-10-ci-deps + - debian-11-ci-deps + - debian-11-arm64-ci-deps + - debian-12-ci-deps + - debian-12-arm64-ci-deps + - fedora-37-ci-deps + - fedora-37-arm64-ci-deps + - fedora-38-ci-deps + - fedora-38-arm64-ci-deps + - opensuse-15-ci-deps + - photonos-3-ci-deps + - photonos-3-arm64-ci-deps + - photonos-4-ci-deps + - photonos-4-arm64-ci-deps + - photonos-5-ci-deps + - photonos-5-arm64-ci-deps + - ubuntu-2004-ci-deps + - ubuntu-2004-arm64-ci-deps + - ubuntu-2204-ci-deps + - ubuntu-2204-arm64-ci-deps - windows-2016 - windows-2019 - windows-2022 - macos-12 + - macos-13 + - macos-13-xlarge - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -1266,28 +3076,47 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 + - debian-12 + - debian-12-arm64 - fedora-37 - fedora-38 - opensuse-15 - photonos-3 + - photonos-3-arm64 - photonos-4 + - photonos-4-arm64 + - photonos-5 + - photonos-5-arm64 - ubuntu-2004 - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests + - debian-12-pkg-tests + - debian-12-arm64-pkg-tests + - photonos-3-pkg-tests + - photonos-3-arm64-pkg-tests + - 
photonos-4-pkg-tests + - photonos-4-arm64-pkg-tests + - photonos-5-pkg-tests + - photonos-5-arm64-pkg-tests - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests - ubuntu-2204-arm64-pkg-tests - macos-12-pkg-tests + - macos-13-pkg-tests + - macos-13-xlarge-pkg-tests - windows-2016-nsis-pkg-tests - windows-2016-msi-pkg-tests - windows-2019-nsis-pkg-tests diff --git a/.github/workflows/scripts/update_winrepo.py b/.github/workflows/scripts/update_winrepo.py new file mode 100644 index 000000000000..b12d6558a4d9 --- /dev/null +++ b/.github/workflows/scripts/update_winrepo.py @@ -0,0 +1,31 @@ +import argparse +import os + +# Where are we +print(os.getcwd()) + +arg_parser = argparse.ArgumentParser() +arg_parser.add_argument("-f", "--file", help="the winrepo file to edit") +arg_parser.add_argument("-v", "--version", help="The version to add") + +args = arg_parser.parse_args() +file = args.file +version = args.version + +if version.startswith("v"): + version = version[1:] + +with open(file) as f: + current_contents = f.readlines() + +new_contents = [] + +added = False +for line in current_contents: + new_contents.append(line) + if "for version in [" in line and not added: + new_contents.append(f" '{version}',\n") + added = True + +with open(file, "w") as f: + f.writelines(new_contents) diff --git a/.github/workflows/staging.yml b/.github/workflows/staging.yml index 9e5590ececcf..0940d371b7ef 100644 --- a/.github/workflows/staging.yml +++ b/.github/workflows/staging.yml @@ -3,7 +3,7 @@ --- name: Stage Release -run-name: "Stage Release (${{ format('Branch: {0} // Version: {1}', github.ref_name, inputs.salt-version) }})" +run-name: "Stage Release (branch: ${{ github.ref_name }}; version: ${{ inputs.salt-version }})" on: workflow_dispatch: @@ -14,6 +14,14 @@ on: description: > The Salt version to set prior to building packages and staging the release. (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0). 
+ sign-windows-packages: + type: boolean + default: false + description: Sign Windows Packages + skip-test-pypi-publish: + type: boolean + default: false + description: Skip publishing the source package to Test PyPi (For example, CVE releases) skip-salt-test-suite: type: boolean default: false @@ -22,16 +30,20 @@ on: type: boolean default: false description: Skip running the Salt packages test suite. + skip-salt-pkg-download-test-suite: + type: boolean + default: false + description: Skip running the Salt packages download test suite. env: COLUMNS: 190 - CACHE_SEED: SEED-2 # Bump the number to invalidate all caches + CACHE_SEED: SEED-7 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" - REPO_BASE_URL: "https://${{ secrets.SALT_REPO_USER }}:${{ secrets.SALT_REPO_PASS }}@${{ secrets.SALT_REPO_DOMAIN }}" permissions: contents: read # for dorny/paths-filter to fetch a list of changed files pull-requests: read # for dorny/paths-filter to read pull requests + actions: read # for technote-space/workflow-conclusion-action to get the job statuses concurrency: group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.repository }} @@ -41,7 +53,7 @@ jobs: check-requirements: name: Check Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} environment: staging-check steps: - name: Check For Admin Permission @@ -50,31 +62,24 @@ jobs: require: admin username: ${{ github.triggering_actor }} - - name: Check Branch - run: | - echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('["master", "3006.x"]'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: master, 3006.x" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - prepare-workflow: name: 
Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - check-requirements outputs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }} + pull-labels: ${{ steps.get-pull-labels.outputs.labels }} testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} + testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 # Full clone to also get the tags to get the right salt version @@ -120,9 +125,11 @@ jobs: - *doc_requirements workflows: - added|modified: + - cicd/shared-gh-workflows-context.yml - .github/actions/**/action.yml - .github/workflows/*.yml - .github/workflows/templates/*.yml.jinja2 + - tools/precommit/workflows.py salt: - added|modified: &salt_added_modified - setup.py @@ -160,11 +167,18 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: tools ci print-gh-event + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -172,9 +186,22 @@ jobs: salt-version: "${{ inputs.salt-version }}" validate-version: true + - name: Get Pull Request Test Labels + id: get-pull-labels + if: ${{ github.event_name == 'pull_request'}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci 
get-pr-test-labels --repository ${{ github.repository }} + - name: Check Existing Releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + if [ "${{ github.event.repository.private }}" = "true" ]; then + tools pkg repo confirm-unreleased --repository saltstack/salt ${{ steps.setup-salt-version.outputs.salt-version }} + fi - name: Write Changed Files To A Local File run: @@ -207,12 +234,32 @@ jobs: - name: Define Jobs id: define-jobs run: | - tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }} ${{ github.event_name }} changed-files.json + tools ci define-jobs${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }} ${{ github.event_name }} changed-files.json - name: Check Defined Jobs run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Get Latest Salt Releases for Testing + id: get-testing-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + echo '${{ steps.get-testing-releases.outputs.testing-releases }}' | jq -C '.' 
+ - name: Define Testrun id: define-testrun run: | @@ -234,10 +281,7 @@ jobs: name: testrun-changed-files.txt path: testrun-changed-files.txt - - name: Set Cache Seed Output - id: set-cache-seed - run: | - echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + pre-commit: name: Pre-Commit if: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} @@ -269,30 +313,12 @@ jobs: needs: - prepare-workflow steps: - - uses: actions/checkout@v3 - - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -301,28 +327,30 @@ jobs: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" release: true - # TODO: Remove the --salt-version argument post 3006 release. This was to handle versioning - # issues on pre-3006 development versions on deb-based distros. 
- name: Update Debian changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" --draft - tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" + tools changelog update-deb --draft + tools changelog update-deb - name: Update RPM changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-rpm --draft tools changelog update-rpm - name: Update Release Notes shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-release-notes --draft --release tools changelog update-release-notes --release - name: Generate MAN Pages shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: LATEST_RELEASE: "${{ needs.prepare-workflow.outputs.salt-version }}" SALT_ON_SALTSTACK: "1" @@ -331,22 +359,26 @@ jobs: - name: Update Changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-changelog-md --draft tools changelog update-changelog-md - name: Show Changes Diff shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git diff --color - name: Configure Git shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git config --global user.name "Salt Project Packaging" git config --global user.email saltproject-packaging@vmware.com - name: Setup Pre-Commit + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} uses: ./.github/actions/setup-pre-commit with: version: "3.0.4" @@ -354,8 +386,10 @@ jobs: - name: Commit Changes shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. 
git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ @@ -363,11 +397,13 @@ jobs: - name: Create release changes patch shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git format-patch --keep-subject --binary --stdout HEAD^ > salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch - name: Upload Changes Diff Artifact uses: actions/upload-artifact@v3 + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch path: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch @@ -391,24 +427,19 @@ jobs: needs: - prepare-workflow - prepare-release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 3.10 uses: actions/setup-python@v4 with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -416,831 +447,2384 @@ jobs: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" 
- build-deps-onedir: + build-deps-onedir-linux: + name: Build Dependencies Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + uses: ./.github/workflows/build-deps-onedir-linux.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-deps-onedir-windows: + name: Build Dependencies Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + uses: ./.github/workflows/build-deps-onedir-windows.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-deps-onedir-macos: name: Build Dependencies Onedir if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-onedir'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml + uses: ./.github/workflows/build-deps-onedir-macos.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.7.0" - python-version-linux: "3.10.10" - python-version-macos: "3.10.10" - python-version-windows: "3.10.10" + relenv-version: "0.14.2" + python-version: "3.10.13" - build-salt-onedir: + build-salt-onedir-linux: name: Build Salt Onedir if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} needs: - prepare-workflow - - build-deps-onedir + - build-deps-onedir-linux - build-source-tarball - uses: ./.github/workflows/build-salt-onedir.yml + uses: ./.github/workflows/build-salt-onedir-linux.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - relenv-version: "0.7.0" - python-version-linux: "3.10.10" - python-version-macos: "3.10.10" - python-version-windows: "3.10.10" + relenv-version: "0.14.2" + python-version: "3.10.13" - build-rpm-pkgs: - name: Build RPM Packages + build-salt-onedir-windows: + name: Build Salt Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} + needs: + - prepare-workflow + - build-deps-onedir-windows + - build-source-tarball + uses: ./.github/workflows/build-salt-onedir-windows.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-salt-onedir-macos: + name: Build Salt Onedir + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-salt-onedir'] }} + needs: + - prepare-workflow + - 
build-deps-onedir-macos + - build-source-tarball + uses: ./.github/workflows/build-salt-onedir-macos.yml + with: + cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + relenv-version: "0.14.2" + python-version: "3.10.13" + + build-rpm-pkgs-onedir: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-rpm-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" + + build-rpm-pkgs-src: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-linux uses: ./.github/workflows/build-rpm-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + + build-deb-pkgs-onedir: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deb-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + 
python-version: "3.10.13" + source: "onedir" - build-deb-pkgs: - name: Build DEB Packages + build-deb-pkgs-src: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-linux uses: ./.github/workflows/build-deb-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" - build-windows-pkgs: - name: Build Windows Packages + build-windows-pkgs-onedir: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-windows uses: ./.github/workflows/build-windows-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - build-macos-pkgs: - name: Build macOS Packages + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" + environment: staging + sign-packages: ${{ inputs.sign-windows-packages }} + secrets: inherit + + build-windows-pkgs-src: + name: Build Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: ./.github/workflows/build-windows-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + environment: staging + sign-packages: ${{ inputs.sign-windows-packages }} + secrets: inherit + + build-macos-pkgs-onedir: + name: Build 
Packages + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-macos + uses: ./.github/workflows/build-macos-packages.yml + with: + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "onedir" + environment: staging + sign-packages: true + secrets: inherit + + build-macos-pkgs-src: + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-macos uses: ./.github/workflows/build-macos-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "0.14.2" + python-version: "3.10.13" + source: "src" + environment: staging + sign-packages: true + secrets: inherit + + windows-2016-ci-deps: + name: Windows 2016 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - amazonlinux-2-pkg-tests: - name: Amazon Linux 2 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + windows-2019-ci-deps: + name: Windows 2019 Deps + if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: amazonlinux-2 - platform: linux - arch: x86_64 + distro-slug: windows-2019 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centos-7-pkg-tests: - name: CentOS 7 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + windows-2022-ci-deps: + name: Windows 2022 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-windows + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: centos-7 - platform: linux - arch: x86_64 + distro-slug: windows-2022 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centosstream-8-pkg-tests: - name: CentOS Stream 8 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + macos-12-ci-deps: + name: macOS 12 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml with: - distro-slug: centosstream-8 - platform: linux + distro-slug: macos-12 + nox-session: ci-test-onedir + platform: darwin arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - centosstream-9-pkg-tests: - name: CentOS Stream 9 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + macos-13-ci-deps: + name: macOS 13 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-rpm-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml with: - distro-slug: centosstream-9 - platform: linux + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: rpm - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - debian-10-pkg-tests: - name: Debian 10 
Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + macos-13-xlarge-ci-deps: + name: macOS 13 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-macos + uses: ./.github/workflows/build-deps-ci-action-macos.yml with: - distro-slug: debian-10 - platform: linux - arch: x86_64 + distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - debian-11-pkg-tests: - name: Debian 11 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + almalinux-8-ci-deps: + name: Alma Linux 8 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: debian-11 + distro-slug: almalinux-8 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - debian-11-arm64-pkg-tests: - name: Debian 11 Arm64 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + almalinux-8-arm64-ci-deps: + name: Alma Linux 8 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: debian-11-arm64 + distro-slug: almalinux-8-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - ubuntu-2004-pkg-tests: - name: Ubuntu 20.04 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + almalinux-9-ci-deps: + name: Alma Linux 9 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: ubuntu-20.04 + distro-slug: almalinux-9 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - 
skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - ubuntu-2004-arm64-pkg-tests: - name: Ubuntu 20.04 Arm64 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + almalinux-9-arm64-ci-deps: + name: Alma Linux 9 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: ubuntu-20.04-arm64 + distro-slug: almalinux-9-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - ubuntu-2204-pkg-tests: - name: Ubuntu 22.04 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + amazonlinux-2-ci-deps: + name: Amazon Linux 2 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: ubuntu-22.04 + distro-slug: amazonlinux-2 + nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - 
pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - ubuntu-2204-arm64-pkg-tests: - name: Ubuntu 22.04 Arm64 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + amazonlinux-2-arm64-ci-deps: + name: Amazon Linux 2 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-deb-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: ubuntu-22.04-arm64 + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: deb - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - macos-12-pkg-tests: - name: macOS 12 Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + amazonlinux-2023-ci-deps: + name: Amazon Linux 2023 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-macos-pkgs - uses: ./.github/workflows/test-packages-action-macos.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: macos-12 - platform: darwin + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + platform: linux arch: 
x86_64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: macos - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - windows-2016-nsis-pkg-tests: - name: Windows 2016 NSIS Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + amazonlinux-2023-arm64-ci-deps: + name: Amazon Linux 2023 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml with: - distro-slug: windows-2016 - platform: windows - arch: amd64 + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 - skip-code-coverage: true - skip-junit-reports: true + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 - windows-2016-msi-pkg-tests: - name: Windows 2016 MSI Package Tests - if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + archlinux-lts-ci-deps: + name: Arch Linux LTS Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs - uses: ./.github/workflows/test-packages-action.yml + - build-salt-onedir-linux + uses: 
./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: archlinux-lts + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centos-7-ci-deps: + name: CentOS 7 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centos-7 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centos-7-arm64-ci-deps: + name: CentOS 7 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centos-7-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-8-ci-deps: + name: CentOS Stream 8 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: 
"${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-8-arm64-ci-deps: + name: CentOS Stream 8 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-8-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-9-ci-deps: + name: CentOS Stream 9 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + centosstream-9-arm64-ci-deps: + name: CentOS Stream 9 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-10-ci-deps: + 
name: Debian 10 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-11-ci-deps: + name: Debian 11 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-11-arm64-ci-deps: + name: Debian 11 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-12-ci-deps: + name: Debian 12 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - 
build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-12 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + debian-12-arm64-ci-deps: + name: Debian 12 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-37-ci-deps: + name: Fedora 37 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-37 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-37-arm64-ci-deps: + name: Fedora 37 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-37-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + 
python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-38-ci-deps: + name: Fedora 38 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-38 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + fedora-38-arm64-ci-deps: + name: Fedora 38 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: fedora-38-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + opensuse-15-ci-deps: + name: Opensuse 15 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: opensuse-15 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-3-ci-deps: + name: Photon OS 3 Deps 
+ if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-3 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-3-arm64-ci-deps: + name: Photon OS 3 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-4-ci-deps: + name: Photon OS 4 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-4-arm64-ci-deps: + name: Photon OS 4 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - 
build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-5-ci-deps: + name: Photon OS 5 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + photonos-5-arm64-ci-deps: + name: Photon OS 5 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2004-ci-deps: + name: Ubuntu 20.04 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 
2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2004-arm64-ci-deps: + name: Ubuntu 20.04 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-20.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2204-ci-deps: + name: Ubuntu 22.04 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + + ubuntu-2204-arm64-ci-deps: + name: Ubuntu 22.04 Arm64 Deps + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-salt-onedir-linux + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: ubuntu-22.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + python-version: "3.10" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 
+ + amazonlinux-2-pkg-tests: + name: Amazon Linux 2 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2-arm64-pkg-tests: + name: Amazon Linux 2 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-pkg-tests: + name: Amazon Linux 2023 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + 
platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + amazonlinux-2023-arm64-pkg-tests: + name: Amazon Linux 2023 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centos-7-pkg-tests: + name: CentOS 7 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centos-7-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centos-7 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-8-pkg-tests: + name: CentOS Stream 8 Package Test + 
if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-8-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-8 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-9-pkg-tests: + name: CentOS Stream 9 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + centosstream-9-arm64-pkg-tests: + name: CentOS Stream 9 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - centosstream-9-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: centosstream-9-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-10-pkg-tests: + name: Debian 10 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-10-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-10 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-11-pkg-tests: + name: Debian 11 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-11-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-11 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-11-arm64-pkg-tests: + name: Debian 11 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-11-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-11-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-12-pkg-tests: + name: Debian 12 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-12-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-12 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + debian-12-arm64-pkg-tests: + name: Debian 12 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - debian-12-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-3-pkg-tests: + name: Photon OS 3 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-3-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-3 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-3-arm64-pkg-tests: + name: Photon OS 3 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-3-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + photonos-4-pkg-tests: + name: Photon OS 4 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - 
photonos-4-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-4 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + photonos-4-arm64-pkg-tests: + name: Photon OS 4 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-4-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + photonos-5-pkg-tests: + name: Photon OS 5 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-5-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: 
true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + photonos-5-arm64-pkg-tests: + name: Photon OS 5 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-rpm-pkgs-onedir + - photonos-5-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: rpm + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + fips: true + + ubuntu-2004-pkg-tests: + name: Ubuntu 20.04 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - ubuntu-2004-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-20.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + ubuntu-2004-arm64-pkg-tests: + name: Ubuntu 20.04 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - ubuntu-2004-arm64-ci-deps + uses: 
./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-20.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + ubuntu-2204-pkg-tests: + name: Ubuntu 22.04 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - ubuntu-2204-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-22.04 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + ubuntu-2204-arm64-pkg-tests: + name: Ubuntu 22.04 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-deb-pkgs-onedir + - ubuntu-2204-arm64-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: ubuntu-22.04-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: deb + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} + + macos-12-pkg-tests: + name: macOS 12 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-macos-pkgs-onedir + - macos-12-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-12 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + macos-13-pkg-tests: + name: macOS 13 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-macos-pkgs-onedir + - macos-13-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + macos-13-xlarge-pkg-tests: + name: macOS 13 Arm64 Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - build-macos-pkgs-onedir + - macos-13-xlarge-ci-deps + uses: ./.github/workflows/test-packages-action-macos.yml + with: + distro-slug: macos-13-xlarge + nox-session: 
ci-test-onedir + platform: darwin + arch: aarch64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: macos + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + windows-2016-nsis-pkg-tests: + name: Windows 2016 NSIS Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-windows-pkgs-onedir + - windows-2016-ci-deps + uses: ./.github/workflows/test-packages-action.yml + with: + distro-slug: windows-2016 + nox-session: ci-test-onedir + platform: windows + arch: amd64 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + pkg-type: NSIS + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + + windows-2016-msi-pkg-tests: + name: Windows 2016 MSI Package Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - build-windows-pkgs-onedir + - windows-2016-ci-deps + uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2016 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ 
needs.prepare-workflow.outputs.testing-releases }} windows-2019-nsis-pkg-tests: - name: Windows 2019 NSIS Package Tests + name: Windows 2019 NSIS Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2019 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2019-msi-pkg-tests: - name: Windows 2019 MSI Package Tests + name: Windows 2019 MSI Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2019 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-nsis-pkg-tests: - name: Windows 2022 NSIS Package Tests + name: Windows 2022 NSIS Package Test if: ${{ 
fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2022 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: NSIS - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2022-msi-pkg-tests: - name: Windows 2022 MSI Package Tests + name: Windows 2022 MSI Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: windows-2022 + nox-session: ci-test-onedir platform: windows arch: amd64 salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: MSI - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + nox-version: 2022.8.7 + python-version: "3.10" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} windows-2016: - name: Windows 2016 + name: Windows 2016 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2016-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: 
windows-2016 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 windows-2019: - name: Windows 2019 + name: Windows 2019 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2019-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2019 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 windows-2022: - name: Windows 2022 + name: Windows 2022 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - windows-2022-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: windows-2022 nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 macos-12: - name: macOS 12 + name: macOS 12 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - macos-12-ci-deps uses: ./.github/workflows/test-action-macos.yml with: distro-slug: macos-12 nox-session: ci-test-onedir platform: darwin arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + + macos-13: + name: macOS 13 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - macos-13-ci-deps + uses: ./.github/workflows/test-action-macos.yml + with: + distro-slug: macos-13 + nox-session: ci-test-onedir + platform: darwin + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + + macos-13-xlarge: + name: macOS 13 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + needs: + - prepare-workflow + - macos-13-xlarge-ci-deps + uses: ./.github/workflows/test-action-macos.yml + with: + 
distro-slug: macos-13-xlarge + nox-session: ci-test-onedir + platform: darwin + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 almalinux-8: - name: Alma Linux 8 + name: Alma Linux 8 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - almalinux-8-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: almalinux-8 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 almalinux-9: - name: Alma Linux 9 + name: Alma Linux 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - almalinux-9-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: almalinux-9 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 
skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 amazonlinux-2: - name: Amazon Linux 2 + name: Amazon Linux 2 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - amazonlinux-2-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: amazonlinux-2 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + + amazonlinux-2-arm64: + name: Amazon Linux 2 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + + amazonlinux-2023: + name: Amazon Linux 2023 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023 + nox-session: ci-test-onedir + 
platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + + amazonlinux-2023-arm64: + name: Amazon Linux 2023 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - amazonlinux-2023-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: amazonlinux-2023-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 archlinux-lts: - name: Arch Linux LTS + name: Arch Linux LTS Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - archlinux-lts-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: archlinux-lts nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true 
skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 centos-7: - name: CentOS 7 + name: CentOS 7 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centos-7-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: centos-7 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 centosstream-8: - name: CentOS Stream 8 + name: CentOS Stream 8 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centosstream-8-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: centosstream-8 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 centosstream-9: - name: CentOS Stream 9 + name: CentOS Stream 9 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - centosstream-9-ci-deps uses: 
./.github/workflows/test-action.yml with: distro-slug: centosstream-9 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 debian-10: - name: Debian 10 + name: Debian 10 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-10-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-10 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 debian-11: - name: Debian 11 + name: Debian 11 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-11-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-11 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + 
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 debian-11-arm64: - name: Debian 11 Arm64 + name: Debian 11 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-11-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: debian-11-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 - fedora-36: - name: Fedora 36 + debian-12: + name: Debian 12 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - debian-12-ci-deps uses: ./.github/workflows/test-action.yml with: - distro-slug: fedora-36 + distro-slug: debian-12 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + + debian-12-arm64: + name: Debian 12 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - 
prepare-workflow + - debian-12-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: debian-12-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 fedora-37: - name: Fedora 37 + name: Fedora 37 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - fedora-37-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: fedora-37 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 fedora-38: - name: Fedora 38 + name: Fedora 38 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - fedora-38-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: fedora-38 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - 
cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 opensuse-15: - name: Opensuse 15 + name: Opensuse 15 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - opensuse-15-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: opensuse-15 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 photonos-3: - name: Photon OS 3 + name: Photon OS 3 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - photonos-3-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: photonos-3 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + + photonos-3-arm64: + name: Photon OS 3 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + 
needs: + - prepare-workflow + - photonos-3-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-3-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 photonos-4: - name: Photon OS 4 + name: Photon OS 4 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - photonos-4-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: photonos-4 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + fips: true + + photonos-4-arm64: + name: Photon OS 4 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-4-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-4-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ 
needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + fips: true + + photonos-5: + name: Photon OS 5 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-5-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-5 + nox-session: ci-test-onedir + platform: linux + arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" + testrun: ${{ needs.prepare-workflow.outputs.testrun }} + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + skip-code-coverage: true + skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + fips: true + + photonos-5-arm64: + name: Photon OS 5 Arm64 Test + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - photonos-5-arm64-ci-deps + uses: ./.github/workflows/test-action.yml + with: + distro-slug: photonos-5-arm64 + nox-session: ci-test-onedir + platform: linux + arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 + fips: true ubuntu-2004: - name: Ubuntu 20.04 + name: Ubuntu 20.04 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - 
ubuntu-2004-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-20.04 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 ubuntu-2004-arm64: - name: Ubuntu 20.04 Arm64 + name: Ubuntu 20.04 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2004-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-20.04-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 ubuntu-2204: - name: Ubuntu 22.04 + name: Ubuntu 22.04 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2204-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-22.04 nox-session: ci-test-onedir platform: linux arch: x86_64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ 
needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 ubuntu-2204-arm64: - name: Ubuntu 22.04 Arm64 + name: Ubuntu 22.04 Arm64 Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - ubuntu-2204-arm64-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: ubuntu-22.04-arm64 nox-session: ci-test-onedir platform: linux arch: aarch64 + nox-version: 2022.8.7 + gh-actions-python-version: "3.10" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.10 + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 skip-code-coverage: true skip-junit-reports: true + workflow-slug: staging + default-timeout: 180 build-src-repo: - name: Build Source Repository + name: Build Repository environment: staging runs-on: - self-hosted @@ -1248,12 +2832,26 @@ jobs: - repo-staging needs: - prepare-workflow - - build-salt-onedir + - build-source-tarball + strategy: + fail-fast: false + matrix: + pkg-type: + - src + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + 
echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download Source Tarball uses: actions/download-artifact@v3 @@ -1297,17 +2895,22 @@ jobs: - name: Create Repository run: | tools pkg repo create src --key-id=64CBBC8173D76B3F \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo + - name: Copy Files For Source Only Artifact Uploads + run: | + mkdir artifacts/src + find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \; + - name: Upload Standalone Repository As An Artifact uses: actions/upload-artifact@v3 with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-src-repo path: | - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-* + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* + artifacts/src/*-GPG-* retention-days: 7 if-no-files-found: error @@ -1321,7 +2924,7 @@ jobs: archive-name: src-repo build-deb-repo: - name: Build DEB Repository + name: Build Repository environment: staging runs-on: - self-hosted @@ -1329,38 +2932,54 @@ jobs: - repo-staging needs: - prepare-workflow - - build-deb-pkgs + - build-deb-pkgs-onedir strategy: fail-fast: false matrix: include: - - distro: debian + - pkg-type: deb + distro: debian version: "10" arch: x86_64 - - distro: debian + - pkg-type: deb + distro: debian version: "10" arch: aarch64 - - distro: 
debian + - pkg-type: deb + distro: debian version: "11" arch: x86_64 - - distro: debian + - pkg-type: deb + distro: debian version: "11" arch: aarch64 - - distro: ubuntu + - pkg-type: deb + distro: debian + version: "12" + arch: x86_64 + - pkg-type: deb + distro: debian + version: "12" + arch: aarch64 + - pkg-type: deb + distro: ubuntu version: "20.04" arch: x86_64 - - distro: ubuntu + - pkg-type: deb + distro: ubuntu version: "20.04" arch: aarch64 - - distro: ubuntu + - pkg-type: deb + distro: ubuntu version: "22.04" arch: x86_64 - - distro: ubuntu + - pkg-type: deb + distro: ubuntu version: "22.04" arch: aarch64 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Download System Dependencies run: | @@ -1369,6 +2988,14 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download DEB Packages uses: actions/download-artifact@v3 @@ -1412,7 +3039,7 @@ jobs: - name: Create Repository run: | tools pkg repo create deb --key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo @@ -1426,7 +3053,7 @@ jobs: archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo 
build-rpm-repo: - name: Build RPM Repository + name: Build Repository environment: staging runs-on: - self-hosted @@ -1434,35 +3061,146 @@ jobs: - repo-staging needs: - prepare-workflow - - build-rpm-pkgs + - build-rpm-pkgs-onedir strategy: fail-fast: false matrix: include: - - distro: amazon + - pkg-type: rpm + distro: amazon version: "2" arch: x86_64 - - distro: redhat + - pkg-type: rpm + distro: amazon + version: "2" + arch: arm64 + - pkg-type: rpm + distro: amazon + version: "2" + arch: aarch64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: x86_64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: arm64 + - pkg-type: rpm + distro: amazon + version: "2023" + arch: aarch64 + - pkg-type: rpm + distro: redhat + version: "7" + arch: x86_64 + - pkg-type: rpm + distro: redhat + version: "7" + arch: arm64 + - pkg-type: rpm + distro: redhat version: "7" + arch: aarch64 + - pkg-type: rpm + distro: redhat + version: "8" arch: x86_64 - - distro: redhat + - pkg-type: rpm + distro: redhat version: "8" + arch: arm64 + - pkg-type: rpm + distro: redhat + version: "8" + arch: aarch64 + - pkg-type: rpm + distro: redhat + version: "9" arch: x86_64 - - distro: redhat + - pkg-type: rpm + distro: redhat + version: "9" + arch: arm64 + - pkg-type: rpm + distro: redhat version: "9" + arch: aarch64 + - pkg-type: rpm + distro: fedora + version: "36" arch: x86_64 - - distro: fedora + - pkg-type: rpm + distro: fedora version: "36" + arch: arm64 + - pkg-type: rpm + distro: fedora + version: "36" + arch: aarch64 + - pkg-type: rpm + distro: fedora + version: "37" arch: x86_64 - - distro: fedora + - pkg-type: rpm + distro: fedora + version: "37" + arch: arm64 + - pkg-type: rpm + distro: fedora version: "37" + arch: aarch64 + - pkg-type: rpm + distro: fedora + version: "38" arch: x86_64 - - distro: fedora + - pkg-type: rpm + distro: fedora version: "38" + arch: arm64 + - pkg-type: rpm + distro: fedora + version: "38" + arch: aarch64 + - pkg-type: rpm + distro: photon + 
version: "3" + arch: x86_64 + - pkg-type: rpm + distro: photon + version: "3" + arch: arm64 + - pkg-type: rpm + distro: photon + version: "3" + arch: aarch64 + - pkg-type: rpm + distro: photon + version: "4" + arch: x86_64 + - pkg-type: rpm + distro: photon + version: "4" + arch: arm64 + - pkg-type: rpm + distro: photon + version: "4" + arch: aarch64 + - pkg-type: rpm + distro: photon + version: "5" arch: x86_64 + - pkg-type: rpm + distro: photon + version: "5" + arch: arm64 + - pkg-type: rpm + distro: photon + version: "5" + arch: aarch64 steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Download System Dependencies run: | @@ -1471,11 +3209,19 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download RPM Packages uses: actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-rpm + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}-rpm path: artifacts/pkgs/incoming - name: Setup GnuPG @@ -1512,9 +3258,14 @@ jobs: mkdir -p artifacts/pkgs/repo - name: Create Repository + env: + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | tools pkg repo create rpm 
--key-id=64CBBC8173D76B3F --distro-arch=${{ matrix.arch }} \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo @@ -1528,7 +3279,7 @@ jobs: archive-name: ${{ matrix.distro }}-${{ matrix.version }}-${{ matrix.arch }}-repo build-windows-repo: - name: Build Windows Repository + name: Build Repository environment: staging runs-on: - self-hosted @@ -1536,12 +3287,26 @@ jobs: - repo-staging needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + strategy: + fail-fast: false + matrix: + pkg-type: + - windows + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download Windows NSIS x86 Packages uses: actions/download-artifact@v3 @@ -1603,7 +3368,7 @@ jobs: - name: Create Repository run: | tools pkg repo create windows --key-id=64CBBC8173D76B3F \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo - name: Upload Repository As An Artifact @@ 
-1616,7 +3381,7 @@ jobs: archive-name: windows-repo build-macos-repo: - name: Build macOS Repository + name: Build Repository environment: staging runs-on: - self-hosted @@ -1624,12 +3389,26 @@ jobs: - repo-staging needs: - prepare-workflow - - build-macos-pkgs + - build-macos-pkgs-onedir + strategy: + fail-fast: false + matrix: + pkg-type: + - macos + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download macOS x86_64 Packages uses: actions/download-artifact@v3 @@ -1637,6 +3416,12 @@ jobs: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-x86_64-macos path: artifacts/pkgs/incoming + - name: Download macOS Arm64 Packages + uses: actions/download-artifact@v3 + with: + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-aarch64-macos + path: artifacts/pkgs/incoming + - name: Setup GnuPG run: | sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg @@ -1673,7 +3458,7 @@ jobs: - name: Create Repository run: | tools pkg repo create macos --key-id=64CBBC8173D76B3F \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo - name: Upload Repository As An Artifact @@ -1686,7 +3471,7 @@ jobs: archive-name: macos-repo build-onedir-repo: - name: 
Build Onedir Repository + name: Build Repository environment: staging runs-on: - self-hosted @@ -1694,12 +3479,28 @@ jobs: - repo-staging needs: - prepare-workflow - - build-salt-onedir + - build-salt-onedir-linux + - build-salt-onedir-macos + - build-salt-onedir-windows + strategy: + fail-fast: false + matrix: + pkg-type: + - onedir + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download Linux x86_64 Onedir Archive uses: actions/download-artifact@v3 @@ -1719,6 +3520,12 @@ jobs: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-darwin-x86_64.tar.xz path: artifacts/pkgs/incoming + - name: Download macOS arm64 Onedir Archive + uses: actions/download-artifact@v3 + with: + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-darwin-aarch64.tar.xz + path: artifacts/pkgs/incoming + - name: Download Windows amd64 Onedir Archive uses: actions/download-artifact@v3 with: @@ -1779,7 +3586,7 @@ jobs: - name: Create Repository run: | tools pkg repo create onedir --key-id=64CBBC8173D76B3F \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo - name: Upload Repository As An Artifact @@ -1793,6 +3600,7 @@ jobs: publish-repositories: name: 
Publish Repositories + if: ${{ always() && ! failure() && ! cancelled() }} runs-on: - self-hosted - linux @@ -1808,7 +3616,7 @@ jobs: - build-onedir-repo steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -1818,6 +3626,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Repository Artifact uses: actions/download-artifact@v3 @@ -1835,9 +3645,11 @@ jobs: tree -a artifacts/pkgs/repo/ - name: Upload Repository Contents (staging) + env: + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | - tools pkg repo publish staging \ - ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} artifacts/pkgs/repo/ + tools pkg repo publish staging --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/ upload-release-artifacts: name: Upload Release Artifacts @@ -1851,7 +3663,7 @@ jobs: - linux - repo-staging steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -1861,6 +3673,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Release Patch uses: actions/download-artifact@v3 @@ -1886,22 +3700,141 @@ jobs: name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.epub path: artifacts/release + - name: Show Release Artifacts + run: | + tree -a artifacts/release + - name: Upload Release Artifacts run: | tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release + - name: Upload PyPi Artifacts + uses: 
actions/upload-artifact@v3 + with: + name: pypi-artifacts + path: | + artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc + retention-days: 7 + if-no-files-found: error + + pkg-download-tests: + name: Package Downloads + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + needs: + - prepare-workflow + - publish-repositories + - almalinux-8-arm64-ci-deps + - almalinux-8-ci-deps + - almalinux-9-arm64-ci-deps + - almalinux-9-ci-deps + - amazonlinux-2-arm64-ci-deps + - amazonlinux-2-ci-deps + - amazonlinux-2023-arm64-ci-deps + - amazonlinux-2023-ci-deps + - centos-7-arm64-ci-deps + - centos-7-ci-deps + - centosstream-8-arm64-ci-deps + - centosstream-8-ci-deps + - centosstream-9-arm64-ci-deps + - centosstream-9-ci-deps + - debian-10-ci-deps + - debian-11-arm64-ci-deps + - debian-11-ci-deps + - debian-12-arm64-ci-deps + - debian-12-ci-deps + - fedora-37-arm64-ci-deps + - fedora-37-ci-deps + - fedora-38-arm64-ci-deps + - fedora-38-ci-deps + - macos-12-ci-deps + - macos-13-ci-deps + - macos-13-xlarge-ci-deps + - photonos-3-arm64-ci-deps + - photonos-3-ci-deps + - photonos-4-arm64-ci-deps + - photonos-4-ci-deps + - photonos-5-arm64-ci-deps + - photonos-5-ci-deps + - ubuntu-2004-arm64-ci-deps + - ubuntu-2004-ci-deps + - ubuntu-2204-arm64-ci-deps + - ubuntu-2204-ci-deps + - windows-2022-ci-deps + - build-salt-onedir-linux + - build-salt-onedir-macos + - build-salt-onedir-windows + uses: ./.github/workflows/test-package-downloads-action.yml + with: + nox-session: ci-test-onedir + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|3.10.13 + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: staging + nox-version: 2022.8.7 + python-version: "3.10" + skip-code-coverage: true + latest-release: "${{ 
needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit + publish-pypi: name: Publish to PyPi(test) + if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} needs: - prepare-workflow - upload-release-artifacts + - windows-2016-ci-deps + - windows-2019-ci-deps + - windows-2022-ci-deps + - macos-12-ci-deps + - macos-13-ci-deps + - macos-13-xlarge-ci-deps + - almalinux-8-ci-deps + - almalinux-8-arm64-ci-deps + - almalinux-9-ci-deps + - almalinux-9-arm64-ci-deps + - amazonlinux-2-ci-deps + - amazonlinux-2-arm64-ci-deps + - amazonlinux-2023-ci-deps + - amazonlinux-2023-arm64-ci-deps + - archlinux-lts-ci-deps + - centos-7-ci-deps + - centos-7-arm64-ci-deps + - centosstream-8-ci-deps + - centosstream-8-arm64-ci-deps + - centosstream-9-ci-deps + - centosstream-9-arm64-ci-deps + - debian-10-ci-deps + - debian-11-ci-deps + - debian-11-arm64-ci-deps + - debian-12-ci-deps + - debian-12-arm64-ci-deps + - fedora-37-ci-deps + - fedora-37-arm64-ci-deps + - fedora-38-ci-deps + - fedora-38-arm64-ci-deps + - opensuse-15-ci-deps + - photonos-3-ci-deps + - photonos-3-arm64-ci-deps + - photonos-4-ci-deps + - photonos-4-arm64-ci-deps + - photonos-5-ci-deps + - photonos-5-arm64-ci-deps + - ubuntu-2004-ci-deps + - ubuntu-2004-arm64-ci-deps + - ubuntu-2204-ci-deps + - ubuntu-2204-arm64-ci-deps - windows-2016 - windows-2019 - windows-2022 - macos-12 + - macos-13 + - macos-13-xlarge - almalinux-8 - almalinux-9 - amazonlinux-2 + - amazonlinux-2-arm64 + - amazonlinux-2023 + - amazonlinux-2023-arm64 - archlinux-lts - centos-7 - centosstream-8 @@ -1909,44 +3842,66 @@ jobs: - debian-10 - debian-11 - debian-11-arm64 - - fedora-36 + - debian-12 + - debian-12-arm64 - fedora-37 - fedora-38 - opensuse-15 - photonos-3 + - photonos-3-arm64 - photonos-4 + - photonos-4-arm64 + - photonos-5 + - photonos-5-arm64 - ubuntu-2004 - ubuntu-2004-arm64 - ubuntu-2204 - ubuntu-2204-arm64 - amazonlinux-2-pkg-tests + - amazonlinux-2-arm64-pkg-tests + - 
amazonlinux-2023-pkg-tests + - amazonlinux-2023-arm64-pkg-tests - centos-7-pkg-tests - centosstream-8-pkg-tests - centosstream-9-pkg-tests + - centosstream-9-arm64-pkg-tests - debian-10-pkg-tests - debian-11-pkg-tests - debian-11-arm64-pkg-tests + - debian-12-pkg-tests + - debian-12-arm64-pkg-tests + - photonos-3-pkg-tests + - photonos-3-arm64-pkg-tests + - photonos-4-pkg-tests + - photonos-4-arm64-pkg-tests + - photonos-5-pkg-tests + - photonos-5-arm64-pkg-tests - ubuntu-2004-pkg-tests - ubuntu-2004-arm64-pkg-tests - ubuntu-2204-pkg-tests - ubuntu-2204-arm64-pkg-tests - macos-12-pkg-tests + - macos-13-pkg-tests + - macos-13-xlarge-pkg-tests - windows-2016-nsis-pkg-tests - windows-2016-msi-pkg-tests - windows-2019-nsis-pkg-tests - windows-2019-msi-pkg-tests - windows-2022-nsis-pkg-tests - windows-2022-msi-pkg-tests + - pkg-download-tests environment: staging runs-on: - self-hosted - linux - repo-staging steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | @@ -1972,10 +3927,10 @@ jobs: echo "::add-mask::$TWINE_PASSWORD" echo "twine-password=$TWINE_PASSWORD" >> "${GITHUB_OUTPUT}" - - name: Download Source Repository + - name: Download PyPi Artifacts uses: actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-staging-src-repo + name: pypi-artifacts path: artifacts/release - name: Publish to Test PyPi @@ -1989,17 +3944,26 @@ jobs: # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - check-requirements - prepare-workflow - pre-commit - lint - build-docs - - build-deps-onedir - - build-salt-onedir + - 
build-deps-onedir-linux + - build-deps-onedir-windows + - build-deps-onedir-macos + - build-salt-onedir-linux + - build-salt-onedir-windows + - build-salt-onedir-macos + - build-rpm-pkgs-src + - build-deb-pkgs-src + - build-windows-pkgs-src + - build-macos-pkgs-src - publish-repositories - upload-release-artifacts + - pkg-download-tests - publish-pypi steps: - name: Get workflow information diff --git a/.github/workflows/templates/build-ci-deps.yml.jinja b/.github/workflows/templates/build-ci-deps.yml.jinja new file mode 100644 index 000000000000..eb3622eb52f1 --- /dev/null +++ b/.github/workflows/templates/build-ci-deps.yml.jinja @@ -0,0 +1,86 @@ + + <%- for slug, display_name, arch in build_ci_deps_listing["windows"] %> + + <{ slug.replace(".", "") }>-ci-deps: + <%- do test_salt_needs.append(slug.replace(".", "") + "-ci-deps") %> + name: <{ display_name }> Deps + <%- if workflow_slug != 'release' %> + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + <%- endif %> + needs: + - prepare-workflow + <%- if workflow_slug != 'release' %> + - build-salt-onedir-windows + <%- else %> + - download-onedir-artifact + <%- endif %> + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: <{ slug }> + nox-session: ci-test-onedir + platform: windows + arch: amd64 + nox-version: <{ nox_version }> + python-version: "<{ gh_actions_workflows_python_version }>" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> + + <%- endfor %> + + + <%- for slug, display_name, arch in build_ci_deps_listing["macos"] %> + + <{ slug.replace(".", "") }>-ci-deps: + <%- do test_salt_needs.append(slug.replace(".", "") + "-ci-deps") %> + name: <{ display_name }> Deps + <%- if workflow_slug != 'release' %> + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && 
fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} + <%- endif %> + needs: + - prepare-workflow + <%- if workflow_slug != 'release' %> + - build-salt-onedir-macos + <%- else %> + - download-onedir-artifact + <%- endif %> + uses: ./.github/workflows/build-deps-ci-action-macos.yml + with: + distro-slug: <{ slug }> + nox-session: ci-test-onedir + platform: darwin + arch: <{ arch }> + nox-version: <{ nox_version }> + python-version: "<{ gh_actions_workflows_python_version }>" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> + + <%- endfor %> + + + <%- for slug, display_name, arch in build_ci_deps_listing["linux"] %> + + <{ slug.replace(".", "") }>-ci-deps: + <%- do test_salt_needs.append(slug.replace(".", "") + "-ci-deps") %> + name: <{ display_name }> Deps + <%- if workflow_slug != 'release' %> + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-deps-ci'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + <%- endif %> + needs: + - prepare-workflow + <%- if workflow_slug != 'release' %> + - build-salt-onedir-linux + <%- else %> + - download-onedir-artifact + <%- endif %> + uses: ./.github/workflows/build-deps-ci-action.yml + with: + distro-slug: <{ slug }> + nox-session: ci-test-onedir + platform: linux + arch: <{ arch }> + nox-version: <{ nox_version }> + python-version: "<{ gh_actions_workflows_python_version }>" + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> + + <%- endfor %> diff --git a/.github/workflows/templates/build-deb-repo.yml.jinja b/.github/workflows/templates/build-deb-repo.yml.jinja index 55012c35137b..8d9c054405fc 100644 --- a/.github/workflows/templates/build-deb-repo.yml.jinja +++ b/.github/workflows/templates/build-deb-repo.yml.jinja @@ -3,33 +3,26 @@ fail-fast: false matrix: include: - - 
distro: debian - version: "10" - arch: x86_64 - - distro: debian - version: "10" - arch: aarch64 - - distro: debian - version: "11" - arch: x86_64 - - distro: debian - version: "11" - arch: aarch64 - - distro: ubuntu - version: "20.04" - arch: x86_64 - - distro: ubuntu - version: "20.04" - arch: aarch64 - - distro: ubuntu - version: "22.04" - arch: x86_64 - - distro: ubuntu - version: "22.04" - arch: aarch64 + <%- for distro, version, arch in ( + ("debian", "10", "x86_64"), + ("debian", "10", "aarch64"), + ("debian", "11", "x86_64"), + ("debian", "11", "aarch64"), + ("debian", "12", "x86_64"), + ("debian", "12", "aarch64"), + ("ubuntu", "20.04", "x86_64"), + ("ubuntu", "20.04", "aarch64"), + ("ubuntu", "22.04", "x86_64"), + ("ubuntu", "22.04", "aarch64"), + ) %> + - pkg-type: deb + distro: <{ distro }> + version: "<{ version }>" + arch: <{ arch }> + <%- endfor %> steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Download System Dependencies run: | @@ -38,6 +31,14 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download DEB Packages uses: actions/download-artifact@v3 @@ -80,8 +81,8 @@ - name: Create Repository run: | - tools pkg repo create deb --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo 
create deb --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-macos-repo.yml.jinja b/.github/workflows/templates/build-macos-repo.yml.jinja index 796e6da6906c..c8127d0ac3e8 100644 --- a/.github/workflows/templates/build-macos-repo.yml.jinja +++ b/.github/workflows/templates/build-macos-repo.yml.jinja @@ -1,9 +1,23 @@ + strategy: + fail-fast: false + matrix: + pkg-type: + - macos + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download macOS x86_64 Packages uses: actions/download-artifact@v3 @@ -11,6 +25,12 @@ name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-x86_64-macos path: artifacts/pkgs/incoming + - name: Download macOS Arch64 Packages + uses: actions/download-artifact@v3 + with: + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-aarch64-macos + path: artifacts/pkgs/incoming + - name: Setup GnuPG run: | sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg @@ -46,8 +66,8 @@ - name: Create Repository run: | - tools pkg repo create macos --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> 
--nightly-build <%- endif %> \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create macos --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo - name: Upload Repository As An Artifact diff --git a/.github/workflows/templates/build-onedir-repo.yml.jinja b/.github/workflows/templates/build-onedir-repo.yml.jinja index fd7fafba9e97..8e51caa9c8ba 100644 --- a/.github/workflows/templates/build-onedir-repo.yml.jinja +++ b/.github/workflows/templates/build-onedir-repo.yml.jinja @@ -1,9 +1,23 @@ + strategy: + fail-fast: false + matrix: + pkg-type: + - onedir + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download Linux x86_64 Onedir Archive uses: actions/download-artifact@v3 @@ -23,6 +37,12 @@ name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-darwin-x86_64.tar.xz path: artifacts/pkgs/incoming + - name: Download macOS arm64 Onedir Archive + uses: actions/download-artifact@v3 + with: + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-onedir-darwin-aarch64.tar.xz + path: artifacts/pkgs/incoming + - name: Download Windows amd64 Onedir Archive uses: 
actions/download-artifact@v3 with: @@ -82,8 +102,8 @@ - name: Create Repository run: | - tools pkg repo create onedir --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create onedir --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo - name: Upload Repository As An Artifact diff --git a/.github/workflows/templates/build-packages.yml.jinja b/.github/workflows/templates/build-packages.yml.jinja index a9cb4fad39a3..b5086a75e583 100644 --- a/.github/workflows/templates/build-packages.yml.jinja +++ b/.github/workflows/templates/build-packages.yml.jinja @@ -1,20 +1,38 @@ -<%- for pkg_type, display_name, runner_type in ( - ("rpm", "RPM", "self-hosted"), - ("deb", "DEB", "self-hosted"), - ("windows", "Windows", "github-hosted"), - ("macos", "macOS", "github-hosted"), - ) %> +<%- for platform, pkg_type, display_name, runner_type in ( + ("linux", "rpm", "RPM", "self-hosted"), + ("linux", "deb", "DEB", "self-hosted"), + ("windows", "windows", "Windows", "github-hosted"), + ("macos", "macos", "macOS", "github-hosted"), + ) %> - <%- set job_name = "build-{}-pkgs".format(pkg_type) %> + <%- for backend in ("onedir", "src") %> + <%- set job_name = "build-{}-pkgs-{}".format(pkg_type, backend) %> + <%- if backend == "src" %> + <%- do conclusion_needs.append(job_name) %> + <%- endif %> <{ job_name }>: - name: Build <{ display_name }> Packages + name: Build Packages if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['build-pkgs'] && fromJSON(needs.prepare-workflow.outputs.runners)['<{ runner_type }>'] }} needs: - prepare-workflow - - build-salt-onedir + - 
build-salt-onedir-<{ platform }> uses: ./.github/workflows/build-<{ pkg_type }>-packages.yml with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + relenv-version: "<{ relenv_version }>" + python-version: "<{ python_version }>" + source: "<{ backend }>" + <%- if pkg_type in ("macos", "windows") and gh_environment %> + environment: <{ gh_environment }> + <%- if pkg_type == "macos" %> + sign-packages: true + <%- else %> + sign-packages: <% if gh_environment == 'nightly' -%> false <%- else -%> ${{ inputs.sign-windows-packages }} <%- endif %> + <%- endif %> + secrets: inherit + <%- endif %> + <%- endfor %> <%- endfor %> diff --git a/.github/workflows/templates/build-repos.yml.jinja b/.github/workflows/templates/build-repos.yml.jinja index 85cea3e38db9..58c413cf5473 100644 --- a/.github/workflows/templates/build-repos.yml.jinja +++ b/.github/workflows/templates/build-repos.yml.jinja @@ -1,17 +1,17 @@ -<%- for type, display_name, needs_pkg in ( - ("src", "Source", False), - ("deb", "DEB", True), - ("rpm", "RPM", True), - ("windows", "Windows", True), - ("macos", "macOS", True), - ("onedir", "Onedir", False), - ) %> +<%- for type, display_name in ( + ("src", "Source"), + ("deb", "DEB"), + ("rpm", "RPM"), + ("windows", "Windows"), + ("macos", "macOS"), + ("onedir", "Onedir"), + ) %> <%- set job_name = "build-{}-repo".format(type) %> <%- do build_repo_needs.append(job_name) %> <{ job_name }>: - name: Build <{ display_name }> Repository + name: Build Repository environment: <{ gh_environment }> runs-on: - self-hosted @@ -19,11 +19,16 @@ - repo-<{ gh_environment }> needs: - prepare-workflow - <%- if needs_pkg %> - - build-<{ type }>-pkgs - <%- else %> - - build-salt-onedir + <%- if type not in ("src", "onedir") %> + - build-<{ type }>-pkgs-onedir + <%- elif type == 'onedir' %> + - build-salt-onedir-linux + - build-salt-onedir-macos + - build-salt-onedir-windows + <%- elif type == 'src' %> + - 
build-source-tarball <%- endif %> + <%- include "build-{}-repo.yml.jinja".format(type) %> <%- endfor %> diff --git a/.github/workflows/templates/build-rpm-repo.yml.jinja b/.github/workflows/templates/build-rpm-repo.yml.jinja index 3b440443a608..7e99a9686964 100644 --- a/.github/workflows/templates/build-rpm-repo.yml.jinja +++ b/.github/workflows/templates/build-rpm-repo.yml.jinja @@ -3,30 +3,49 @@ fail-fast: false matrix: include: - - distro: amazon - version: "2" - arch: x86_64 - - distro: redhat - version: "7" - arch: x86_64 - - distro: redhat - version: "8" - arch: x86_64 - - distro: redhat - version: "9" - arch: x86_64 - - distro: fedora - version: "36" - arch: x86_64 - - distro: fedora - version: "37" - arch: x86_64 - - distro: fedora - version: "38" - arch: x86_64 + <%- for distro, version, arch in ( + ("amazon", "2", "x86_64"), + ("amazon", "2", "arm64"), + ("amazon", "2", "aarch64"), + ("amazon", "2023", "x86_64"), + ("amazon", "2023", "arm64"), + ("amazon", "2023", "aarch64"), + ("redhat", "7", "x86_64"), + ("redhat", "7", "arm64"), + ("redhat", "7", "aarch64"), + ("redhat", "8", "x86_64"), + ("redhat", "8", "arm64"), + ("redhat", "8", "aarch64"), + ("redhat", "9", "x86_64"), + ("redhat", "9", "arm64"), + ("redhat", "9", "aarch64"), + ("fedora", "36", "x86_64"), + ("fedora", "36", "arm64"), + ("fedora", "36", "aarch64"), + ("fedora", "37", "x86_64"), + ("fedora", "37", "arm64"), + ("fedora", "37", "aarch64"), + ("fedora", "38", "x86_64"), + ("fedora", "38", "arm64"), + ("fedora", "38", "aarch64"), + ("photon", "3", "x86_64"), + ("photon", "3", "arm64"), + ("photon", "3", "aarch64"), + ("photon", "4", "x86_64"), + ("photon", "4", "arm64"), + ("photon", "4", "aarch64"), + ("photon", "5", "x86_64"), + ("photon", "5", "arm64"), + ("photon", "5", "aarch64"), + ) %> + - pkg-type: rpm + distro: <{ distro }> + version: "<{ version }>" + arch: <{ arch }> + <%- endfor %> steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Download System 
Dependencies run: | @@ -35,11 +54,19 @@ - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download RPM Packages uses: actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch }}-rpm + name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}-rpm path: artifacts/pkgs/incoming - name: Setup GnuPG @@ -76,9 +103,16 @@ mkdir -p artifacts/pkgs/repo - name: Create Repository + env: + <%- if gh_environment == 'staging' %> + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + <%- endif %> + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | - tools pkg repo create rpm --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create rpm --key-id=<{ gpg_key_id }> --distro-arch=${{ matrix.arch }} <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --distro=${{ matrix.distro }} --distro-version=${{ matrix.version }} \ 
--incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo diff --git a/.github/workflows/templates/build-src-repo.yml.jinja b/.github/workflows/templates/build-src-repo.yml.jinja index 4f7045d7df76..437da3308888 100644 --- a/.github/workflows/templates/build-src-repo.yml.jinja +++ b/.github/workflows/templates/build-src-repo.yml.jinja @@ -1,9 +1,23 @@ + strategy: + fail-fast: false + matrix: + pkg-type: + - src + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download Source Tarball uses: actions/download-artifact@v3 @@ -46,18 +60,23 @@ - name: Create Repository run: | - tools pkg repo create src --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ - --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create src --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo + - name: Copy Files For Source Only Artifact Uploads + run: | + mkdir artifacts/src + find artifacts/pkgs/repo -type f -print -exec cp {} artifacts/src \; + - name: Upload Standalone Repository As An Artifact uses: actions/upload-artifact@v3 with: name: salt-${{ 
needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-src-repo path: | - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* - artifacts/pkgs/repo/salt/py3/src/${{ needs.prepare-workflow.outputs.salt-version }}/*-GPG-* + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + artifacts/src/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.* + artifacts/src/*-GPG-* retention-days: 7 if-no-files-found: error diff --git a/.github/workflows/templates/build-windows-repo.yml.jinja b/.github/workflows/templates/build-windows-repo.yml.jinja index 077b09f7520b..a86daf7f58bf 100644 --- a/.github/workflows/templates/build-windows-repo.yml.jinja +++ b/.github/workflows/templates/build-windows-repo.yml.jinja @@ -1,9 +1,23 @@ + strategy: + fail-fast: false + matrix: + pkg-type: + - windows + steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - name: Download Windows NSIS x86 Packages uses: actions/download-artifact@v3 @@ -64,8 +78,8 @@ - name: Create Repository run: | - tools pkg repo create windows --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build <%- endif %> \ - --salt-version=${{ 
needs.prepare-workflow.outputs.salt-version }} ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ + tools pkg repo create windows --key-id=<{ gpg_key_id }> <% if gh_environment == 'nightly' -%> --nightly-build-from=${{ github.ref_name }} <%- endif %> \ + --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} \ --incoming=artifacts/pkgs/incoming --repo-path=artifacts/pkgs/repo - name: Upload Repository As An Artifact diff --git a/.github/workflows/templates/check-workflow-run.yml.jinja b/.github/workflows/templates/check-workflow-run.yml.jinja deleted file mode 100644 index 3c0ea2a8b592..000000000000 --- a/.github/workflows/templates/check-workflow-run.yml.jinja +++ /dev/null @@ -1,45 +0,0 @@ -{#- This workflow will restart failed workflow runs. - We should stop using this workflow once we remove the flakyness from - Salt's test suite --#} ---- -name: <{ workflow_name }> - -on: - workflow_run: - types: - - completed - workflows: - <%- for workflow in check_workflows %> - - <{ workflow }> - <%- endfor %> - -permissions: - actions: write - -jobs: - restart-failed-workflow-runs: - name: "Restart Workflow (ID: ${{ github.event.workflow_run.id }}; Attempt: ${{ github.event.workflow_run.run_attempt }})" - if: ${{ github.event.workflow_run.conclusion == 'failure' }} - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Set up Python 3.10 - uses: actions/setup-python@v4 - id: setup-python - with: - python-version: "3.10" - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Pretty Print The GH Actions Event - run: - tools ci print-gh-event - - - name: Restart Workflow - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: | - tools ci rerun-workflow diff --git a/.github/workflows/templates/ci.yml.jinja b/.github/workflows/templates/ci.yml.jinja index f9a6cd461041..b02604c40d8c 100644 --- a/.github/workflows/templates/ci.yml.jinja +++ 
b/.github/workflows/templates/ci.yml.jinja @@ -1,12 +1,6 @@ <%- extends 'layout.yml.jinja' %> <%- set pre_commit_version = "3.0.4" %> -<%- block on %> -on: - push: {} - pull_request: {} -<%- endblock on %> - <%- block jobs %> <{- super() }> @@ -58,36 +52,26 @@ on: - x86_64 <%- else %> if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} - runs-on: - - ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} <%- endif %> needs: - prepare-workflow steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 + + <%- if not prepare_actual_release %> - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version + - name: Set up Python 3.10 + uses: actions/setup-python@v4 with: - python-binary: python3 + python-version: "3.10" + + <%- endif %> - name: Setup Python Tools Scripts - id: python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Cache Python Tools Docs Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/docs - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|docs|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/docs.txt') }} - - - name: Cache Python Tools Changelog Virtualenv - uses: actions/cache@v3 with: - path: .tools-venvs/changelog - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|changelog|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/changelog.txt') }} - + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-changelog - name: Setup Salt Version id: setup-salt-version @@ -98,28 +82,40 @@ on: release: true <%- endif 
%> - # TODO: Remove the --salt-version argument post 3006 release. This was to handle versioning - # issues on pre-3006 development versions on deb-based distros. - name: Update Debian changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | - tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" --draft - tools changelog update-deb "${{ needs.prepare-workflow.outputs.salt-version }}" + tools changelog update-deb --draft + tools changelog update-deb - name: Update RPM changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-rpm --draft tools changelog update-rpm - name: Update Release Notes shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | + <%- if gh_environment == 'nightly' %> + if [ "${{ contains(fromJSON('["master"]'), github.ref_name) }}" == "true" ]; then + tools changelog update-release-notes --draft <%- if prepare_actual_release %> --release <%- endif %> --next-release + tools changelog update-release-notes <%- if prepare_actual_release %> --release <%- endif %> --next-release + else + tools changelog update-release-notes --draft <%- if prepare_actual_release %> --release <%- endif %> + tools changelog update-release-notes <%- if prepare_actual_release %> --release <%- endif %> + fi + <%- else %> tools changelog update-release-notes --draft <%- if prepare_actual_release %> --release <%- endif %> tools changelog update-release-notes <%- if prepare_actual_release %> --release <%- endif %> + <%- endif %> - name: Generate MAN Pages shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: LATEST_RELEASE: "${{ needs.prepare-workflow.outputs.salt-version }}" SALT_ON_SALTSTACK: "1" @@ -128,22 +124,26 @@ on: - name: Update Changelog shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | tools changelog update-changelog-md --draft tools changelog 
update-changelog-md - name: Show Changes Diff shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git diff --color - name: Configure Git shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git config --global user.name "Salt Project Packaging" git config --global user.email saltproject-packaging@vmware.com - name: Setup Pre-Commit + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} uses: ./.github/actions/setup-pre-commit with: version: "<{ pre_commit_version }>" @@ -151,8 +151,10 @@ on: - name: Commit Changes shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} env: SKIP: lint-salt,lint-tests + PRE_COMMIT_COLOR: always run: | # Run it twice so that pre-commit can fix anything that can be automatically fixed. git commit -am "Release v${{ needs.prepare-workflow.outputs.salt-version }}" || \ @@ -160,11 +162,13 @@ on: - name: Create release changes patch shell: bash + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} run: | git format-patch --keep-subject --binary --stdout HEAD^ > salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch - name: Upload Changes Diff Artifact uses: actions/upload-artifact@v3 + if: ${{ startsWith(github.event.ref, 'refs/tags') == false }} with: name: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch path: salt-${{ needs.prepare-workflow.outputs.salt-version }}.patch @@ -201,24 +205,19 @@ on: needs: - prepare-workflow - prepare-release - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "medium", "x86_64"]') || 'ubuntu-latest' }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python 3.10 uses: actions/setup-python@v4 with: python-version: "3.10" - - name: Get Python Version - id: get-python-version - uses: ./.github/actions/get-python-version - with: - python-binary: python3 - - name: Setup Python Tools Scripts - id: 
python-tools-scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-build - name: Setup Salt Version id: setup-salt-version @@ -226,12 +225,6 @@ on: with: salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - - name: Cache Python Tools Build Virtualenv - uses: actions/cache@v3 - with: - path: .tools-venvs/build - key: ${{ needs.prepare-workflow.outputs.cache-seed }}|${{ github.workflow }}|${{ github.job }}|tools-venvs|${{ steps.python-tools-scripts.outputs.version }}|build|${{ steps.get-python-version.outputs.version }}|${{ hashFiles('requirements/**/build.txt') }} - - name: Build Source Tarball uses: ./.github/actions/build-source-tarball with: @@ -242,49 +235,51 @@ on: <%- set job_name = "build-deps-onedir" %> <%- if includes.get(job_name, True) %> + <%- for platform in ("linux", "windows", "macos") %> + <%- set platform_job_name = "{}-{}".format(job_name, platform) %> - <{ job_name }>: - <%- do conclusion_needs.append(job_name) %> + <{ platform_job_name }>: + <%- do conclusion_needs.append(platform_job_name) %> name: Build Dependencies Onedir if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - uses: ./.github/workflows/build-deps-onedir.yml + uses: ./.github/workflows/build-deps-onedir-<{ platform }>.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "<{ relenv_version }>" - python-version-linux: "<{ python_version_linux }>" - python-version-macos: "<{ python_version_macos }>" - python-version-windows: "<{ python_version_windows }>" + python-version: "<{ python_version 
}>" + <%- endfor %> <%- endif %> <%- set job_name = "build-salt-onedir" %> <%- if includes.get(job_name, True) %> + <%- for platform in ("linux", "windows", "macos") %> + <%- set platform_job_name = "{}-{}".format(job_name, platform) %> - <{ job_name }>: - <%- do conclusion_needs.append(job_name) %> + <{ platform_job_name }>: + <%- do conclusion_needs.append(platform_job_name) %> name: Build Salt Onedir if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['<{ job_name }>'] }} needs: - prepare-workflow - - build-deps-onedir + - build-deps-onedir-<{ platform }> - build-source-tarball - uses: ./.github/workflows/build-salt-onedir.yml + uses: ./.github/workflows/build-salt-onedir-<{ platform }>.yml with: cache-seed: ${{ needs.prepare-workflow.outputs.cache-seed }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" self-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} github-hosted-runners: ${{ fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} relenv-version: "<{ relenv_version }>" - python-version-linux: "<{ python_version_linux }>" - python-version-macos: "<{ python_version_macos }>" - python-version-windows: "<{ python_version_windows }>" + python-version: "<{ python_version }>" + <%- endfor %> <%- endif %> @@ -293,16 +288,148 @@ on: <%- include "build-packages.yml.jinja" %> <%- endif %> + <%- set pkg_tests_job_name = "pkg-tests" %> + <%- set salt_tests_job_name = "salt-tests" %> + <%- if includes.get(pkg_tests_job_name, True) or includes.get(salt_tests_job_name, True) %> + <%- include "build-ci-deps.yml.jinja" %> + <%- endif %> - <%- set job_name = "pkg-tests" %> - <%- if includes.get(job_name, True) %> + <%- if includes.get(pkg_tests_job_name, True) %> <%- include "test-salt-pkg.yml.jinja" %> <%- endif %> - <%- set job_name = "salt-tests" %> - <%- if includes.get(job_name, True) %> + <%- if includes.get(salt_tests_job_name, True) %> <%- include "test-salt.yml.jinja" %> <%- endif %> + <%- if 
skip_test_coverage_check == "false" or "skip_code_coverage" in skip_test_coverage_check %> + + combine-all-code-coverage: + <%- do conclusion_needs.append("combine-all-code-coverage") %> + name: Combine Code Coverage + if: ${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] == false }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + needs: + - prepare-workflow + <%- for need in test_salt_needs.iter(consume=False) %> + - <{ need }> + <%- endfor %> + steps: + - uses: actions/checkout@v4 + + - name: Set up Python 3.10 + if: ${{ github.event.repository.private == false }} + uses: actions/setup-python@v4 + with: + python-version: "3.10" + + - name: Setup Python Tools Scripts + id: python-tools-scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}-coverage + + - name: Install Nox + run: | + python3 -m pip install 'nox==<{ nox_version }>' + +{# We can't yet use tokenless uploads with the codecov CLI + + - name: Install Codecov CLI + run: | + python3 -m pip install codecov-cli + + - name: Tell Codecov To Process Reports + run: | + codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + send-notifications --git-service github --sha ${{ github.sha }} + +#} + + - name: Get coverage reports + id: get-coverage-reports + uses: actions/download-artifact@v3 + with: + name: all-testrun-coverage-artifacts + path: artifacts/coverage/ + + - name: Display structure of downloaded files + run: tree -a artifacts/ + + - name: Install Codecov CLI + run: | + # We can't yet use tokenless uploads with the codecov CLI + # python3 -m pip install codecov-cli + # + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import + curl -Os https://uploader.codecov.io/latest/linux/codecov + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM + 
curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + mv ./codecov /usr/local/bin/ + + - name: Create XML Coverage Reports + run: | + nox --force-color -e create-xml-coverage-reports + + - name: Upload Code Coverage To Codecov + run: | + tools ci upload-coverage --commit-sha=${{ github.event.pull_request.head.sha || github.sha }} artifacts/coverage/ + + - name: Combine Code Coverage + run: | + nox --force-color -e combine-coverage + + - name: Report Salt Code Coverage + run: | + nox --force-color -e coverage-report -- salt + + - name: Create Salt Code Coverage HTML Report + run: | + nox --force-color -e create-html-coverage-report -- salt + + - name: Create Salt Code Coverage HTML Report + run: | + nox --force-color -e create-html-coverage-report -- salt + + - name: Upload Salt Code Coverage HTML Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-salt-html-report + path: artifacts/coverage/html/salt + retention-days: 7 + if-no-files-found: error + + - name: Report Combined Code Coverage + run: | + nox --force-color -e coverage-report + + - name: Create Combined Code Coverage JSON Report + run: | + nox --force-color -e create-json-coverage-reports + + - name: Upload Combined Code Coverage JSON Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-full-json-report + path: artifacts/coverage/coverage.json + retention-days: 7 + if-no-files-found: error + + - name: Create Combined Code Coverage HTML Report + run: | + nox --force-color -e create-html-coverage-report + + - name: Upload Combined Code Coverage HTML Report + uses: actions/upload-artifact@v3 + with: + name: code-coverage-full-html-report + path: artifacts/coverage/html/full + retention-days: 7 + if-no-files-found: error + <%- endif %> + <%- endblock jobs %> diff --git a/.github/workflows/templates/layout.yml.jinja 
b/.github/workflows/templates/layout.yml.jinja index 85b321d17a98..5934eb8d579b 100644 --- a/.github/workflows/templates/layout.yml.jinja +++ b/.github/workflows/templates/layout.yml.jinja @@ -3,27 +3,30 @@ <%- set prepare_workflow_if_check = prepare_workflow_if_check|default(False) %> <%- set prepare_workflow_skip_test_suite = prepare_workflow_skip_test_suite|default("") %> <%- set prepare_workflow_skip_pkg_test_suite = prepare_workflow_skip_pkg_test_suite|default("") %> +<%- set prepare_workflow_skip_pkg_download_test_suite = prepare_workflow_skip_pkg_download_test_suite|default("") %> <%- set prepare_workflow_salt_version_input = prepare_workflow_salt_version_input|default("") %> -<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ github.event_name == 'pull_request' }}") %> +<%- set skip_test_coverage_check = skip_test_coverage_check|default("${{ fromJSON(needs.prepare-workflow.outputs.testrun)['skip_code_coverage'] }}") %> <%- set skip_junit_reports_check = skip_junit_reports_check|default("${{ github.event_name == 'pull_request' }}") %> -<%- set python_version_linux = "3.10.10" %> -<%- set python_version_macos = "3.10.10" %> -<%- set python_version_windows = "3.10.10" %> -<%- set relenv_version = "0.7.0" %> <%- set gpg_key_id = "64CBBC8173D76B3F" %> <%- set prepare_actual_release = prepare_actual_release | default(False) %> -<%- set release_branches = ["master", "3006.x"] %> +<%- set gh_actions_workflows_python_version = "3.10" %> --- <%- block name %> name: <{ workflow_name }> -run-name: "<{ workflow_name }> (${{ github.event_name == 'pull_request' && format('PR: #{0}', github.event.number) || format('Branch: {0}', github.ref_name) }})" +run-name: "<{ workflow_name }> (${{ github.event_name == 'pull_request' && format('pr: #{0}', github.event.number) || format('{0}: {1}', startsWith(github.event.ref, 'refs/tags') && 'tag' || 'branch', github.ref_name) }})" <%- endblock name %> <%- block on %> on: push: {} - pull_request: {} + 
pull_request: + types: + - labeled + - unlabeled + - opened + - reopened + - synchronize <%- endblock on %> @@ -31,7 +34,7 @@ on: env: COLUMNS: 190 - CACHE_SEED: SEED-2 # Bump the number to invalidate all caches + CACHE_SEED: SEED-7 # Bump the number to invalidate all caches RELENV_DATA: "${{ github.workspace }}/.relenv" <%- endblock env %> @@ -41,6 +44,9 @@ env: permissions: contents: read # for dorny/paths-filter to fetch a list of changed files pull-requests: read # for dorny/paths-filter to read pull requests +<%- if workflow_slug not in ("nightly", "scheduled") %> + actions: read # for technote-space/workflow-conclusion-action to get the job statuses +<%- endif %> <%- endblock permissions %> @@ -69,7 +75,7 @@ jobs: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} <%- if prepare_workflow_if_check %> if: <{ prepare_workflow_if_check }> <%- endif %> @@ -83,11 +89,15 @@ jobs: jobs: ${{ steps.define-jobs.outputs.jobs }} runners: ${{ steps.runner-types.outputs.runners }} changed-files: ${{ steps.process-changed-files.outputs.changed-files }} + pull-labels: ${{ steps.get-pull-labels.outputs.labels }} testrun: ${{ steps.define-testrun.outputs.testrun }} salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} + testing-releases: ${{ steps.get-testing-releases.outputs.testing-releases }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 # Full clone to also get the tags to get the right salt version @@ -133,9 +143,11 @@ jobs: - *doc_requirements workflows: - added|modified: + - cicd/shared-gh-workflows-context.yml - .github/actions/**/action.yml - .github/workflows/*.yml - 
.github/workflows/templates/*.yml.jinja2 + - tools/precommit/workflows.py salt: - added|modified: &salt_added_modified - setup.py @@ -173,11 +185,18 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: tools ci print-gh-event + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} + - name: Setup Salt Version id: setup-salt-version uses: ./.github/actions/setup-salt-version @@ -185,11 +204,25 @@ jobs: salt-version: "<{ prepare_workflow_salt_version_input }>" validate-version: true + - name: Get Pull Request Test Labels + id: get-pull-labels + if: ${{ github.event_name == 'pull_request'}} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-pr-test-labels --repository ${{ github.repository }} + <%- if prepare_actual_release %> - name: Check Existing Releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + if [ "${{ github.event.repository.private }}" = "true" ]; then + tools pkg repo confirm-unreleased --repository saltstack/salt ${{ steps.setup-salt-version.outputs.salt-version }} + fi + <%- endif %> @@ -224,12 +257,34 @@ jobs: - name: Define Jobs id: define-jobs run: | - tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{ prepare_workflow_skip_pkg_test_suite }> ${{ github.event_name }} changed-files.json + tools ci define-jobs<{ prepare_workflow_skip_test_suite }><{ + prepare_workflow_skip_pkg_test_suite }><{ prepare_workflow_skip_pkg_download_test_suite + }> ${{ github.event_name }} changed-files.json - name: Check Defined Jobs run: | echo '${{ steps.define-jobs.outputs.jobs }}' | jq -C '.' 
+ - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Get Latest Salt Releases for Testing + id: get-testing-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-testing-releases ${{ join(fromJSON(steps.get-salt-releases.outputs.releases), ' ') }} --salt-version ${{ steps.setup-salt-version.outputs.salt-version }} + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' + echo '${{ steps.get-testing-releases.outputs.testing-releases }}' | jq -C '.' + - name: Define Testrun id: define-testrun run: | @@ -251,10 +306,27 @@ jobs: name: testrun-changed-files.txt path: testrun-changed-files.txt - - name: Set Cache Seed Output - id: set-cache-seed +{# We can't yet use tokenless uploads with the codecov CLI + + - name: Install Codecov CLI + if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }} + run: | + python3 -m pip install codecov-cli + + - name: Save Commit Metadata In Codecov + if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }} run: | - echo "cache-seed=${{ env.CACHE_SEED }}" >> "$GITHUB_OUTPUT" + codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + create-commit --git-service github --sha ${{ github.sha }} + + - name: Create Codecov Coverage Report + if: ${{ fromJSON(steps.define-testrun.outputs.testrun)['skip_code_coverage'] == false }} + run: | + codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + create-report --git-service github --sha ${{ github.sha }} + +#} + <%- endblock prepare_workflow_job %> <%- endif %> @@ -265,7 +337,10 @@ jobs: # on a pull request instead of requiring all name: Set the ${{ github.workflow }} Pipeline Exit Status if: 
always() - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + <%- if workflow_slug == "nightly" %> + environment: <{ workflow_slug }> + <%- endif %> needs: <%- for need in prepare_workflow_needs.iter(consume=True) %> - <{ need }> @@ -287,6 +362,9 @@ jobs: id: get-workflow-info uses: technote-space/workflow-conclusion-action@v3 + <%- block set_pipeline_exit_status_extra_steps %> + <%- endblock set_pipeline_exit_status_extra_steps %> + - name: Set Pipeline Exit Status shell: bash run: | diff --git a/.github/workflows/templates/nightly.yml.jinja b/.github/workflows/templates/nightly.yml.jinja index eabec06bea91..e4350f44a366 100644 --- a/.github/workflows/templates/nightly.yml.jinja +++ b/.github/workflows/templates/nightly.yml.jinja @@ -1,23 +1,34 @@ <%- set gh_environment = gh_environment|default("nightly") %> <%- set skip_test_coverage_check = skip_test_coverage_check|default("false") %> <%- set skip_junit_reports_check = skip_junit_reports_check|default("false") %> +<%- set prepare_workflow_skip_test_suite = "${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}" %> +<%- set prepare_workflow_skip_pkg_test_suite = "${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}" %> <%- set prepare_workflow_if_check = prepare_workflow_if_check|default("${{ fromJSON(needs.workflow-requirements.outputs.requirements-met) }}") %> <%- extends 'ci.yml.jinja' %> <%- block name %> name: <{ workflow_name }> -run-name: "<{ workflow_name }> (${{ format('Branch: {0}', github.ref_name) }})" +run-name: "<{ workflow_name }> (branch: ${{ github.ref_name }})" <%- endblock name %> <%- block on %> on: - workflow_dispatch: {} + workflow_dispatch: + inputs: + skip-salt-test-suite: + type: boolean + default: false + description: Skip running the Salt test suite. + skip-salt-pkg-test-suite: + type: boolean + default: false + description: Skip running the Salt packages test suite. 
schedule: # https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#onschedule - - cron: '0 1 * * *' # Every day at 1AM + - cron: '0 0 * * *' # Every day at 0AM <%- endblock on %> @@ -41,6 +52,81 @@ concurrency: <%- include "workflow-requirements-check.yml.jinja" %> <%- include "trigger-branch-workflows.yml.jinja" %> + {#- When we start using a slack app, we can update messages, not while using incoming webhooks + <%- if workflow_slug == "nightly" %> + + <%- do conclusion_needs.append('notify-slack') %> + notify-slack: + name: Notify Slack + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} + environment: <{ gh_environment }> + needs: + <%- for need in prepare_workflow_needs.iter(consume=False) %> + - <{ need }> + <%- endfor %> + outputs: + update-ts: ${{ steps.slack.outputs.update-ts }} + steps: + - name: Notify Slack + id: slack + uses: slackapi/slack-github-action@v1.24.0 + with: + payload: | + { + "attachments": [ + { + "color": "ffca28", + "fields": [ + { + "title": "Workflow", + "short": true, + "value": "${{ github.workflow }}", + "type": "mrkdwn" + }, + { + "title": "Workflow Run", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>", + "type": "mrkdwn" + }, + { + "title": "Branch", + "short": true, + "value": "${{ github.ref_name }}", + "type": "mrkdwn" + }, + { + "title": "Commit", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>", + "type": "mrkdwn" + }, + { + "title": "Attempt", + "short": true, + "value": "${{ github.run_attempt }}", + "type": "mrkdwn" + }, + { + "title": "Status", + "short": true, + "value": "running", + "type": "mrkdwn" + } + ], + "author_name": "${{ github.event.sender.login }}", + "author_link": "${{ github.event.sender.html_url }}", + "author_icon": "${{ 
github.event.sender.avatar_url }}" + } + ] + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK + + <%- endif %> + #} + <%- endblock pre_jobs %> <%- block jobs %> @@ -53,6 +139,7 @@ concurrency: publish-repositories: <%- do conclusion_needs.append('publish-repositories') %> name: Publish Repositories + if: ${{ always() && ! failure() && ! cancelled() }} runs-on: - self-hosted - linux @@ -70,7 +157,7 @@ concurrency: <%- endif %> steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -80,6 +167,8 @@ concurrency: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Repository Artifact uses: actions/download-artifact@v3 @@ -97,10 +186,80 @@ concurrency: tree -a artifacts/pkgs/repo/ - name: Upload Repository Contents (<{ gh_environment }>) + env: + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | - tools pkg repo publish <{ gh_environment }> - <%- if gh_environment in ("staging", "release") %> \ - ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} - <%- endif %> artifacts/pkgs/repo/ + tools pkg repo publish <{ gh_environment }> --salt-version=${{ needs.prepare-workflow.outputs.salt-version }} artifacts/pkgs/repo/ <%- endblock jobs %> + +<%- block set_pipeline_exit_status_extra_steps %> + + <%- if workflow_slug == "nightly" %> + + - name: Notify Slack + id: slack + if: always() + uses: slackapi/slack-github-action@v1.24.0 + with: + {#- When we start using a slack app, we can update messages, not while using incoming webhooks + update-ts: ${{ needs.notify-slack.outputs.update-ts }} + #} + payload: | + { + "attachments": [ + { + "fallback": "${{ 
github.workflow }} Workflow build result for the `${{ github.ref_name }}` branch(attempt: ${{ github.run_attempt }}): `${{ steps.get-workflow-info.outputs.conclusion }}`\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}", + "color": "${{ steps.get-workflow-info.outputs.conclusion != 'success' && 'ff3d00' || '00e676' }}", + "fields": [ + { + "title": "Workflow", + "short": true, + "value": "${{ github.workflow }}", + "type": "mrkdwn" + }, + { + "title": "Workflow Run", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|${{ github.run_id }}>", + "type": "mrkdwn" + }, + { + "title": "Branch", + "short": true, + "value": "${{ github.ref_name }}", + "type": "mrkdwn" + }, + { + "title": "Commit", + "short": true, + "value": "<${{ github.server_url }}/${{ github.repository }}/commit/${{ github.sha }}|${{ github.sha }}>", + "type": "mrkdwn" + }, + { + "title": "Attempt", + "short": true, + "value": "${{ github.run_attempt }}", + "type": "mrkdwn" + }, + { + "title": "Status", + "short": true, + "value": "${{ steps.get-workflow-info.outputs.conclusion }}", + "type": "mrkdwn" + } + ], + "author_name": "${{ github.event.sender.login }}", + "author_link": "${{ github.event.sender.html_url }}", + "author_icon": "${{ github.event.sender.avatar_url }}" + } + ] + } + env: + SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} + SLACK_WEBHOOK_TYPE: INCOMING_WEBHOOK + + <%- endif %> + +<%- endblock set_pipeline_exit_status_extra_steps %> diff --git a/.github/workflows/templates/release.yml.jinja b/.github/workflows/templates/release.yml.jinja index 0e36e4c18c61..b020138baff8 100644 --- a/.github/workflows/templates/release.yml.jinja +++ b/.github/workflows/templates/release.yml.jinja @@ -1,4 +1,5 @@ <%- set prepare_workflow_salt_version_input = "${{ inputs.salt-version }}" %> +<%- set prepare_workflow_skip_pkg_download_test_suite = "${{ inputs.skip-salt-pkg-download-test-suite && ' 
--skip-pkg-download-tests' || '' }}" %> <%- set gh_environment = "release" %> <%- extends 'layout.yml.jinja' %> @@ -6,7 +7,7 @@ <%- block name %> name: <{ workflow_name }> -run-name: "<{ workflow_name }> (${{ format('Branch: {0} // Version: {1}', github.ref_name, inputs.salt-version) }})" +run-name: "<{ workflow_name }> (branch: ${{ github.ref_name }}; version: ${{ inputs.salt-version }})" <%- endblock name %> @@ -22,14 +23,13 @@ on: description: > The Salt version to get from staging to publish the release. (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0). + skip-salt-pkg-download-test-suite: + type: boolean + default: false + description: Skip running the Salt packages download test suite. <%- endblock on %> -<%- block env %> - <{- super() }> - REPO_BASE_URL: "https://${{ secrets.SALT_REPO_DOMAIN }}" -<%- endblock env %> - <%- block concurrency %> concurrency: @@ -52,7 +52,7 @@ permissions: <{ job_name }>: <%- do prepare_workflow_needs.append(job_name) %> name: Check Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} environment: <{ gh_environment }>-check steps: - name: Check For Admin Permission @@ -61,17 +61,6 @@ permissions: require: admin username: ${{ github.triggering_actor }} - - name: Check Branch - run: | - echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('<{ release_branches|tojson }>'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: <{ release_branches|join(', ') }>" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - <%- endblock pre_jobs %> @@ -81,7 +70,11 @@ permissions: prepare-workflow: name: Prepare Workflow Run - runs-on: ubuntu-latest + runs-on: + - self-hosted + - linux + - repo-<{ gh_environment }> + environment: <{ 
gh_environment }> <%- if prepare_workflow_needs %> needs: <%- for need in prepare_workflow_needs.iter(consume=False) %> @@ -90,8 +83,11 @@ permissions: <%- endif %> outputs: salt-version: ${{ steps.setup-salt-version.outputs.salt-version }} + cache-seed: ${{ steps.set-cache-seed.outputs.cache-seed }} + latest-release: ${{ steps.get-salt-releases.outputs.latest-release }} + releases: ${{ steps.get-salt-releases.outputs.releases }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 with: fetch-depth: 0 # Full clone to also get the tags to get the right salt version @@ -102,6 +98,8 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ env.CACHE_SEED }} - name: Pretty Print The GH Actions Event run: @@ -115,8 +113,36 @@ permissions: validate-version: true - name: Check Existing Releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | tools pkg repo confirm-unreleased --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + if [ "${{ github.event.repository.private }}" = "true" ]; then + tools pkg repo confirm-unreleased --repository saltstack/salt ${{ steps.setup-salt-version.outputs.salt-version }} + fi + + - name: Check Release Staged + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools pkg repo confirm-staged --repository ${{ github.repository }} ${{ steps.setup-salt-version.outputs.salt-version }} + + - name: Get Salt Releases + id: get-salt-releases + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: | + tools ci get-releases + + - name: Check Salt Releases + run: | + echo '${{ steps.get-salt-releases.outputs.latest-release }}' | jq -C '.' + echo '${{ steps.get-salt-releases.outputs.releases }}' | jq -C '.' 
+ + - name: Set Cache Seed Output + id: set-cache-seed + run: | + tools ci define-cache-seed ${{ env.CACHE_SEED }} <%- endblock prepare_workflow_job %> <%- endif %> @@ -124,6 +150,59 @@ permissions: <%- block jobs %> <{- super() }> + download-onedir-artifact: + name: Download Staging Onedir Artifact + runs-on: + - self-hosted + - linux + - repo-<{ gh_environment }> + environment: <{ gh_environment }> + needs: + - prepare-workflow + strategy: + fail-fast: false + matrix: + include: + - platform: linux + arch: x86_64 + - platform: linux + arch: aarch64 + - platform: windows + arch: amd64 + - platform: windows + arch: x86 + - platform: darwin + arch: x86_64 + - platform: darwin + arch: aarch64 + steps: + - uses: actions/checkout@v4 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: Download Onedir Tarball Artifact + run: | + tools release download-onedir-artifact --platform=${{ matrix.platform }} --arch=${{ matrix.arch }} ${{ inputs.salt-version }} + + - name: Upload Onedir Tarball as an Artifact + uses: actions/upload-artifact@v3 + with: + name: salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz + path: artifacts/salt-${{ inputs.salt-version }}-onedir-${{ matrix.platform }}-${{ matrix.arch }}.tar.xz* + retention-days: 7 + if-no-files-found: error + + <%- include "build-ci-deps.yml.jinja" %> + backup: name: Backup runs-on: @@ -133,15 +212,25 @@ permissions: needs: - prepare-workflow environment: <{ gh_environment }> + outputs: + 
backup-complete: ${{ steps.backup.outputs.backup-complete }} steps: - name: Clone The Salt Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 + + - name: Setup Rclone + uses: AnimMouse/setup-rclone@v1 + with: + version: v1.61.1 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Backup Previous Releases + id: backup run: | tools pkg repo backup-previous-releases @@ -155,11 +244,12 @@ permissions: needs: - prepare-workflow - backup + - download-onedir-artifact environment: <{ gh_environment }> steps: - name: Clone The Salt Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -169,20 +259,24 @@ permissions: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Publish Release Repository + env: + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} run: | - tools pkg repo publish release \ - ${{ contains(needs.prepare-workflow.outputs.salt-version, 'rc') && '--rc-build' || '' }} \ - --key-id=<{ gpg_key_id }> ${{ needs.prepare-workflow.outputs.salt-version }} + tools pkg repo publish <{ gh_environment }> ${{ needs.prepare-workflow.outputs.salt-version }} - <%- if includes.get("test-pkg-uploads", True) %> - <%- include "test-pkg-repo-uploads.yml.jinja" %> + <%- if includes.get("test-pkg-downloads", True) %> + <%- include "test-salt-pkg-repo-downloads.yml.jinja" %> <%- endif %> release: <%- do conclusion_needs.append('release') %> name: Release v${{ needs.prepare-workflow.outputs.salt-version }} + if: ${{ always() && ! failure() && ! 
cancelled() }} runs-on: - self-hosted - linux @@ -197,12 +291,14 @@ permissions: environment: <{ gh_environment }> steps: - name: Clone The Salt Repository - uses: actions/checkout@v3 + uses: actions/checkout@v4 with: ssh-key: ${{ secrets.GHA_SSH_KEY }} - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | @@ -263,6 +359,7 @@ permissions: ssh: true tags: true atomic: true + branch: ${{ github.ref }} - name: Create Github Release uses: ncipollo/release-action@v1.12.0 @@ -279,10 +376,120 @@ permissions: replacesArtifacts: true tag: v${{ needs.prepare-workflow.outputs.salt-version }} + - name: Upload PyPi Artifacts + uses: actions/upload-artifact@v3 + with: + name: pypi-artifacts + path: | + release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc + retention-days: 7 + if-no-files-found: error + + {#- Disable automatic backup restore + restore: + <%- do conclusion_needs.append('restore') %> + name: Restore Release Bucket From Backup + if: ${{ always() && needs.backup.outputs.backup-complete == 'true' && (failure() || cancelled()) }} + runs-on: + - self-hosted + - linux + - repo-<{ gh_environment }> + needs: + - backup + - release + <%- for need in test_repo_needs.iter(consume=True) %> + - <{ need }> + <%- endfor %> + environment: <{ gh_environment }> + steps: + - name: Clone The Salt Repository + uses: actions/checkout@v4 + with: + ssh-key: ${{ secrets.GHA_SSH_KEY }} + + - name: Setup Rclone + uses: AnimMouse/setup-rclone@v1 + with: + version: v1.61.1 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Restore Release Bucket + run: | + tools pkg repo restore-previous-releases + #} + + publish-pypi: + <%- do 
conclusion_needs.append('publish-pypi') %> + name: Publish to PyPi + if: ${{ always() && ! failure() && ! cancelled() && github.event.repository.fork != true }} + needs: + - prepare-workflow + - release {#- Disable automatic backup restore + - restore #} + environment: <{ gh_environment }> + runs-on: + - self-hosted + - linux + - repo-<{ gh_environment }> + steps: + - uses: actions/checkout@v4 + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} + + - name: Setup GnuPG + run: | + sudo install -d -m 0700 -o "$(id -u)" -g "$(id -g)" /run/gpg + GNUPGHOME="$(mktemp -d -p /run/gpg)" + echo "GNUPGHOME=${GNUPGHOME}" >> "$GITHUB_ENV" + cat < "${GNUPGHOME}/gpg.conf" + batch + no-tty + pinentry-mode loopback + EOF + + - name: Get Secrets + id: get-secrets + env: + SECRETS_KEY: ${{ secrets.SECRETS_KEY }} + run: | + SECRETS_KEY_FILE=$(mktemp /tmp/output.XXXXXXXXXX) + echo "$SECRETS_KEY" > "$SECRETS_KEY_FILE" + TWINE_PASSWORD=$(aws --region us-west-2 secretsmanager get-secret-value --secret-id /cmbu-saltstack/publishing/publish-pypi \ + --query SecretString --output text | jq .default_passphrase -r | base64 -d \ + | gpg --passphrase-file "$SECRETS_KEY_FILE" -d -) + echo "::add-mask::$TWINE_PASSWORD" + echo "twine-password=$TWINE_PASSWORD" >> "${GITHUB_OUTPUT}" + + - name: Download PyPi Artifacts + uses: actions/download-artifact@v3 + with: + name: pypi-artifacts + path: artifacts/release + - name: Publish to PyPi env: TWINE_PASSWORD: "${{ steps.get-secrets.outputs.twine-password }}" run: | - tools pkg pypi-upload release-artifacts/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + tools pkg pypi-upload artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz <%- endblock jobs %> + +<%- block set_pipeline_exit_status_extra_steps %> + + - run: | + # shellcheck disable=SC2129 + if [ "${{ steps.get-workflow-info.outputs.conclusion }}" 
!= "success" ]; then + echo 'To restore the release bucket run:' >> "${GITHUB_STEP_SUMMARY}" + echo '```' >> "${GITHUB_STEP_SUMMARY}" + echo 'tools pkg repo restore-previous-releases' >> "${GITHUB_STEP_SUMMARY}" + echo '```' >> "${GITHUB_STEP_SUMMARY}" + fi +<%- endblock set_pipeline_exit_status_extra_steps %> diff --git a/.github/workflows/templates/scheduled.yml.jinja b/.github/workflows/templates/scheduled.yml.jinja index 377d76caffb2..4c78a978a44c 100644 --- a/.github/workflows/templates/scheduled.yml.jinja +++ b/.github/workflows/templates/scheduled.yml.jinja @@ -7,7 +7,7 @@ <%- block name %> name: <{ workflow_name }> -run-name: "<{ workflow_name }> (${{ format('Branch: {0}', github.ref_name) }})" +run-name: "<{ workflow_name }> (branch: ${{ github.ref_name }})" <%- endblock name %> diff --git a/.github/workflows/templates/staging.yml.jinja b/.github/workflows/templates/staging.yml.jinja index a4bfebb2b730..c84ade076364 100644 --- a/.github/workflows/templates/staging.yml.jinja +++ b/.github/workflows/templates/staging.yml.jinja @@ -2,6 +2,7 @@ <%- set prepare_workflow_salt_version_input = "${{ inputs.salt-version }}" %> <%- set prepare_workflow_skip_test_suite = "${{ inputs.skip-salt-test-suite && ' --skip-tests' || '' }}" %> <%- set prepare_workflow_skip_pkg_test_suite = "${{ inputs.skip-salt-pkg-test-suite && ' --skip-pkg-tests' || '' }}" %> +<%- set prepare_workflow_skip_pkg_download_test_suite = "${{ inputs.skip-salt-pkg-download-test-suite && ' --skip-pkg-download-tests' || '' }}" %> <%- set gh_environment = "staging" %> <%- set prepare_actual_release = True %> <%- set skip_test_coverage_check = "true" %> @@ -11,7 +12,7 @@ <%- block name %> name: <{ workflow_name }> -run-name: "<{ workflow_name }> (${{ format('Branch: {0} // Version: {1}', github.ref_name, inputs.salt-version) }})" +run-name: "<{ workflow_name }> (branch: ${{ github.ref_name }}; version: ${{ inputs.salt-version }})" <%- endblock name %> @@ -26,6 +27,14 @@ on: description: > The Salt 
version to set prior to building packages and staging the release. (DO NOT prefix the version with a v, ie, 3006.0 NOT v3006.0). + sign-windows-packages: + type: boolean + default: false + description: Sign Windows Packages + skip-test-pypi-publish: + type: boolean + default: false + description: Skip publishing the source package to Test PyPi(For example, CVE releases) skip-salt-test-suite: type: boolean default: false @@ -34,14 +43,13 @@ on: type: boolean default: false description: Skip running the Salt packages test suite. + skip-salt-pkg-download-test-suite: + type: boolean + default: false + description: Skip running the Salt packages download test suite. <%- endblock on %> -<%- block env %> - <{- super() }> - REPO_BASE_URL: "https://${{ secrets.SALT_REPO_USER }}:${{ secrets.SALT_REPO_PASS }}@${{ secrets.SALT_REPO_DOMAIN }}" -<%- endblock env %> - <%- block concurrency %> concurrency: @@ -58,7 +66,7 @@ concurrency: <{ job_name }>: <%- do prepare_workflow_needs.append(job_name) %> name: Check Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} environment: <{ gh_environment }>-check steps: - name: Check For Admin Permission @@ -67,17 +75,6 @@ concurrency: require: admin username: ${{ github.triggering_actor }} - - name: Check Branch - run: | - echo "Trying to run the staging workflow from branch ${{ github.ref_name }}" - if [ "${{ contains(fromJSON('<{ release_branches|tojson }>'), github.ref_name) }}" != "true" ]; then - echo "Running the staging workflow from the ${{ github.ref_name }} branch is not allowed" - echo "Allowed branches: <{ release_branches|join(', ') }>" - exit 1 - else - echo "Allowed to release from branch ${{ github.ref_name }}" - fi - <%- endblock pre_jobs %> @@ -97,7 +94,7 @@ concurrency: - linux - repo-<{ gh_environment }> steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Get Salt Project GitHub Actions Bot 
Environment run: | @@ -107,6 +104,8 @@ concurrency: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Download Release Patch uses: actions/download-artifact@v3 @@ -132,6 +131,10 @@ concurrency: name: Salt-${{ needs.prepare-workflow.outputs.salt-version }}.epub path: artifacts/release + - name: Show Release Artifacts + run: | + tree -a artifacts/release + {#- - name: Download Release Documentation (PDF) @@ -146,13 +149,24 @@ concurrency: run: | tools release upload-artifacts ${{ needs.prepare-workflow.outputs.salt-version }} artifacts/release - <%- if includes.get("test-pkg-uploads", True) %> - <%- include "test-pkg-repo-uploads.yml.jinja" %> + - name: Upload PyPi Artifacts + uses: actions/upload-artifact@v3 + with: + name: pypi-artifacts + path: | + artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz + artifacts/release/salt-${{ needs.prepare-workflow.outputs.salt-version }}.tar.gz.asc + retention-days: 7 + if-no-files-found: error + + <%- if includes.get("test-pkg-downloads", True) %> + <%- include "test-salt-pkg-repo-downloads.yml.jinja" %> <%- endif %> publish-pypi: <%- do conclusion_needs.append('publish-pypi') %> name: Publish to PyPi(test) + if: ${{ inputs.skip-test-pypi-publish != true && github.event.repository.fork != true }} needs: - prepare-workflow - upload-release-artifacts @@ -171,10 +185,12 @@ concurrency: - linux - repo-<{ gh_environment }> steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }} - name: Setup GnuPG run: | @@ -200,10 +216,10 @@ concurrency: echo "::add-mask::$TWINE_PASSWORD" echo "twine-password=$TWINE_PASSWORD" >> "${GITHUB_OUTPUT}" - - name: Download Source Repository + - name: Download PyPi Artifacts uses: 
actions/download-artifact@v3 with: - name: salt-${{ needs.prepare-workflow.outputs.salt-version }}-<{ gh_environment }>-src-repo + name: pypi-artifacts path: artifacts/release - name: Publish to Test PyPi diff --git a/.github/workflows/templates/test-package-downloads-action.yml.jinja b/.github/workflows/templates/test-package-downloads-action.yml.jinja new file mode 100644 index 000000000000..4f9502d7aae1 --- /dev/null +++ b/.github/workflows/templates/test-package-downloads-action.yml.jinja @@ -0,0 +1,741 @@ +name: Test Download Packages + +on: + workflow_call: + inputs: + salt-version: + type: string + required: true + description: The Salt version of the packages to install and test + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches + environment: + required: true + type: string + description: The environment to run tests against + latest-release: + required: true + type: string + description: The latest salt release + nox-version: + required: true + type: string + description: The nox version to install + python-version: + required: false + type: string + description: The python version to run tests with + default: "3.10" + package-name: + required: false + type: string + description: The onedir package name to use + default: salt + skip-code-coverage: + required: false + type: boolean + description: Skip code coverage + default: false + nox-session: + required: false + type: string + description: The nox session to run + default: ci-test-onedir + +env: + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + +jobs: + + linux: + name: Linux + runs-on: + - self-hosted + - linux + - bastion + environment: ${{ inputs.environment }} + timeout-minutes: 120 # 2 Hours - More than this and something is wrong + strategy: + fail-fast: false + matrix: + include: + <%- for slug, arch, pkg_type in 
test_salt_pkg_downloads_listing["linux"] %> + - distro-slug: <{ slug }> + arch: <{ arch }> + pkg-type: <{ pkg_type }> + <%- endfor %> + + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" + + - name: Checkout Source Code + uses: actions/checkout@v4 + + - name: Download Onedir Tarball as an Artifact + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz + path: artifacts/ + + - name: Decompress Onedir Tarball + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz + + - name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + uses: actions/cache@v3.3.1 + with: + path: nox.${{ matrix.distro-slug }}.tar.* + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }}-pkg-download-linux + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: 
Start VM + id: spin-up-vm + run: | + tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }} + + - name: List Free Space + run: | + tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true + + - name: Upload Checkout To VM + run: | + tools --timestamps vm rsync ${{ matrix.distro-slug }} + + - name: Decompress .nox Directory + run: | + tools --timestamps vm decompress-dependencies ${{ matrix.distro-slug }} + + - name: Show System Info & Test Plan + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ matrix.arch }} + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ + -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ + --nox-session=${{ inputs.nox-session }}-pkgs ${{ matrix.distro-slug }} -- download-pkgs + + - name: Run Package Download Tests + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ matrix.arch }} + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage 
&& '1' || '0' }}" + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ + --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ matrix.distro-slug }} -- download-pkgs + + - name: Combine Coverage Reports + if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled' + run: | + tools --timestamps vm combine-coverage ${{ matrix.distro-slug }} + + - name: Download Test Run Artifacts + id: download-artifacts-from-vm + if: always() && steps.spin-up-vm.outcome == 'success' + run: | + tools --timestamps vm download-artifacts ${{ matrix.distro-slug }} + # Delete the salt onedir, we won't need it anymore and it will prevent + # from it showing in the tree command below + rm -rf artifacts/salt* + tree -a artifacts + + - name: Destroy VM + if: always() + run: | + tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} || true + + - name: Fix file ownership + run: | + sudo chown -R "$(id -un)" . 
+ + - name: Install Codecov CLI + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + # We can't yet use tokenless uploads with the codecov CLI + # python3 -m pip install codecov-cli + # + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import + curl -Os https://uploader.codecov.io/latest/linux/codecov + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + + - name: Upload Source Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + if [ ! -s artifacts/coverage/salt.xml ]; then + echo "The artifacts/coverage/salt.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/salt.xml \ + # --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/salt.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags salt,${{ matrix.distro-slug }},pkg \ + --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? 
+ n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Tests Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + if [ ! -s artifacts/coverage/tests.xml ]; then + echo "The artifacts/coverage/tests.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/tests.xml \ + # --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/tests.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags tests,${{ matrix.distro-slug }},pkg \ + --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? 
+ n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Test Run Artifacts + if: always() && steps.download-artifacts-from-vm.outcome == 'success' + uses: actions/upload-artifact@v3 + with: + name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }} + path: | + artifacts + !artifacts/salt/* + !artifacts/salt-*.tar.* + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && job.status != 'cancelled' && steps.download-artifacts-from-vm.outcome == 'success' + with: + check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }}) + report_paths: 'artifacts/xml-unittests-output/*.xml' + annotate_only: true + + + macos: + name: MacOS + runs-on: ${{ matrix.distro-slug }} + environment: ${{ inputs.environment }} + timeout-minutes: 120 # 2 Hours - More than this and something is wrong + strategy: + fail-fast: false + matrix: + include: + <%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["macos"] %> + - distro-slug: <{ slug }> + arch: <{ arch }> + pkg-type: <{ pkg_type }> + <%- endfor %> + + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + + - name: Checkout Source Code + uses: actions/checkout@v4 + + - name: Download Onedir Tarball as an Artifact + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz + path: artifacts/ + + - name: Install System Dependencies + run: | + brew install tree + + - name: Decompress Onedir Tarball + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-darwin-${{ 
matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz + + - name: Set up Python ${{ inputs.python-version }} + uses: actions/setup-python@v4 + with: + python-version: "${{ inputs.python-version }}" + update-environment: true + + - name: Install Nox + run: | + python3 -m pip install 'nox==${{ inputs.nox-version }}' + + - name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + uses: actions/cache@v3.3.1 + with: + path: nox.${{ matrix.distro-slug }}.tar.* + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true + + - name: Decompress .nox Directory + run: | + nox --force-color -e decompress-dependencies -- ${{ matrix.distro-slug }} + + - name: Show System Info & Test Plan + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "1" + PRINT_TEST_PLAN_ONLY: "1" + PRINT_SYSTEM_INFO: "1" + GITHUB_ACTIONS_PIPELINE: "1" + SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- download-pkgs + + - name: Run Package Download Tests + env: + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "0" + PRINT_TEST_PLAN_ONLY: "0" + PRINT_SYSTEM_INFO: "0" + RERUN_FAILURES: "1" + GITHUB_ACTIONS_PIPELINE: "1" + SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + COVERAGE_CONTEXT: ${{ matrix.distro-slug }} + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ 
matrix.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- download-pkgs + + - name: Fix file ownership + run: | + sudo chown -R "$(id -un)" . + + - name: Combine Coverage Reports + if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' + run: | + nox --force-color -e combine-coverage + + - name: Prepare Test Run Artifacts + id: download-artifacts-from-vm + if: always() && job.status != 'cancelled' + run: | + # Delete the salt onedir, we won't need it anymore and it will prevent + # from it showing in the tree command below + rm -rf artifacts/salt* + tree -a artifacts + + - name: Install Codecov CLI + if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' + run: | + # We can't yet use tokenless uploads with the codecov CLI + # python3 -m pip install codecov-cli + # + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import + curl -Os https://uploader.codecov.io/latest/macos/codecov + curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + + - name: Upload Source Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' + run: | + if [ ! 
-s artifacts/coverage/salt.xml ]; then + echo "The artifacts/coverage/salt.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/salt.xml \ + # --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/salt.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags salt,${{ matrix.distro-slug }},pkg \ + --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? + n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Tests Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' + run: | + if [ ! 
-s artifacts/coverage/tests.xml ]; then + echo "The artifacts/coverage/tests.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/tests.xml \ + # --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/tests.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags tests,${{ matrix.distro-slug }},pkg \ + --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? + n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Test Run Artifacts + if: always() + uses: actions/upload-artifact@v3 + with: + name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }} + path: | + artifacts + !artifacts/salt/* + !artifacts/salt-*.tar.* + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && job.status != 'cancelled' + with: + check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }}) + report_paths: 'artifacts/xml-unittests-output/*.xml' + annotate_only: true + + + windows: + name: Windows + runs-on: + - self-hosted + - linux + - bastion + environment: ${{ inputs.environment }} + timeout-minutes: 120 # 2 Hours - More than this and something is wrong + strategy: + fail-fast: false + matrix: + include: + <%- for slug, arch, pkg_type in test_salt_pkg_downloads_listing["windows"] %> + - distro-slug: <{ slug }> + 
arch: <{ arch }> + pkg-type: <{ pkg_type }> + <%- endfor %> + + steps: + - name: Checkout Source Code + uses: actions/checkout@v4 + + - name: Download Onedir Tarball as an Artifact + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz + path: artifacts/ + + - name: Decompress Onedir Tarball + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz + + - name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + uses: actions/cache@v3.3.1 + with: + path: nox.${{ matrix.distro-slug }}.tar.* + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }}-pkg-download-windows + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: Start VM + id: spin-up-vm + run: | + tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }} + + - name: List Free Space + run: | + tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true + + - name: Upload Checkout To VM + run: | + tools 
--timestamps vm rsync ${{ matrix.distro-slug }} + + - name: Decompress .nox Directory + run: | + tools --timestamps vm decompress-dependencies ${{ matrix.distro-slug }} + + - name: Show System Info & Test Plan + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ matrix.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ + -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ + --nox-session=${{ inputs.nox-session }}-pkgs ${{ matrix.distro-slug }} -- download-pkgs + + - name: Run Package Download Tests + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ matrix.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE 
-E SALT_REPO_USER -E SALT_REPO_PASS \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ + --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ matrix.distro-slug }} -- download-pkgs + + - name: Combine Coverage Reports + if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled' + run: | + tools --timestamps vm combine-coverage ${{ matrix.distro-slug }} + + - name: Download Test Run Artifacts + id: download-artifacts-from-vm + if: always() && steps.spin-up-vm.outcome == 'success' + run: | + tools --timestamps vm download-artifacts ${{ matrix.distro-slug }} + # Delete the salt onedir, we won't need it anymore and it will prevent + # from it showing in the tree command below + rm -rf artifacts/salt* + tree -a artifacts + + - name: Destroy VM + if: always() + run: | + tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} || true + + - name: Fix file ownership + run: | + sudo chown -R "$(id -un)" . + + - name: Install Codecov CLI + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + # We can't yet use tokenless uploads with the codecov CLI + # python3 -m pip install codecov-cli + # + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import + curl -Os https://uploader.codecov.io/latest/linux/codecov + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + + - name: Upload Source Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + if [ ! 
-s artifacts/coverage/salt.xml ]; then + echo "The artifacts/coverage/salt.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/salt.xml \ + # --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/salt.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags salt,${{ matrix.distro-slug }},pkg \ + --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? + n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Tests Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + if [ ! 
-s artifacts/coverage/tests.xml ]; then + echo "The artifacts/coverage/tests.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/tests.xml \ + # --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/tests.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags tests,${{ matrix.distro-slug }},pkg \ + --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? + n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Test Run Artifacts + if: always() && steps.download-artifacts-from-vm.outcome == 'success' + uses: actions/upload-artifact@v3 + with: + name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }} + path: | + artifacts + !artifacts/salt/* + !artifacts/salt-*.tar.* + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && job.status != 'cancelled' && steps.download-artifacts-from-vm.outcome == 'success' + with: + check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }} ${{ matrix.pkg-type }} ) + report_paths: 'artifacts/xml-unittests-output/*.xml' + annotate_only: true diff --git a/.github/workflows/templates/test-pkg-repo-uploads.yml.jinja b/.github/workflows/templates/test-pkg-repo-uploads.yml.jinja deleted file mode 100644 index 07f61aa5a4ab..000000000000 --- 
a/.github/workflows/templates/test-pkg-repo-uploads.yml.jinja +++ /dev/null @@ -1,433 +0,0 @@ - - test-amazon-repo: - <%- do test_repo_needs.append('test-amazon-repo') %> - name: Test Amazon Linux Repository - strategy: - fail-fast: false - matrix: - arch: - - x86_64 - - arm64 - version: - - "2" - runs-on: - - self-hosted - - linux - - ${{ matrix.arch }} - environment: <{ gh_environment }> - container: ghcr.io/saltstack/salt-ci-containers/amazon-linux:${{ matrix.version }} - needs: - - prepare-workflow - - publish-repositories - - steps: - - name: Import GPG Key - run: | - rpm --import ${{ env.REPO_BASE_URL }}/amazon/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}/SALT-PROJECT-GPG-PUBKEY-2023.pub - - - name: Import Repo File - run: | - curl -fsSL -o /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo ${{ env.REPO_BASE_URL }}/amazon/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}.repo - echo baseurl=${{ env.REPO_BASE_URL }}/amazon/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }} >> /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo - echo gpgkey=${{ env.REPO_BASE_URL }}/amazon/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}/SALT-PROJECT-GPG-PUBKEY-2023.pub >> /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo - - - name: Install Salt - run: | - yum clean expire-cache - yum install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api - - <%- for command in ('salt-call --local test.versions', - 'salt-call --local grains.items', - 'salt --version', - 'salt-master --version', - 'salt-minion --version', - 'salt-ssh --version', - 'salt-syndic --version', - 'salt-api --version', - 'salt-cloud --version') %> - - - name: Test `<{ command }>` - run: | - <{ command }> - <%- endfor %> - - 
test-centos-repo: - <%- do test_repo_needs.append('test-centos-repo') %> - name: Test Centos Repository - strategy: - fail-fast: false - matrix: - arch: - - x86_64 - - arm64 - version: - - "7" - runs-on: - - self-hosted - - linux - - ${{ matrix.arch }} - environment: <{ gh_environment }> - container: ghcr.io/saltstack/salt-ci-containers/centos:${{ matrix.version }} - needs: - - prepare-workflow - - publish-repositories - - steps: - - name: Import GPG Key - run: | - rpm --import ${{ env.REPO_BASE_URL }}/redhat/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}/SALT-PROJECT-GPG-PUBKEY-2023.pub - - - name: Import Repo File - run: | - curl -fsSL -o /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo ${{ env.REPO_BASE_URL }}/redhat/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}.repo - echo baseurl=${{ env.REPO_BASE_URL }}/redhat/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }} >> /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo - echo gpgkey=${{ env.REPO_BASE_URL }}/redhat/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}/SALT-PROJECT-GPG-PUBKEY-2023.pub >> /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo - - - name: Install Salt - run: | - yum clean expire-cache - yum install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api - - <%- for command in ('salt-call --local test.versions', - 'salt-call --local grains.items', - 'salt --version', - 'salt-master --version', - 'salt-minion --version', - 'salt-ssh --version', - 'salt-syndic --version', - 'salt-api --version', - 'salt-cloud --version') %> - - - name: Test `<{ command }>` - run: | - <{ command }> - <%- endfor %> - - test-centos-stream-repo: - <%- do test_repo_needs.append('test-centos-stream-repo') %> - name: Test Centos 
Stream Repository - strategy: - fail-fast: false - matrix: - arch: - - x86_64 - - arm64 - version: - - "8" - - "9" - runs-on: - - self-hosted - - linux - - ${{ matrix.arch }} - environment: <{ gh_environment }> - container: ghcr.io/saltstack/salt-ci-containers/centos-stream:${{ matrix.version }} - needs: - - prepare-workflow - - publish-repositories - - steps: - - name: Import GPG Key - run: | - rpm --import ${{ env.REPO_BASE_URL }}/redhat/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}/SALT-PROJECT-GPG-PUBKEY-2023.pub - - - name: Import Repo File - run: | - curl -fsSL -o /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo ${{ env.REPO_BASE_URL }}/redhat/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}.repo - echo baseurl=${{ env.REPO_BASE_URL }}/redhat/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }} >> /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo - echo gpgkey=${{ env.REPO_BASE_URL }}/redhat/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}/SALT-PROJECT-GPG-PUBKEY-2023.pub >> /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo - - - name: Install Salt - run: | - yum clean expire-cache - yum install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api - - <%- for command in ('salt-call --local test.versions', - 'salt-call --local grains.items', - 'salt --version', - 'salt-master --version', - 'salt-minion --version', - 'salt-ssh --version', - 'salt-syndic --version', - 'salt-api --version', - 'salt-cloud --version') %> - - - name: Test `<{ command }>` - run: | - <{ command }> - <%- endfor %> - - test-debian-repo: - <%- do test_repo_needs.append('test-debian-repo') %> - name: Test Debian Repository - strategy: - fail-fast: false - matrix: - include: - - {"version": "10", 
"codename": "buster", "arch": "amd64"} - - {"version": "10", "codename": "buster", "arch": "arm64"} - - {"version": "11", "codename": "bullseye", "arch": "amd64"} - - {"version": "11", "codename": "bullseye", "arch": "arm64"} - runs-on: - - self-hosted - - linux - - ${{ matrix.arch }} - environment: <{ gh_environment }> - container: ghcr.io/saltstack/salt-ci-containers/debian:${{ matrix.version }} - needs: - - prepare-workflow - - publish-repositories - - steps: - - name: Update System - run: | - apt-get update -y - apt-get install -y curl - - - name: Download GPG Key - run: | - curl -fsSL -o /usr/share/keyrings/SALT-PROJECT-GPG-PUBKEY-2023.gpg \ - ${{ env.REPO_BASE_URL }}/debian/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version}}/SALT-PROJECT-GPG-PUBKEY-2023.gpg - - - name: Setup Repo - run: | - echo "deb [signed-by=/usr/share/keyrings/SALT-PROJECT-GPG-PUBKEY-2023.gpg arch=${{ matrix.arch }}] ${{ env.REPO_BASE_URL }}/debian/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version}} ${{ matrix.codename }} main" | sudo tee /etc/apt/sources.list.d/salt.list - - - name: Install Salt - run: | - apt-get update - apt-get install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api - yum install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api - - <%- for command in ('salt-call --local test.versions', - 'salt-call --local grains.items', - 'salt --version', - 'salt-master --version', - 'salt-minion --version', - 'salt-ssh --version', - 'salt-syndic --version', - 'salt-api --version', - 'salt-cloud --version') %> - - - name: Test `<{ command }>` - run: | - <{ command }> - <%- endfor %> - - test-fedora-repo: - <%- do test_repo_needs.append('test-fedora-repo') %> - name: Test Fedora Repository - strategy: - fail-fast: false - matrix: - arch: - - x86_64 - - arm64 - version: - - "36" - - "37" - - "38" - runs-on: - - self-hosted - - linux - - ${{ matrix.arch }} - 
environment: <{ gh_environment }> - container: ghcr.io/saltstack/salt-ci-containers/fedora:${{ matrix.version }} - needs: - - prepare-workflow - - publish-repositories - - steps: - - name: Import GPG Key - run: | - rpm --import ${{ env.REPO_BASE_URL }}/fedora/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}/SALT-PROJECT-GPG-PUBKEY-2023.pub - - - name: Import Repo File - run: | - curl -fsSL -o /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo ${{ env.REPO_BASE_URL }}/fedora/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}.repo - echo baseurl=${{ env.REPO_BASE_URL }}/fedora/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }} >> /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo - echo gpgkey=${{ env.REPO_BASE_URL }}/fedora/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version }}/SALT-PROJECT-GPG-PUBKEY-2023.pub >> /etc/yum.repos.d/salt-${{ needs.prepare-workflow.outputs.salt-version }}.repo - - - name: Install Salt - run: | - yum clean expire-cache - yum install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api - - <%- for command in ('salt-call --local test.versions', - 'salt-call --local grains.items', - 'salt --version', - 'salt-master --version', - 'salt-minion --version', - 'salt-ssh --version', - 'salt-syndic --version', - 'salt-api --version', - 'salt-cloud --version') %> - - - name: Test `<{ command }>` - run: | - <{ command }> - <%- endfor %> - - test-ubuntu-repo: - <%- do test_repo_needs.append('test-ubuntu-repo') %> - name: Test Ubuntu Repository - strategy: - fail-fast: false - matrix: - include: - - {"version": "20.04", "codename": "focal", "arch": "amd64"} - - {"version": "20.04", "codename": "focal", "arch": "arm64"} - - {"version": "22.04", "codename": "jammy", "arch": "amd64"} - - {"version": 
"22.04", "codename": "jammy", "arch": "arm64"} - runs-on: - - self-hosted - - linux - - ${{ matrix.arch }} - environment: <{ gh_environment }> - container: ghcr.io/saltstack/salt-ci-containers/ubuntu:${{ matrix.version }} - needs: - - prepare-workflow - - publish-repositories - - steps: - - name: Update System - run: | - apt-get update -y - apt-get install -y curl - - - name: Download GPG Key - run: | - curl -fsSL -o /usr/share/keyrings/SALT-PROJECT-GPG-PUBKEY-2023.gpg \ - ${{ env.REPO_BASE_URL }}/ubuntu/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version}}/SALT-PROJECT-GPG-PUBKEY-2023.gpg - - - name: Setup Repo - run: | - echo "deb [signed-by=/usr/share/keyrings/SALT-PROJECT-GPG-PUBKEY-2023.gpg arch=${{ matrix.arch }}] ${{ env.REPO_BASE_URL }}/ubuntu/${{ matrix.version }}/${{ matrix.arch }}/minor/${{ needs.prepare-workflow.outputs.salt-version}} ${{ matrix.codename }} main" | sudo tee /etc/apt/sources.list.d/salt.list - - - name: Install Salt - run: | - apt-get update - apt-get install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api - yum install -y salt-master salt-minion salt-ssh salt-syndic salt-cloud salt-api - - <%- for command in ('salt-call --local test.versions', - 'salt-call --local grains.items', - 'salt --version', - 'salt-master --version', - 'salt-minion --version', - 'salt-ssh --version', - 'salt-syndic --version', - 'salt-api --version', - 'salt-cloud --version') %> - - - name: Test `<{ command }>` - run: | - <{ command }> - <%- endfor %> - - test-macos-repo: - <%- do test_repo_needs.append('test-macos-repo') %> - name: Test macOS Repository - strategy: - fail-fast: false - matrix: - arch: - - x86_64 - version: - - "12" - runs-on: - - macos-${{ matrix.version }} - environment: <{ gh_environment }> - needs: - - prepare-workflow - - publish-repositories - - steps: - - - name: Install Salt - run: | - curl -fsSL -o /tmp/salt-${{ needs.prepare-workflow.outputs.salt-version }}-py3-${{ 
matrix.arch }}.pkg \ - ${{ env.REPO_BASE_URL }}/macos/minor/${{ needs.prepare-workflow.outputs.salt-version}}/salt-${{ needs.prepare-workflow.outputs.salt-version }}-py3-${{ matrix.arch }}.pkg || \ - curl -fsSL -o /tmp/salt-${{ needs.prepare-workflow.outputs.salt-version }}-py3-${{ matrix.arch }}-unsigned.pkg \ - ${{ env.REPO_BASE_URL }}/macos/minor/${{ needs.prepare-workflow.outputs.salt-version}}/salt-${{ needs.prepare-workflow.outputs.salt-version }}-py3-${{ matrix.arch }}-unsigned.pkg - - if [ -f /tmp/salt-${{ needs.prepare-workflow.outputs.salt-version }}-py3-${{ matrix.arch }}.pkg ]; then - installer -pkg /tmp/salt-${{ needs.prepare-workflow.outputs.salt-version }}-py3-${{ matrix.arch }}.pkg -target / - elif [ -f /tmp/salt-${{ needs.prepare-workflow.outputs.salt-version }}-py3-${{ matrix.arch }}-unsigned.pkg ]; then - installer -pkg /tmp/salt-${{ needs.prepare-workflow.outputs.salt-version }}-py3-${{ matrix.arch }}-unsigned.pkg -target / - else - echo "No downloaded Package?" - exit 1 - fi - - <%- for command in ('salt-call --local test.versions', - 'salt-call --local grains.items', - 'salt --version', - 'salt-master --version', - 'salt-minion --version', - 'salt-ssh --version', - 'salt-syndic --version', - 'salt-api --version', - 'salt-cloud --version') %> - - - name: Test `<{ command }>` - run: | - <{ command }> - <%- endfor %> - - <%- for install_type in ("setup",) %> - - test-windows-<{ install_type }>-repo: - <%- do test_repo_needs.append('test-windows-{}-repo'.format(install_type)) %> - name: Test Windows Repository(<{ install_type }>) - strategy: - fail-fast: false - matrix: - arch: - - x86 - - AMD64 - version: - - "2019" - runs-on: - - windows-${{ matrix.version }} - environment: <{ gh_environment }> - needs: - - prepare-workflow - - publish-repositories - - steps: - - - name: Download Salt - shell: bash - run: | - <%- if install_type == "msi" %> - curl -fsSL -o Salt-Minion-${{ needs.prepare-workflow.outputs.salt-version }}-Py3-${{ matrix.arch }}.msi 
\ - ${{ env.REPO_BASE_URL }}/windows/minor/${{ needs.prepare-workflow.outputs.salt-version}}/Salt-Minion-${{ needs.prepare-workflow.outputs.salt-version }}-Py3-${{ matrix.arch }}.msi - <%- elif install_type == "setup" %> - curl -fsSL -o Salt-Minion-${{ needs.prepare-workflow.outputs.salt-version }}-Py3-${{ matrix.arch }}-Setup.exe \ - ${{ env.REPO_BASE_URL }}/windows/minor/${{ needs.prepare-workflow.outputs.salt-version}}/Salt-Minion-${{ needs.prepare-workflow.outputs.salt-version }}-Py3-${{ matrix.arch }}-Setup.exe - <%- endif %> - - - name: Install Salt - shell: powershell - run: | - <%- if install_type == "msi" %> - Salt-Minion-${{ needs.prepare-workflow.outputs.salt-version }}-Py3-${{ matrix.arch }}.msi /start-minion=0 /S - <%- elif install_type == "setup" %> - Salt-Minion-${{ needs.prepare-workflow.outputs.salt-version }}-Py3-${{ matrix.arch }}.exe /start-minion=0 /S - <%- endif %> - - <%- for command in ('salt-call --local test.versions', - 'salt-call --local grains.items', - 'salt --version', - 'salt-master --version', - 'salt-minion --version', - 'salt-ssh --version', - 'salt-syndic --version', - 'salt-api --version', - 'salt-cloud --version') %> - - - name: Test `<{ command }>` - run: | - <{ command }> - <%- endfor %> - <%- endfor %> diff --git a/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja b/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja new file mode 100644 index 000000000000..005c5272ac31 --- /dev/null +++ b/.github/workflows/templates/test-salt-pkg-repo-downloads.yml.jinja @@ -0,0 +1,36 @@ + + <%- set job_name = "pkg-download-tests" %> + + <{ job_name }>: + <%- do test_repo_needs.append(job_name) %> + <%- do conclusion_needs.append(job_name) %> + name: Package Downloads + <%- if gh_environment == "staging" %> + if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg-download'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} + <%- else %> + if: ${{ 
inputs.skip-salt-pkg-download-test-suite == false }} + <%- endif %> + needs: + - prepare-workflow + - publish-repositories + <%- for slug in test_salt_pkg_downloads_needs_slugs %> + - <{ slug }> + <%- endfor %> + <%- if gh_environment == "release" %> + - download-onedir-artifact + <%- else %> + - build-salt-onedir-linux + - build-salt-onedir-macos + - build-salt-onedir-windows + <%- endif %> + uses: ./.github/workflows/test-package-downloads-action.yml + with: + nox-session: ci-test-onedir + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> + salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" + environment: <{ gh_environment }> + nox-version: <{ nox_version }> + python-version: "<{ gh_actions_workflows_python_version }>" + skip-code-coverage: true + latest-release: "${{ needs.prepare-workflow.outputs.latest-release }}" + secrets: inherit diff --git a/.github/workflows/templates/test-salt-pkg.yml.jinja b/.github/workflows/templates/test-salt-pkg.yml.jinja index 3cc2b1d6c61c..eb8b43d071ff 100644 --- a/.github/workflows/templates/test-salt-pkg.yml.jinja +++ b/.github/workflows/templates/test-salt-pkg.yml.jinja @@ -1,109 +1,91 @@ - - <%- set linux_pkg_tests = ( - ("almalinux-8", "Alma Linux 8", "x86_64", "rpm"), - ("almalinux-9", "Alma Linux 9", "x86_64", "rpm"), - ("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm"), - ("centos-7", "CentOS 7", "x86_64", "rpm"), - ("centosstream-8", "CentOS Stream 8", "x86_64", "rpm"), - ("centosstream-9", "CentOS Stream 9", "x86_64", "rpm"), - ("debian-10", "Debian 10", "x86_64", "deb"), - ("debian-11", "Debian 11", "x86_64", "deb"), - ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb"), - ("fedora-36", "Fedora 36", "x86_64", "rpm"), - ("fedora-37", "Fedora 37", "x86_64", "rpm"), - ("fedora-38", "Fedora 38", "x86_64", "rpm"), - ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb"), - ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb"), - ("ubuntu-22.04", "Ubuntu 22.04", 
"x86_64", "deb"), - ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "deb") - ) %> - - <%- set linux_pkg_tests = ( - ("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm"), - ("centos-7", "CentOS 7", "x86_64", "rpm"), - ("centosstream-8", "CentOS Stream 8", "x86_64", "rpm"), - ("centosstream-9", "CentOS Stream 9", "x86_64", "rpm"), - ("debian-10", "Debian 10", "x86_64", "deb"), - ("debian-11", "Debian 11", "x86_64", "deb"), - ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb"), - ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb"), - ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb"), - ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb"), - ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "deb") - ) %> - - <%- for slug, display_name, arch, pkg_type in linux_pkg_tests %> + <%- for slug, display_name, arch, pkg_type, fips in test_salt_pkg_listing["linux"] %> <%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %> <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> - name: <{ display_name }> Package Tests + name: <{ display_name }> Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-<{ pkg_type }>-pkgs + - build-<{ pkg_type }>-pkgs-onedir + - <{ slug.replace(".", "") }>-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: <{ slug }> + nox-session: ci-test-onedir platform: linux arch: <{ arch }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: <{ pkg_type }> - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_linux }> + nox-version: <{ nox_version }> + python-version: "<{ gh_actions_workflows_python_version }>" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> + 
testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} + <%- if fips == "fips" %> + fips: true + <%- endif %> <%- endfor %> - <%- for slug, display_name, arch in (("macos-12", "macOS 12", "x86_64"),) %> + <%- for slug, display_name, arch in test_salt_pkg_listing["macos"] %> <%- set job_name = "{}-pkg-tests".format(slug.replace(".", "")) %> <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> - name: <{ display_name }> Package Tests + name: <{ display_name }> Package Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-macos-pkgs + - build-macos-pkgs-onedir + - <{ slug.replace(".", "") }>-ci-deps uses: ./.github/workflows/test-packages-action-macos.yml with: distro-slug: <{ slug }> + nox-session: ci-test-onedir platform: darwin arch: <{ arch }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: macos - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_macos }> + nox-version: <{ nox_version }> + python-version: "<{ gh_actions_workflows_python_version }>" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} <%- endfor %> - <%- for slug, display_name, arch in (("windows-2016", "Windows 2016", "amd64"), - ("windows-2019", "Windows 2019", "amd64"), - ("windows-2022", "Windows 2022", "amd64")) %> + <%- for slug, display_name, arch in test_salt_pkg_listing["windows"] %> <%- for pkg_type in ("NSIS", "MSI") %> <%- set job_name = "{}-{}-pkg-tests".format(slug.replace(".", ""), pkg_type.lower()) %> <{ job_name }>: <%- do test_salt_pkg_needs.append(job_name) %> - name: <{ display_name }> <{ pkg_type }> Package Tests + name: <{ display_name }> <{ pkg_type }> Package 
Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test-pkg'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-windows-pkgs + - build-windows-pkgs-onedir + - <{ slug.replace(".", "") }>-ci-deps uses: ./.github/workflows/test-packages-action.yml with: distro-slug: <{ slug }> + nox-session: ci-test-onedir platform: windows arch: <{ arch }> salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" pkg-type: <{ pkg_type }> - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_windows }> + nox-version: <{ nox_version }> + python-version: "<{ gh_actions_workflows_python_version }>" + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> + testing-releases: ${{ needs.prepare-workflow.outputs.testing-releases }} <%- endfor %> <%- endfor %> diff --git a/.github/workflows/templates/test-salt.yml.jinja b/.github/workflows/templates/test-salt.yml.jinja index f79c4124640b..9c25d7f0a8c5 100644 --- a/.github/workflows/templates/test-salt.yml.jinja +++ b/.github/workflows/templates/test-salt.yml.jinja @@ -1,90 +1,90 @@ +<%- if workflow_slug in ("nightly", "scheduled") %> + <%- set timeout_value = 360 %> +<%- else %> + <%- set timeout_value = 180 %> +<%- endif %> - <%- for slug, display_name, arch in (("windows-2016", "Windows 2016", "amd64"), - ("windows-2019", "Windows 2019", "amd64"), - ("windows-2022", "Windows 2022", "amd64")) %> + <%- for slug, display_name, arch in test_salt_listing["windows"] %> <{ slug.replace(".", "") }>: <%- do test_salt_needs.append(slug.replace(".", "")) %> - name: <{ display_name }> + name: <{ display_name }> Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - <{ slug.replace(".", "") 
}>-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: <{ slug }> nox-session: ci-test-onedir platform: windows arch: amd64 + nox-version: <{ nox_version }> + gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_windows }> + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> + workflow-slug: <{ workflow_slug }> + default-timeout: <{ timeout_value }> <%- endfor %> - <%- for slug, display_name, arch in (("macos-12", "macOS 12", "x86_64"),) %> + + <%- for slug, display_name, arch in test_salt_listing["macos"] %> <{ slug.replace(".", "") }>: <%- do test_salt_needs.append(slug.replace(".", "")) %> - name: <{ display_name }> + name: <{ display_name }> Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['github-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - <{ slug.replace(".", "") }>-ci-deps uses: ./.github/workflows/test-action-macos.yml with: distro-slug: <{ slug }> nox-session: ci-test-onedir platform: darwin - arch: x86_64 + arch: <{ arch }> + nox-version: <{ nox_version }> + gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_macos }> + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check }> skip-junit-reports: <{ skip_junit_reports_check }> + workflow-slug: <{ workflow_slug }> + default-timeout: <{ timeout_value }> 
<%- endfor %> - <%- for slug, display_name, arch in (("almalinux-8", "Alma Linux 8", "x86_64"), - ("almalinux-9", "Alma Linux 9", "x86_64"), - ("amazonlinux-2", "Amazon Linux 2", "x86_64"), - ("archlinux-lts", "Arch Linux LTS", "x86_64"), - ("centos-7", "CentOS 7", "x86_64"), - ("centosstream-8", "CentOS Stream 8", "x86_64"), - ("centosstream-9", "CentOS Stream 9", "x86_64"), - ("debian-10", "Debian 10", "x86_64"), - ("debian-11", "Debian 11", "x86_64"), - ("debian-11-arm64", "Debian 11 Arm64", "aarch64"), - ("fedora-36", "Fedora 36", "x86_64"), - ("fedora-37", "Fedora 37", "x86_64"), - ("fedora-38", "Fedora 38", "x86_64"), - ("opensuse-15", "Opensuse 15", "x86_64"), - ("photonos-3", "Photon OS 3", "x86_64"), - ("photonos-4", "Photon OS 4", "x86_64"), - ("ubuntu-20.04", "Ubuntu 20.04", "x86_64"), - ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"), - ("ubuntu-22.04", "Ubuntu 22.04", "x86_64"), - ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64")) %> + <%- for slug, display_name, arch, fips in test_salt_listing["linux"] %> <{ slug.replace(".", "") }>: <%- do test_salt_needs.append(slug.replace(".", "")) %> - name: <{ display_name }> + name: <{ display_name }> Test if: ${{ fromJSON(needs.prepare-workflow.outputs.jobs)['test'] && fromJSON(needs.prepare-workflow.outputs.runners)['self-hosted'] }} needs: - prepare-workflow - - build-salt-onedir + - <{ slug.replace(".", "") }>-ci-deps uses: ./.github/workflows/test-action.yml with: distro-slug: <{ slug }> nox-session: ci-test-onedir platform: linux arch: <{ arch }> + nox-version: <{ nox_version }> + gh-actions-python-version: "<{ gh_actions_workflows_python_version }>" testrun: ${{ needs.prepare-workflow.outputs.testrun }} salt-version: "${{ needs.prepare-workflow.outputs.salt-version }}" - cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version_linux }> + cache-prefix: ${{ needs.prepare-workflow.outputs.cache-seed }}|<{ python_version }> skip-code-coverage: <{ skip_test_coverage_check 
}> skip-junit-reports: <{ skip_junit_reports_check }> + workflow-slug: <{ workflow_slug }> + default-timeout: <{ timeout_value }> + <%- if fips == "fips" %> + fips: true + <%- endif %> <%- endfor %> diff --git a/.github/workflows/templates/trigger-branch-workflows.yml.jinja b/.github/workflows/templates/trigger-branch-workflows.yml.jinja index c34b6ee17059..24d0147b9152 100644 --- a/.github/workflows/templates/trigger-branch-workflows.yml.jinja +++ b/.github/workflows/templates/trigger-branch-workflows.yml.jinja @@ -6,9 +6,8 @@ <{ job_name }>: <%- do conclusion_needs.append(job_name) %> name: Trigger Branch Workflows - if: ${{ github.event_name == 'schedule' }} - runs-on: - - ubuntu-latest + if: ${{ github.event_name == 'schedule' && fromJSON(needs.workflow-requirements.outputs.requirements-met) }} + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - workflow-requirements steps: diff --git a/.github/workflows/templates/workflow-requirements-check.yml.jinja b/.github/workflows/templates/workflow-requirements-check.yml.jinja index 8fe06e6d6bbe..a18c13c69f5a 100644 --- a/.github/workflows/templates/workflow-requirements-check.yml.jinja +++ b/.github/workflows/templates/workflow-requirements-check.yml.jinja @@ -4,14 +4,14 @@ <{ job_name }>: <%- do prepare_workflow_needs.append(job_name) %> name: Check Workflow Requirements - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: requirements-met: ${{ steps.check-requirements.outputs.requirements-met }} steps: - name: Check Requirements id: check-requirements run: | - if [ "${RUN_SCHEDULED_BUILDS}" = "1" ]; then + if [ "${{ vars.RUN_SCHEDULED_BUILDS }}" = "1" ]; then MSG="Running workflow because RUN_SCHEDULED_BUILDS=1" echo "${MSG}" echo "${MSG}" >> "${GITHUB_STEP_SUMMARY}" diff --git a/.github/workflows/test-action-macos.yml 
b/.github/workflows/test-action-macos.yml index 92e5300c5ee9..085695122c90 100644 --- a/.github/workflows/test-action-macos.yml +++ b/.github/workflows/test-action-macos.yml @@ -16,11 +16,11 @@ on: required: true type: string description: JSON string containing information about what and how to run the test suite - python-version: + gh-actions-python-version: required: false type: string description: The python version to run tests with - default: "3.9" + default: "3.11" salt-version: type: string required: true @@ -37,6 +37,10 @@ on: required: true type: string description: The platform arch being tested + nox-version: + required: true + type: string + description: The nox version to install package-name: required: false type: string @@ -52,10 +56,18 @@ on: type: boolean description: Skip Publishing JUnit Reports default: false - + workflow-slug: + required: false + type: string + description: Which workflow is running. + default: ci + default-timeout: + required: false + type: number + description: Timeout, in minutes, for the test job(Default 360, 6 hours). 
+ default: 360 env: - NOX_VERSION: "2022.8.7" COLUMNS: 190 PIP_INDEX_URL: "https://pypi-proxy.saltstack.net/root/local/+simple/" PIP_EXTRA_INDEX_URL: "https://pypi.org/simple" @@ -63,130 +75,54 @@ env: jobs: generate-matrix: - name: Generate Test Matrix - runs-on: ubuntu-latest + name: Test Matrix + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} outputs: matrix-include: ${{ steps.generate-matrix.outputs.matrix }} - transport-matrix-include: ${{ steps.generate-transport-matrix.outputs.matrix }} steps: - - name: Checkout Source Code - uses: actions/checkout@v3 - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Generate Test Matrix - id: generate-matrix - run: | - TEST_MATRIX=$(tools ci matrix ${{ inputs.distro-slug }}) - echo "$TEST_MATRIX" - echo "matrix=$TEST_MATRIX" >> "$GITHUB_OUTPUT" - - - name: Generate Transport Matrix - id: generate-transport-matrix + - name: "Throttle Builds" + shell: bash run: | - TRANSPORT_MATRIX=$(tools ci transport-matrix ${{ inputs.distro-slug }}) - echo "$TRANSPORT_MATRIX" - echo "matrix=$TRANSPORT_MATRIX" >> "$GITHUB_OUTPUT" + t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" - dependencies: - name: Setup Test Dependencies - runs-on: ${{ inputs.distro-slug }} - timeout-minutes: 90 - needs: - - generate-matrix - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }} - steps: - name: Checkout Source Code - uses: actions/checkout@v3 - - - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} - id: nox-dependencies-cache - uses: actions/cache@v3 - with: - path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }} - - - name: Download 
Onedir Tarball as an Artifact - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: artifacts/ + uses: actions/checkout@v4 - - name: Decompress Onedir Tarball - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - shell: bash - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - - - name: Set up Python ${{ inputs.python-version }} - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/setup-python@v4 + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts with: - python-version: "${{ inputs.python-version }}" + cache-prefix: ${{ inputs.cache-prefix }} - - name: Install System Dependencies - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - brew install openssl@3 - - - name: Install Nox - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - python3 -m pip install 'nox==${{ env.NOX_VERSION }}' - - - name: Define Nox Session - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - if [ "${{ matrix.transport }}" != "tcp" ]; then - echo NOX_SESSION=${{ inputs.nox-session}} >> "$GITHUB_ENV" - else - echo NOX_SESSION=${{ inputs.nox-session}}-tcp >> "$GITHUB_ENV" - fi - - - name: Install Dependencies - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - env: - PRINT_TEST_SELECTION: "0" - PRINT_SYSTEM_INFO: "0" - run: | - export PYCURL_SSL_LIBRARY=openssl - export LDFLAGS="-L/usr/local/opt/openssl@3/lib" - export CPPFLAGS="-I/usr/local/opt/openssl@3/include" - export PKG_CONFIG_PATH="/usr/local/opt/openssl@3/lib/pkgconfig" - nox --install-only -e ${{ env.NOX_SESSION }} - - - name: Cleanup .nox Directory - 
if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - nox -e "pre-archive-cleanup(pkg=False)" - - - name: Compress .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + - name: Generate Test Matrix + id: generate-matrix run: | - nox -e compress-dependencies -- ${{ inputs.distro-slug }} + tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ inputs.distro-slug }} test: name: Test runs-on: ${{ inputs.distro-slug }} - timeout-minutes: 360 # 6 Hours + # Full test runs. Each chunk should never take more than 2 hours. + # Partial test runs(no chunk parallelization), 6 Hours + timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }} needs: - generate-matrix - - dependencies strategy: fail-fast: false matrix: include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }} + env: + SALT_TRANSPORT: ${{ matrix.transport }} steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + - name: Checkout Source Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Salt Version run: | @@ -210,31 +146,27 @@ jobs: brew install tree - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} - uses: actions/cache@v3 + uses: actions/cache@v3.3.1 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ inputs.python-version }}|${{ hashFiles('requirements/**/*.txt') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.gh-actions-python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + # If we get a cache miss here it means the dependencies step failed to save the 
cache + fail-on-cache-miss: true - - name: Set up Python ${{ inputs.python-version }} + - name: Set up Python ${{ inputs.gh-actions-python-version }} uses: actions/setup-python@v4 with: - python-version: "${{ inputs.python-version }}" + python-version: "${{ inputs.gh-actions-python-version }}" - name: Install Nox run: | - python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + python3 -m pip install 'nox==${{ inputs.nox-version }}' - name: Decompress .nox Directory run: | - nox -e decompress-dependencies -- ${{ inputs.distro-slug }} - - - name: Define Nox Session - run: | - if [ "${{ matrix.transport }}" != "tcp" ]; then - echo NOX_SESSION=${{ inputs.nox-session}} >> "$GITHUB_ENV" - else - echo NOX_SESSION=${{ inputs.nox-session}}-tcp >> "$GITHUB_ENV" - fi + nox --force-color -e decompress-dependencies -- ${{ inputs.distro-slug }} - name: Download testrun-changed-files.txt if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} @@ -252,11 +184,47 @@ jobs: SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" SKIP_CODE_COVERAGE: "1" run: | - sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- -k "mac or darwin" + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- -k "mac or darwin" + + - name: Run Fast/Changed Tests + id: run-fast-changed-tests + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }} + env: + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "0" + PRINT_TEST_PLAN_ONLY: "0" + PRINT_SYSTEM_INFO: "0" + RERUN_FAILURES: "1" + GITHUB_ACTIONS_PIPELINE: "1" + SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + COVERAGE_CONTEXT: ${{ inputs.distro-slug }} + run: | + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \ + -k "mac or darwin" --suppress-no-test-exit-code \ + --from-filenames=testrun-changed-files.txt - name: Run Slow/Changed Tests id: 
run-slow-changed-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] == false }} + env: + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "0" + PRINT_TEST_PLAN_ONLY: "0" + PRINT_SYSTEM_INFO: "0" + RERUN_FAILURES: "1" + GITHUB_ACTIONS_PIPELINE: "1" + SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + COVERAGE_CONTEXT: ${{ inputs.distro-slug }} + run: | + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \ + -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --slow-tests \ + --from-filenames=testrun-changed-files.txt + + - name: Run Core/Changed Tests + id: run-core-changed-tests + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] == false }} env: SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" @@ -266,13 +234,66 @@ jobs: GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + COVERAGE_CONTEXT: ${{ inputs.distro-slug }} run: | - sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \ - -k "mac or darwin" --suppress-no-test-exit-code --run-slow --from-filenames=testrun-changed-files.txt + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \ + -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --core-tests \ + --from-filenames=testrun-changed-files.txt - name: Run Fast Tests id: run-fast-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }} + env: + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "0" + PRINT_TEST_PLAN_ONLY: "0" + PRINT_SYSTEM_INFO: "0" + RERUN_FAILURES: "1" + GITHUB_ACTIONS_PIPELINE: "1" + 
SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + COVERAGE_CONTEXT: ${{ inputs.distro-slug }} + run: | + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \ + -k "mac or darwin" --suppress-no-test-exit-code + + - name: Run Slow Tests + id: run-slow-tests + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }} + env: + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "0" + PRINT_TEST_PLAN_ONLY: "0" + PRINT_SYSTEM_INFO: "0" + RERUN_FAILURES: "1" + GITHUB_ACTIONS_PIPELINE: "1" + SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + COVERAGE_CONTEXT: ${{ inputs.distro-slug }} + run: | + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \ + -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --slow-tests + + - name: Run Core Tests + id: run-core-tests + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }} + env: + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "0" + PRINT_TEST_PLAN_ONLY: "0" + PRINT_SYSTEM_INFO: "0" + RERUN_FAILURES: "1" + GITHUB_ACTIONS_PIPELINE: "1" + SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + COVERAGE_CONTEXT: ${{ inputs.distro-slug }} + run: | + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \ + -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --core-tests + + - name: Run Flaky Tests + id: run-flaky-tests + if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }} env: SKIP_REQUIREMENTS_INSTALL: "1" PRINT_TEST_SELECTION: "0" @@ -282,9 +303,10 @@ jobs: GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + 
COVERAGE_CONTEXT: ${{ inputs.distro-slug }} run: | - sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \ - -k "mac or darwin" + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \ + -k "mac or darwin" --suppress-no-test-exit-code --no-fast-tests --flaky-jail - name: Run Full Tests id: run-full-tests @@ -298,121 +320,104 @@ jobs: GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + COVERAGE_CONTEXT: ${{ inputs.distro-slug }} run: | - sudo -E nox -e ${{ env.NOX_SESSION }} -- ${{ matrix.tests-chunk }} -- \ - --run-slow -k "mac or darwin" + sudo -E nox --force-color -e ${{ inputs.nox-session }} -- ${{ matrix.tests-chunk }} -- \ + --slow-tests --core-tests -k "mac or darwin" - name: Fix file ownership run: | sudo chown -R "$(id -un)" . - name: Combine Coverage Reports - if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' + if: always() && inputs.skip-code-coverage == false run: | - nox -e combine-coverage + nox --force-color -e combine-coverage - name: Prepare Test Run Artifacts id: download-artifacts-from-vm - if: always() && job.status != 'cancelled' + if: always() run: | # Delete the salt onedir, we won't need it anymore and it will prevent # from it showing in the tree command below rm -rf artifacts/salt* tree -a artifacts if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then - mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }} - echo "COVERAGE_FILE=artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}" >> GITHUB_ENV + mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }} fi - name: Upload Code Coverage Test Run Artifacts if: always() && 
inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' uses: actions/upload-artifact@v3 with: - name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }} path: | artifacts/coverage/ - name: Upload JUnit XML Test Run Artifacts - if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + if: always() && steps.download-artifacts-from-vm.outcome == 'success' uses: actions/upload-artifact@v3 with: - name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }} path: | artifacts/xml-unittests-output/ - name: Upload Test Run Log Artifacts - if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + if: always() && steps.download-artifacts-from-vm.outcome == 'success' uses: actions/upload-artifact@v3 with: - name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }} path: | artifacts/logs + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && inputs.skip-junit-reports == false + with: + check_name: Test Results(${{ inputs.distro-slug }}, transport=${{ matrix.transport }}, tests-chunk=${{ matrix.tests-chunk }}) + report_paths: 'artifacts/xml-unittests-output/*.xml' + annotate_only: true + report: - name: Reports for ${{ inputs.distro-slug }}(${{ matrix.transport }}) - if: always() && (inputs.skip-code-coverage == false || inputs.skip-junit-reports == false) && needs.test.result != 'cancelled' && needs.test.result != 'skipped' - runs-on: ubuntu-latest + name: Test Reports + if: always() && 
inputs.skip-code-coverage == false && needs.test.result != 'cancelled' && needs.test.result != 'skipped' + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} needs: - - generate-matrix - test - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }} steps: - name: Checkout Source Code - uses: actions/checkout@v3 - - - name: Define Nox Session - run: | - if [ "${{ matrix.transport }}" != "tcp" ]; then - echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" - else - echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" - fi + uses: actions/checkout@v4 - name: Download Code Coverage Test Run Artifacts uses: actions/download-artifact@v3 if: ${{ inputs.skip-code-coverage == false }} id: download-coverage-artifacts with: - name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }} path: artifacts/coverage/ - - name: Download JUnit XML Test Run Artifacts - uses: actions/download-artifact@v3 - id: download-junit-artifacts - with: - name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} - path: artifacts/xml-unittests-output/ - - name: Show Downloaded Test Run Artifacts run: | tree -a artifacts - - name: Set up Python 3.9 + - name: Set up Python ${{ inputs.gh-actions-python-version }} uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: "${{ inputs.gh-actions-python-version }}" - name: Install Nox run: | - python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + python3 -m pip install 'nox==${{ inputs.nox-version }}' - - name: Combine Code Coverage - if: ${{ inputs.skip-code-coverage == false }} - continue-on-error: true + - name: Create XML Coverage Reports + if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && 
job.status != 'cancelled' run: | - nox --force-color -e combine-coverage - - - name: Upload Code Coverage DB - if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' - uses: actions/upload-artifact@v3 - with: - name: code-coverage - path: artifacts/coverage + nox --force-color -e create-xml-coverage-reports + mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml + mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml - name: Report Salt Code Coverage if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' @@ -420,23 +425,21 @@ jobs: run: | nox --force-color -e report-coverage -- salt - - name: Report Tests Code Coverage + - name: Report Combined Code Coverage if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' continue-on-error: true run: | - nox --force-color -e report-coverage -- tests + nox --force-color -e report-coverage - - name: Report Combined Code Coverage + - name: Rename Code Coverage DB if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' continue-on-error: true run: | - nox --force-color -e report-coverage + mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }} - - name: Publish Test Report - uses: mikepenz/action-junit-report@v3 - # always run even if the previous steps fails - if: always() && inputs.skip-junit-reports == false && steps.download-junit-artifacts.outcome == 'success' + - name: Upload Code Coverage DB + if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' + uses: actions/upload-artifact@v3 with: - check_name: Test Results(${{ inputs.distro-slug }}) - report_paths: 
'artifacts/xml-unittests-output/*.xml' - annotate_only: true + name: all-testrun-coverage-artifacts + path: artifacts/coverage diff --git a/.github/workflows/test-action.yml b/.github/workflows/test-action.yml index b2184bac7a32..f8635539cbd7 100644 --- a/.github/workflows/test-action.yml +++ b/.github/workflows/test-action.yml @@ -32,6 +32,20 @@ on: required: true type: string description: The platform arch being tested + nox-version: + required: true + type: string + description: The nox version to install + gh-actions-python-version: + required: false + type: string + description: The python version to run tests with + default: "3.10" + fips: + required: false + type: boolean + default: false + description: Test run with FIPS enabled package-name: required: false type: string @@ -47,10 +61,18 @@ on: type: boolean description: Skip Publishing JUnit Reports default: false - + workflow-slug: + required: false + type: string + description: Which workflow is running. + default: ci + default-timeout: + required: false + type: number + description: Timeout, in minutes, for the test job(Default 360, 6 hours). 
+ default: 360 env: - NOX_VERSION: "2022.8.7" COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" @@ -60,139 +82,32 @@ env: jobs: generate-matrix: - name: Generate Test Matrix + name: Test Matrix runs-on: - self-hosted - linux - x86_64 outputs: matrix-include: ${{ steps.generate-matrix.outputs.matrix }} - transport-matrix-include: ${{ steps.generate-transport-matrix.outputs.matrix }} - steps: - - name: Checkout Source Code - uses: actions/checkout@v3 - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Generate Test Matrix - id: generate-matrix - run: | - TEST_MATRIX=$(tools ci matrix ${{ inputs.distro-slug }}) - echo "$TEST_MATRIX" - echo "matrix=$TEST_MATRIX" >> "$GITHUB_OUTPUT" - - - name: Generate Transport Matrix - id: generate-transport-matrix - run: | - TRANSPORT_MATRIX=$(tools ci transport-matrix ${{ inputs.distro-slug }}) - echo "$TRANSPORT_MATRIX" - echo "matrix=$TRANSPORT_MATRIX" >> "$GITHUB_OUTPUT" - - dependencies: - name: Setup Test Dependencies - needs: - - generate-matrix - runs-on: - - self-hosted - - linux - - bastion - timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }} steps: - - name: Checkout Source Code - uses: actions/checkout@v3 - - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} - id: nox-dependencies-cache - uses: actions/cache@v3 - with: - path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - - - name: Download Onedir Tarball as an Artifact - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: 
artifacts/ - - - name: Decompress Onedir Tarball - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + - name: "Throttle Builds" shell: bash run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" - - name: PyPi Proxy - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - sed -i '7s;^;--index-url=https://pypi-proxy.saltstack.net/root/local/+simple/ --extra-index-url=https://pypi.org/simple\n;' requirements/static/ci/*/*.txt + - name: Checkout Source Code + uses: actions/checkout@v4 - name: Setup Python Tools Scripts - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - - name: Define Nox Session - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - if [ "${{ matrix.transport }}" != "tcp" ]; then - echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" - else - echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" - fi - - - name: Get Salt Project GitHub Actions Bot Environment - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") - SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) - echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - - - name: Start VM - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - id: spin-up-vm - run: | - tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }} - - - name: List Free Space - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools 
--timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true - - - name: Upload Checkout To VM - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm rsync ${{ inputs.distro-slug }} - - - name: Install Dependencies - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm install-dependencies --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} - - - name: Cleanup .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }} - - - name: Compress .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }} - - - name: Download Compressed .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm download-dependencies ${{ inputs.distro-slug }} - - - name: Destroy VM - if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' + - name: Generate Test Matrix + id: generate-matrix run: | - tools --timestamps vm destroy ${{ inputs.distro-slug }} + tools ci matrix --workflow=${{ inputs.workflow-slug }} ${{ fromJSON(inputs.testrun)['type'] == 'full' && '--full ' || '' }}${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} test: name: Test @@ -200,18 +115,28 @@ jobs: - self-hosted - linux - bastion - timeout-minutes: 300 # 5 Hours - More than this and something is wrong + # Full test runs. Each chunk should never take more than 2 hours. 
+ # Partial test runs(no chunk parallelization), 6 Hours + timeout-minutes: ${{ fromJSON(inputs.testrun)['type'] == 'full' && inputs.default-timeout || 360 }} needs: - - dependencies - generate-matrix strategy: fail-fast: false matrix: include: ${{ fromJSON(needs.generate-matrix.outputs.matrix-include) }} + env: + SALT_TRANSPORT: ${{ matrix.transport }} + TEST_GROUP: ${{ matrix.test-group || 1 }} steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + - name: Checkout Source Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Setup Salt Version run: | @@ -231,10 +156,14 @@ jobs: tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} - uses: actions/cache@v3 + uses: actions/cache@v3.3.1 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ matrix.transport }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.gh-actions-python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: PyPi Proxy run: | @@ -242,14 +171,8 @@ jobs: - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts - - - name: Define Nox Session - run: | - if [ "${{ matrix.transport }}" != "tcp" ]; then - echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" - else - echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" - fi + with: + cache-prefix: 
${{ inputs.cache-prefix }} - name: Download testrun-changed-files.txt if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} @@ -285,42 +208,85 @@ jobs: - name: Show System Info & Test Plan run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ - --nox-session=${{ env.NOX_SESSION }} ${{ inputs.distro-slug }} \ + --nox-session=${{ inputs.nox-session }} ${{ inputs.distro-slug }} \ ${{ matrix.tests-chunk }} + - name: Run Fast/Changed Tests + id: run-fast-changed-tests + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] == false }} + run: | + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + ${{ matrix.tests-chunk }} -- --suppress-no-test-exit-code \ + --from-filenames=testrun-changed-files.txt + - name: Run Slow/Changed Tests id: run-slow-changed-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] == false }} + run: | + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests --suppress-no-test-exit-code \ + --from-filenames=testrun-changed-files.txt + + - name: Run Core/Changed Tests + id: run-core-changed-tests + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] == false }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ 
(inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ - ${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- \ - --run-slow --suppress-no-test-exit-code --from-filenames=testrun-changed-files.txt + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests --suppress-no-test-exit-code \ + --from-filenames=testrun-changed-files.txt - name: Run Fast Tests id: run-fast-tests - if: ${{ fromJSON(inputs.testrun)['type'] != 'full' }} + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['fast'] }} + run: | + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ + ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} + + - name: Run Slow Tests + id: run-slow-tests + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['slow'] }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ - ${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + ${{ matrix.tests-chunk }} -- --no-fast-tests --slow-tests + + - name: Run Core Tests + id: run-core-tests + if: ${{ fromJSON(inputs.testrun)['type'] != 'full' && fromJSON(inputs.testrun)['selected_tests']['core'] }} + run: | + tools --timestamps 
--no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + ${{ matrix.tests-chunk }} -- --no-fast-tests --core-tests + + - name: Run Flaky Tests + id: run-flaky-tests + if: ${{ fromJSON(inputs.testrun)['selected_tests']['flaky'] }} + run: | + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + ${{ matrix.tests-chunk }} -- --no-fast-tests --flaky-jail - name: Run Full Tests id: run-full-tests if: ${{ fromJSON(inputs.testrun)['type'] == 'full' }} run: | tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ - --nox-session=${{ env.NOX_SESSION }} --rerun-failures ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ - ${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --run-slow + --nox-session=${{ inputs.nox-session }} --rerun-failures -E SALT_TRANSPORT ${{ (inputs.skip-code-coverage && matrix.tests-chunk != 'unit') && '--skip-code-coverage' || '' }} \ + -E TEST_GROUP ${{ matrix.fips && '--fips ' || '' }}${{ inputs.distro-slug }} ${{ matrix.tests-chunk }} -- --slow-tests --core-tests \ + --test-group-count=${{ matrix.test-group-count || 1 }} --test-group=${{ matrix.test-group || 1 }} - name: Combine Coverage Reports - if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled' + if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' run: | tools --timestamps vm combine-coverage ${{ inputs.distro-slug }} - name: Download Test Run Artifacts id: download-artifacts-from-vm - if: always() && 
steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled' + if: always() && steps.spin-up-vm.outcome == 'success' run: | tools --timestamps vm download-artifacts ${{ inputs.distro-slug }} # Delete the salt onedir, we won't need it anymore and it will prevent @@ -328,107 +294,84 @@ jobs: rm -rf artifacts/salt* tree -a artifacts if [ "${{ inputs.skip-code-coverage }}" != "true" ]; then - mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }} - echo "COVERAGE_FILE=artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ env.NOX_SESSION }}.${{ matrix.tests-chunk }}" >> GITHUB_ENV + mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }}.${{ matrix.transport }}.${{ matrix.tests-chunk }}.grp${{ matrix.test-group || '1' }} fi - name: Destroy VM if: always() run: | - tools --timestamps vm destroy ${{ inputs.distro-slug }} || true + tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true - name: Upload Code Coverage Test Run Artifacts if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' uses: actions/upload-artifact@v3 with: - name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }} path: | artifacts/coverage/ - name: Upload JUnit XML Test Run Artifacts - if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + if: always() && steps.download-artifacts-from-vm.outcome == 'success' uses: actions/upload-artifact@v3 with: - name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }} path: | artifacts/xml-unittests-output/ - name: Upload Test Run Log 
Artifacts - if: always() && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + if: always() && steps.download-artifacts-from-vm.outcome == 'success' uses: actions/upload-artifact@v3 with: - name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} + name: testrun-log-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }}-${{ matrix.transport }} path: | artifacts/logs + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && inputs.skip-junit-reports == false && job.status != 'cancelled' + with: + check_name: Test Results(${{ inputs.distro-slug }}, transport=${{ matrix.transport }}, tests-chunk=${{ matrix.tests-chunk }}, group=${{ matrix.test-group || '1' }}) + report_paths: 'artifacts/xml-unittests-output/*.xml' + annotate_only: true + report: - name: Reports for ${{ inputs.distro-slug }}(${{ matrix.transport }}) - if: always() && (inputs.skip-code-coverage == false || inputs.skip-junit-reports == false) && needs.test.result != 'cancelled' && needs.test.result != 'skipped' + name: Test Reports + if: always() && inputs.skip-code-coverage == false && needs.test.result != 'cancelled' && needs.test.result != 'skipped' runs-on: - self-hosted - linux - x86_64 needs: - - generate-matrix - test - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.transport-matrix-include) }} steps: - name: Checkout Source Code - uses: actions/checkout@v3 - - - name: Define Nox Session - run: | - if [ "${{ matrix.transport }}" != "tcp" ]; then - echo NOX_SESSION=${{ inputs.nox-session }} >> "$GITHUB_ENV" - else - echo NOX_SESSION=${{ inputs.nox-session }}-tcp >> "$GITHUB_ENV" - fi + uses: actions/checkout@v4 - name: Download Code Coverage Test Run Artifacts uses: actions/download-artifact@v3 if: ${{ inputs.skip-code-coverage == false }} id: download-coverage-artifacts with: - name: testrun-coverage-artifacts-${{ 
inputs.distro-slug }}-${{ env.NOX_SESSION }} + name: testrun-coverage-artifacts-${{ inputs.distro-slug }}-${{ inputs.nox-session }} path: artifacts/coverage/ - - name: Download JUnit XML Test Run Artifacts - uses: actions/download-artifact@v3 - id: download-junit-artifacts - with: - name: testrun-junit-artifacts-${{ inputs.distro-slug }}-${{ env.NOX_SESSION }} - path: artifacts/xml-unittests-output/ - - name: Show Downloaded Test Run Artifacts run: | tree -a artifacts - - name: Set up Python 3.9 - uses: actions/setup-python@v4 - with: - python-version: "3.9" - - name: Install Nox run: | - python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + python3 -m pip install 'nox==${{ inputs.nox-version }}' - - name: Combine Code Coverage - if: ${{ inputs.skip-code-coverage == false }} - continue-on-error: true + - name: Create XML Coverage Reports + if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' && job.status != 'cancelled' run: | - nox --force-color -e combine-coverage - - - name: Upload Code Coverage DB - if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' - uses: actions/upload-artifact@v3 - with: - name: code-coverage - path: artifacts/coverage + nox --force-color -e create-xml-coverage-reports + mv artifacts/coverage/salt.xml artifacts/coverage/salt..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml + mv artifacts/coverage/tests.xml artifacts/coverage/tests..${{ inputs.distro-slug }}..${{ inputs.nox-session }}.xml - name: Report Salt Code Coverage if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' @@ -436,23 +379,21 @@ jobs: run: | nox --force-color -e report-coverage -- salt - - name: Report Tests Code Coverage + - name: Report Combined Code Coverage if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' continue-on-error: true run: | 
- nox --force-color -e report-coverage -- tests + nox --force-color -e report-coverage - - name: Report Combined Code Coverage + - name: Rename Code Coverage DB if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' continue-on-error: true run: | - nox --force-color -e report-coverage + mv artifacts/coverage/.coverage artifacts/coverage/.coverage.${{ inputs.distro-slug }}.${{ inputs.nox-session }} - - name: Publish Test Report - uses: mikepenz/action-junit-report@v3 - # always run even if the previous steps fails - if: always() && inputs.skip-junit-reports == false && steps.download-junit-artifacts.outcome == 'success' + - name: Upload Code Coverage DB + if: always() && inputs.skip-code-coverage == false && steps.download-coverage-artifacts.outcome == 'success' + uses: actions/upload-artifact@v3 with: - check_name: Test Results(${{ inputs.distro-slug }}) - report_paths: 'artifacts/xml-unittests-output/*.xml' - annotate_only: true + name: all-testrun-coverage-artifacts + path: artifacts/coverage diff --git a/.github/workflows/test-package-downloads-action.yml b/.github/workflows/test-package-downloads-action.yml new file mode 100644 index 000000000000..6bed0c6a01c3 --- /dev/null +++ b/.github/workflows/test-package-downloads-action.yml @@ -0,0 +1,888 @@ +name: Test Download Packages + +on: + workflow_call: + inputs: + salt-version: + type: string + required: true + description: The Salt version of the packages to install and test + cache-prefix: + required: true + type: string + description: Seed used to invalidate caches + environment: + required: true + type: string + description: The environment to run tests against + latest-release: + required: true + type: string + description: The latest salt release + nox-version: + required: true + type: string + description: The nox version to install + python-version: + required: false + type: string + description: The python version to run tests with + default: "3.10" 
+ package-name: + required: false + type: string + description: The onedir package name to use + default: salt + skip-code-coverage: + required: false + type: boolean + description: Skip code coverage + default: false + nox-session: + required: false + type: string + description: The nox session to run + default: ci-test-onedir + +env: + COLUMNS: 190 + AWS_MAX_ATTEMPTS: "10" + AWS_RETRY_MODE: "adaptive" + PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ + PIP_EXTRA_INDEX_URL: https://pypi.org/simple + +jobs: + + linux: + name: Linux + runs-on: + - self-hosted + - linux + - bastion + environment: ${{ inputs.environment }} + timeout-minutes: 120 # 2 Hours - More than this and something is wrong + strategy: + fail-fast: false + matrix: + include: + - distro-slug: almalinux-8 + arch: x86_64 + pkg-type: package + - distro-slug: almalinux-8-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: almalinux-8-arm64 + arch: arm64 + pkg-type: package + - distro-slug: almalinux-9 + arch: x86_64 + pkg-type: package + - distro-slug: almalinux-9-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: almalinux-9-arm64 + arch: arm64 + pkg-type: package + - distro-slug: amazonlinux-2 + arch: x86_64 + pkg-type: package + - distro-slug: amazonlinux-2-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: amazonlinux-2-arm64 + arch: arm64 + pkg-type: package + - distro-slug: amazonlinux-2023 + arch: x86_64 + pkg-type: package + - distro-slug: amazonlinux-2023-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: amazonlinux-2023-arm64 + arch: arm64 + pkg-type: package + - distro-slug: centos-7 + arch: x86_64 + pkg-type: package + - distro-slug: centos-7-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: centos-7-arm64 + arch: arm64 + pkg-type: package + - distro-slug: centosstream-8 + arch: x86_64 + pkg-type: package + - distro-slug: centosstream-8-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: centosstream-8-arm64 + arch: arm64 + 
pkg-type: package + - distro-slug: centosstream-9 + arch: x86_64 + pkg-type: package + - distro-slug: centosstream-9-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: centosstream-9-arm64 + arch: arm64 + pkg-type: package + - distro-slug: debian-10 + arch: x86_64 + pkg-type: package + - distro-slug: debian-11 + arch: x86_64 + pkg-type: package + - distro-slug: debian-11-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: debian-12 + arch: x86_64 + pkg-type: package + - distro-slug: debian-12-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: fedora-37 + arch: x86_64 + pkg-type: package + - distro-slug: fedora-37-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: fedora-37-arm64 + arch: arm64 + pkg-type: package + - distro-slug: fedora-38 + arch: x86_64 + pkg-type: package + - distro-slug: fedora-38-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: fedora-38-arm64 + arch: arm64 + pkg-type: package + - distro-slug: photonos-3 + arch: x86_64 + pkg-type: package + - distro-slug: photonos-3-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: photonos-3-arm64 + arch: arm64 + pkg-type: package + - distro-slug: photonos-4 + arch: x86_64 + pkg-type: package + - distro-slug: photonos-4-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: photonos-4-arm64 + arch: arm64 + pkg-type: package + - distro-slug: photonos-5 + arch: x86_64 + pkg-type: package + - distro-slug: photonos-5-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: photonos-5-arm64 + arch: arm64 + pkg-type: package + - distro-slug: ubuntu-20.04 + arch: x86_64 + pkg-type: package + - distro-slug: ubuntu-20.04-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: ubuntu-22.04 + arch: x86_64 + pkg-type: package + - distro-slug: ubuntu-22.04-arm64 + arch: aarch64 + pkg-type: package + - distro-slug: ubuntu-22.04 + arch: x86_64 + pkg-type: onedir + - distro-slug: ubuntu-22.04-arm64 + arch: aarch64 + pkg-type: onedir + + steps: + + - name: "Throttle 
Builds" + shell: bash + run: | + t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" + + - name: Checkout Source Code + uses: actions/checkout@v4 + + - name: Download Onedir Tarball as an Artifact + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz + path: artifacts/ + + - name: Decompress Onedir Tarball + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-linux-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz + + - name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + uses: actions/cache@v3.3.1 + with: + path: nox.${{ matrix.distro-slug }}.tar.* + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }}-pkg-download-linux + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: Start VM + id: spin-up-vm + run: | + tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }} + + - name: List Free Space + 
run: | + tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true + + - name: Upload Checkout To VM + run: | + tools --timestamps vm rsync ${{ matrix.distro-slug }} + + - name: Decompress .nox Directory + run: | + tools --timestamps vm decompress-dependencies ${{ matrix.distro-slug }} + + - name: Show System Info & Test Plan + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ matrix.arch }} + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ + -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ + --nox-session=${{ inputs.nox-session }}-pkgs ${{ matrix.distro-slug }} -- download-pkgs + + - name: Run Package Download Tests + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ matrix.arch }} + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + tools --timestamps 
--no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ + --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ matrix.distro-slug }} -- download-pkgs + + - name: Combine Coverage Reports + if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled' + run: | + tools --timestamps vm combine-coverage ${{ matrix.distro-slug }} + + - name: Download Test Run Artifacts + id: download-artifacts-from-vm + if: always() && steps.spin-up-vm.outcome == 'success' + run: | + tools --timestamps vm download-artifacts ${{ matrix.distro-slug }} + # Delete the salt onedir, we won't need it anymore and it will prevent + # it from showing in the tree command below + rm -rf artifacts/salt* + tree -a artifacts + + - name: Destroy VM + if: always() + run: | + tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} || true + + - name: Fix file ownership + run: | + sudo chown -R "$(id -un)" . 
+ + - name: Install Codecov CLI + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + # We can't yet use tokenless uploads with the codecov CLI + # python3 -m pip install codecov-cli + # + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import + curl -Os https://uploader.codecov.io/latest/linux/codecov + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + + - name: Upload Source Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + if [ ! -s artifacts/coverage/salt.xml ]; then + echo "The artifacts/coverage/salt.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/salt.xml \ + # --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/salt.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags salt,${{ matrix.distro-slug }},pkg \ + --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? 
+ n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Tests Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + if [ ! -s artifacts/coverage/tests.xml ]; then + echo "The artifacts/coverage/tests.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/tests.xml \ + # --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/tests.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags tests,${{ matrix.distro-slug }},pkg \ + --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? 
+ n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Test Run Artifacts + if: always() && steps.download-artifacts-from-vm.outcome == 'success' + uses: actions/upload-artifact@v3 + with: + name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }} + path: | + artifacts + !artifacts/salt/* + !artifacts/salt-*.tar.* + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && job.status != 'cancelled' && steps.download-artifacts-from-vm.outcome == 'success' + with: + check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }}) + report_paths: 'artifacts/xml-unittests-output/*.xml' + annotate_only: true + + + macos: + name: MacOS + runs-on: ${{ matrix.distro-slug }} + environment: ${{ inputs.environment }} + timeout-minutes: 120 # 2 Hours - More than this and something is wrong + strategy: + fail-fast: false + matrix: + include: + - distro-slug: macos-12 + arch: x86_64 + pkg-type: package + - distro-slug: macos-13 + arch: x86_64 + pkg-type: package + - distro-slug: macos-13-xlarge + arch: arm64 + pkg-type: package + - distro-slug: macos-13-xlarge + arch: aarch64 + pkg-type: onedir + + steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + + - name: Checkout Source Code + uses: actions/checkout@v4 + + - name: Download Onedir Tarball as an Artifact + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz + path: artifacts/ + + - name: Install System Dependencies + run: | + brew install tree + + - name: Decompress Onedir Tarball + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts 
+ tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-darwin-${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}.tar.xz + + - name: Set up Python ${{ inputs.python-version }} + uses: actions/setup-python@v4 + with: + python-version: "${{ inputs.python-version }}" + update-environment: true + + - name: Install Nox + run: | + python3 -m pip install 'nox==${{ inputs.nox-version }}' + + - name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + uses: actions/cache@v3.3.1 + with: + path: nox.${{ matrix.distro-slug }}.tar.* + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch == 'arm64' && 'aarch64' || matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true + + - name: Decompress .nox Directory + run: | + nox --force-color -e decompress-dependencies -- ${{ matrix.distro-slug }} + + - name: Show System Info & Test Plan + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "1" + PRINT_TEST_PLAN_ONLY: "1" + PRINT_SYSTEM_INFO: "1" + GITHUB_ACTIONS_PIPELINE: "1" + SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- download-pkgs + + - name: Run Package Download Tests + env: + SKIP_REQUIREMENTS_INSTALL: "1" + PRINT_TEST_SELECTION: "0" + PRINT_TEST_PLAN_ONLY: "0" + PRINT_SYSTEM_INFO: "0" + RERUN_FAILURES: "1" + GITHUB_ACTIONS_PIPELINE: "1" + SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + COVERAGE_CONTEXT: ${{ 
matrix.distro-slug }} + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ matrix.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- download-pkgs + + - name: Fix file ownership + run: | + sudo chown -R "$(id -un)" . + + - name: Combine Coverage Reports + if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' + run: | + nox --force-color -e combine-coverage + + - name: Prepare Test Run Artifacts + id: download-artifacts-from-vm + if: always() && job.status != 'cancelled' + run: | + # Delete the salt onedir, we won't need it anymore and it will prevent + # from it showing in the tree command below + rm -rf artifacts/salt* + tree -a artifacts + + - name: Install Codecov CLI + if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' + run: | + # We can't yet use tokenless uploads with the codecov CLI + # python3 -m pip install codecov-cli + # + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import + curl -Os https://uploader.codecov.io/latest/macos/codecov + curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/latest/macos/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + + - name: Upload Source Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' + run: | + if [ ! 
-s artifacts/coverage/salt.xml ]; then + echo "The artifacts/coverage/salt.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/salt.xml \ + # --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/salt.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags salt,${{ matrix.distro-slug }},pkg \ + --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? + n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Tests Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && job.status != 'cancelled' + run: | + if [ ! 
-s artifacts/coverage/tests.xml ]; then + echo "The artifacts/coverage/tests.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/tests.xml \ + # --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/tests.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags tests,${{ matrix.distro-slug }},pkg \ + --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? + n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Test Run Artifacts + if: always() + uses: actions/upload-artifact@v3 + with: + name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }} + path: | + artifacts + !artifacts/salt/* + !artifacts/salt-*.tar.* + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && job.status != 'cancelled' + with: + check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }}) + report_paths: 'artifacts/xml-unittests-output/*.xml' + annotate_only: true + + + windows: + name: Windows + runs-on: + - self-hosted + - linux + - bastion + environment: ${{ inputs.environment }} + timeout-minutes: 120 # 2 Hours - More than this and something is wrong + strategy: + fail-fast: false + matrix: + include: + - distro-slug: windows-2022 + arch: amd64 + pkg-type: nsis + - distro-slug: windows-2022 + arch: amd64 + 
pkg-type: msi + - distro-slug: windows-2022 + arch: amd64 + pkg-type: onedir + + steps: + - name: Checkout Source Code + uses: actions/checkout@v4 + + - name: Download Onedir Tarball as an Artifact + uses: actions/download-artifact@v3 + with: + name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz + path: artifacts/ + + - name: Decompress Onedir Tarball + shell: bash + run: | + python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" + cd artifacts + tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-windows-${{ matrix.arch }}.tar.xz + + - name: Download cached nox.${{ matrix.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + uses: actions/cache@v3.3.1 + with: + path: nox.${{ matrix.distro-slug }}.tar.* + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ matrix.arch }}|${{ matrix.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true + + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }}-pkg-download-windows + + - name: Get Salt Project GitHub Actions Bot Environment + run: | + TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") + SPB_ENVIRONMENT=$(curl -sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) + echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" + + - name: Start VM + id: spin-up-vm + run: | + tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ matrix.distro-slug }} + + - name: List Free Space + run: | + tools --timestamps vm ssh ${{ matrix.distro-slug }} -- df -h || true + + - name: Upload Checkout To VM + run: | + 
tools --timestamps vm rsync ${{ matrix.distro-slug }} + + - name: Decompress .nox Directory + run: | + tools --timestamps vm decompress-dependencies ${{ matrix.distro-slug }} + + - name: Show System Info & Test Plan + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ matrix.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ + -E SALT_RELEASE -E SALT_REPO_ARCH -E SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ + --nox-session=${{ inputs.nox-session }}-pkgs ${{ matrix.distro-slug }} -- download-pkgs + + - name: Run Package Download Tests + env: + SALT_RELEASE: "${{ inputs.salt-version }}" + SALT_REPO_ARCH: ${{ matrix.arch }} + LATEST_SALT_RELEASE: "${{ inputs.latest-release }}" + SALT_REPO_TYPE: ${{ inputs.environment }} + SALT_REPO_USER: ${{ secrets.SALT_REPO_USER }} + SALT_REPO_PASS: ${{ secrets.SALT_REPO_PASS }} + SALT_REPO_DOMAIN_RELEASE: ${{ vars.SALT_REPO_DOMAIN_RELEASE || 'repo.saltproject.io' }} + SALT_REPO_DOMAIN_STAGING: ${{ vars.SALT_REPO_DOMAIN_STAGING || 'staging.repo.saltproject.io' }} + SKIP_CODE_COVERAGE: "${{ inputs.skip-code-coverage && '1' || '0' }}" + DOWNLOAD_TEST_PACKAGE_TYPE: ${{ matrix.pkg-type }} + run: | + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install \ + -E SALT_RELEASE -E SALT_REPO_ARCH -E 
SALT_REPO_TYPE -E SALT_REPO_USER -E SALT_REPO_PASS \ + -E SALT_REPO_DOMAIN_RELEASE -E SALT_REPO_DOMAIN_STAGING -E LATEST_SALT_RELEASE -E DOWNLOAD_TEST_PACKAGE_TYPE \ + --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ matrix.distro-slug }} -- download-pkgs + + - name: Combine Coverage Reports + if: always() && inputs.skip-code-coverage == false && steps.spin-up-vm.outcome == 'success' && job.status != 'cancelled' + run: | + tools --timestamps vm combine-coverage ${{ matrix.distro-slug }} + + - name: Download Test Run Artifacts + id: download-artifacts-from-vm + if: always() && steps.spin-up-vm.outcome == 'success' + run: | + tools --timestamps vm download-artifacts ${{ matrix.distro-slug }} + # Delete the salt onedir, we won't need it anymore and it will prevent + # from it showing in the tree command below + rm -rf artifacts/salt* + tree -a artifacts + + - name: Destroy VM + if: always() + run: | + tools --timestamps vm destroy --no-wait ${{ matrix.distro-slug }} || true + + - name: Fix file ownership + run: | + sudo chown -R "$(id -un)" . + + - name: Install Codecov CLI + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + # We can't yet use tokenless uploads with the codecov CLI + # python3 -m pip install codecov-cli + # + curl https://keybase.io/codecovsecurity/pgp_keys.asc | gpg --no-default-keyring --import + curl -Os https://uploader.codecov.io/latest/linux/codecov + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM + curl -Os https://uploader.codecov.io/latest/linux/codecov.SHA256SUM.sig + gpg --verify codecov.SHA256SUM.sig codecov.SHA256SUM + shasum -a 256 -c codecov.SHA256SUM + chmod +x codecov + + - name: Upload Source Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + if [ ! 
-s artifacts/coverage/salt.xml ]; then + echo "The artifacts/coverage/salt.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/salt.xml \ + # --flag salt --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/salt.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags salt,${{ matrix.distro-slug }},pkg \ + --name salt.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? + n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Tests Code Coverage To Codecov + if: always() && inputs.skip-code-coverage == false && steps.download-artifacts-from-vm.outcome == 'success' && job.status != 'cancelled' + run: | + if [ ! 
-s artifacts/coverage/tests.xml ]; then + echo "The artifacts/coverage/tests.xml file does not exist" + exit 1 + fi + # We can't yet use tokenless uploads with the codecov CLI + #codecovcli --auto-load-params-from GithubActions --verbose --token ${{ secrets.CODECOV_TOKEN }} \ + # do-upload --git-service github --sha ${{ github.sha }} \ + # --file artifacts/coverage/tests.xml \ + # --flag tests --flag ${{ matrix.distro-slug }} --flag pkg \ + # --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs + n=0 + until [ "$n" -ge 5 ] + do + if ./codecov --file artifacts/coverage/tests.xml \ + --sha ${{ github.event.pull_request.head.sha || github.sha }} ${{ github.event_name == 'pull_request' && format('--parent {0}', github.event.pull_request.base.sha) }} \ + --flags tests,${{ matrix.distro-slug }},pkg \ + --name tests.${{ matrix.distro-slug }}.${{ inputs.nox-session }}.download-pkgs --nonZero; then + rc=$? + break + fi + rc=$? + n=$((n+1)) + sleep 15 + done + if [ "$rc" -ne 0 ]; then + echo "Failed to upload codecov stats" + exit 1 + fi + + - name: Upload Test Run Artifacts + if: always() && steps.download-artifacts-from-vm.outcome == 'success' + uses: actions/upload-artifact@v3 + with: + name: pkg-testrun-artifacts-${{ matrix.distro-slug }}-${{ matrix.arch }} + path: | + artifacts + !artifacts/salt/* + !artifacts/salt-*.tar.* + + - name: Publish Test Report + uses: mikepenz/action-junit-report@v3 + # always run even if the previous steps fails + if: always() && job.status != 'cancelled' && steps.download-artifacts-from-vm.outcome == 'success' + with: + check_name: Overall Test Results(${{ matrix.distro-slug }} ${{ matrix.arch }} ${{ matrix.pkg-type }} ) + report_paths: 'artifacts/xml-unittests-output/*.xml' + annotate_only: true diff --git a/.github/workflows/test-packages-action-macos.yml b/.github/workflows/test-packages-action-macos.yml index 144b7c403eb9..4dac7599e348 100644 --- a/.github/workflows/test-packages-action-macos.yml +++ 
b/.github/workflows/test-packages-action-macos.yml @@ -27,16 +27,29 @@ on: required: true type: string description: Seed used to invalidate caches + testing-releases: + required: true + type: string + description: A JSON list of releases to test upgrades against + nox-version: + required: true + type: string + description: The nox version to install python-version: required: false type: string description: The python version to run tests with - default: "3.9" + default: "3.10" package-name: required: false type: string description: The onedir package name to use default: salt + nox-session: + required: false + type: string + description: The nox session to run + default: ci-test-onedir skip-code-coverage: required: false type: boolean @@ -48,116 +61,47 @@ on: description: Skip Publishing JUnit Reports default: false - env: - NOX_VERSION: "2022.8.7" - COLUMNS: 160 - AWS_MAX_ATTEMPTS: "10" - AWS_RETRY_MODE: "adaptive" + COLUMNS: 190 PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ PIP_EXTRA_INDEX_URL: https://pypi.org/simple jobs: generate-matrix: - name: Generate Package Test Matrix - runs-on: ubuntu-latest + name: Generate Matrix + runs-on: + - self-hosted + - linux + - x86_64 outputs: pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }} steps: - - name: Checkout Source Code - uses: actions/checkout@v3 - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - - name: Generate Package Test Matrix - id: generate-pkg-matrix + - name: "Throttle Builds" + shell: bash run: | - PKG_MATRIX=$(tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }}) - echo "$PKG_MATRIX" - echo "matrix=$PKG_MATRIX" >> "$GITHUB_OUTPUT" + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" - dependencies: - name: Setup Test Dependencies - needs: - - generate-matrix - runs-on: ${{ inputs.distro-slug }} - timeout-minutes: 90 - strategy: - 
fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }} - steps: - name: Checkout Source Code - uses: actions/checkout@v3 - - - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ matrix.nox-session }} - id: nox-dependencies-cache - uses: actions/cache@v3 - with: - path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ matrix.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - - - name: Download Onedir Tarball as an Artifact - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: artifacts/ + uses: actions/checkout@v4 - - name: Decompress Onedir Tarball - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - shell: bash - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - - - name: Set up Python ${{ inputs.python-version }} - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/setup-python@v4 + - name: Setup Python Tools Scripts + uses: ./.github/actions/setup-python-tools-scripts with: - python-version: "${{ inputs.python-version }}" + cache-prefix: ${{ inputs.cache-prefix }} - - name: Install System Dependencies - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - brew install openssl@3 - - - name: Install Nox - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + - name: Generate Package Test Matrix + id: generate-pkg-matrix run: | - python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + tools ci pkg-matrix ${{ inputs.distro-slug }} ${{ inputs.pkg-type }} --testing-releases ${{ 
join(fromJSON(inputs.testing-releases), ' ') }} - - name: Install Dependencies - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - env: - PRINT_TEST_SELECTION: "0" - PRINT_SYSTEM_INFO: "0" - run: | - export PYCURL_SSL_LIBRARY=openssl - export LDFLAGS="-L/usr/local/opt/openssl@3/lib" - export CPPFLAGS="-I/usr/local/opt/openssl@3/include" - export PKG_CONFIG_PATH="/usr/local/opt/openssl@3/lib/pkgconfig" - nox --install-only -e ${{ matrix.nox-session }} - - - name: Cleanup .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - nox -e "pre-archive-cleanup(pkg=False)" - - - name: Compress .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - nox -e compress-dependencies -- ${{ inputs.distro-slug }} test: name: Test runs-on: ${{ inputs.distro-slug }} timeout-minutes: 120 # 2 Hours - More than this and something is wrong needs: - - dependencies - generate-matrix strategy: fail-fast: false @@ -165,14 +109,20 @@ jobs: include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }} steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + - name: Checkout Source Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download Packages uses: actions/download-artifact@v3 with: name: salt-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }} - path: pkg/artifacts/ + path: artifacts/pkg/ - name: Install System Dependencies run: | @@ -180,7 +130,7 @@ jobs: - name: List Packages run: | - tree pkg/artifacts + tree artifacts/pkg/ - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 @@ -202,17 +152,21 @@ jobs: - name: Install Nox run: | - python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + python3 -m pip install 'nox==${{ inputs.nox-version }}' - - name: Download cached nox.${{ inputs.distro-slug }}.tar.* 
for session ${{ matrix.nox-session }} - uses: actions/cache@v3 + - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + uses: actions/cache@v3.3.1 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ matrix.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Decompress .nox Directory run: | - nox -e decompress-dependencies -- ${{ inputs.distro-slug }} + nox --force-color -e decompress-dependencies -- ${{ inputs.distro-slug }} - name: Show System Info & Test Plan env: @@ -223,7 +177,8 @@ jobs: GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" run: | - sudo -E nox -e ${{ matrix.nox-session }} + sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \ + ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Run Package Tests env: @@ -234,8 +189,10 @@ jobs: RERUN_FAILURES: "1" GITHUB_ACTIONS_PIPELINE: "1" SKIP_INITIAL_GH_ACTIONS_FAILURES: "1" + COVERAGE_CONTEXT: ${{ inputs.distro-slug }} run: | - sudo -E nox -e ${{ matrix.nox-session }} + sudo -E nox --force-color -e ${{ inputs.nox-session }}-pkgs -- ${{ matrix.tests-chunk }} \ + ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Fix file ownership run: | @@ -243,7 +200,7 @@ jobs: - name: Prepare Test Run Artifacts id: download-artifacts-from-vm - if: always() && job.status != 'cancelled' + if: always() run: | # Delete the salt onedir, we won't need it anymore and it will prevent # from it showing in the tree 
command below @@ -251,18 +208,19 @@ jobs: tree -a artifacts - name: Upload Test Run Artifacts - if: always() && job.status != 'cancelled' + if: always() uses: actions/upload-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.nox-session }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: | artifacts + !artifacts/pkg/* !artifacts/salt/* !artifacts/salt-*.tar.* report: - name: Reports for ${{ inputs.distro-slug }}(${{ matrix.nox-session }}) - runs-on: ubuntu-latest + name: Report + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} if: always() && (inputs.skip-code-coverage == false || inputs.skip-junit-reports == false) && needs.test.result != 'cancelled' && needs.test.result != 'skipped' needs: - test @@ -274,13 +232,13 @@ jobs: steps: - name: Checkout Source Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download Test Run Artifacts id: download-test-run-artifacts uses: actions/download-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.nox-session }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: artifacts - name: Show Test Run Artifacts @@ -288,20 +246,20 @@ jobs: run: | tree -a artifacts - - name: Set up Python 3.9 + - name: Set up Python ${{ inputs.python-version }} uses: actions/setup-python@v4 with: - python-version: "3.9" + python-version: "${{ inputs.python-version }}" - name: Install Nox run: | - python3 -m pip install 'nox==${{ env.NOX_VERSION }}' + python3 -m pip install 'nox==${{ inputs.nox-version }}' - name: Publish Test Report uses: mikepenz/action-junit-report@v3 # always run even if the previous steps fails if: always() && inputs.skip-junit-reports == false && steps.download-test-run-artifacts.outcome == 'success' with: - check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.nox-session }}) + 
check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}) report_paths: 'artifacts/xml-unittests-output/*.xml' annotate_only: true diff --git a/.github/workflows/test-packages-action.yml b/.github/workflows/test-packages-action.yml index ebb111556529..fb77b855693c 100644 --- a/.github/workflows/test-packages-action.yml +++ b/.github/workflows/test-packages-action.yml @@ -27,11 +27,34 @@ on: required: true type: string description: Seed used to invalidate caches + testing-releases: + required: true + type: string + description: A JSON list of releases to test upgrades against + nox-version: + required: true + type: string + description: The nox version to install + python-version: + required: false + type: string + description: The python version to run tests with + default: "3.10" + fips: + required: false + type: boolean + default: false + description: Test run with FIPS enabled package-name: required: false type: string description: The onedir package name to use default: salt + nox-session: + required: false + type: string + description: The nox session to run + default: ci-test-onedir skip-code-coverage: required: false type: boolean @@ -43,10 +66,8 @@ on: description: Skip Publishing JUnit Reports default: false - env: - NOX_VERSION: "2022.8.7" - COLUMNS: 160 + COLUMNS: 190 AWS_MAX_ATTEMPTS: "10" AWS_RETRY_MODE: "adaptive" PIP_INDEX_URL: https://pypi-proxy.saltstack.net/root/local/+simple/ @@ -55,7 +76,7 @@ env: jobs: generate-matrix: - name: Generate Package Test Matrix + name: Generate Matrix runs-on: - self-hosted - linux @@ -63,109 +84,26 @@ jobs: outputs: pkg-matrix-include: ${{ steps.generate-pkg-matrix.outputs.matrix }} steps: - - name: Checkout Source Code - uses: actions/checkout@v3 - - - name: Setup Python Tools Scripts - uses: ./.github/actions/setup-python-tools-scripts - - name: Generate Package Test Matrix - id: generate-pkg-matrix + - name: "Throttle Builds" + shell: bash run: | - PKG_MATRIX=$(tools ci pkg-matrix ${{ 
inputs.distro-slug }} ${{ inputs.pkg-type }}) - echo "$PKG_MATRIX" - echo "matrix=$PKG_MATRIX" >> "$GITHUB_OUTPUT" + t=$(shuf -i 1-30 -n 1); echo "Sleeping $t seconds"; sleep "$t" - dependencies: - name: Setup Test Dependencies - needs: - - generate-matrix - runs-on: - - self-hosted - - linux - - bastion - timeout-minutes: 90 - strategy: - fail-fast: false - matrix: - include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }} - steps: - name: Checkout Source Code - uses: actions/checkout@v3 - - - name: Cache nox.${{ inputs.distro-slug }}.tar.* for session ${{ matrix.nox-session }} - id: nox-dependencies-cache - uses: actions/cache@v3 - with: - path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ matrix.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} - - - name: Download Onedir Tarball as an Artifact - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - uses: actions/download-artifact@v3 - with: - name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz - path: artifacts/ - - - name: Decompress Onedir Tarball - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - shell: bash - run: | - python3 -c "import os; os.makedirs('artifacts', exist_ok=True)" - cd artifacts - tar xvf ${{ inputs.package-name }}-${{ inputs.salt-version }}-onedir-${{ inputs.platform }}-${{ inputs.arch }}.tar.xz + uses: actions/checkout@v4 - name: Setup Python Tools Scripts - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - - name: Get Salt Project GitHub Actions Bot Environment - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - TOKEN=$(curl -sS -f -X PUT "http://169.254.169.254/latest/api/token" -H "X-aws-ec2-metadata-token-ttl-seconds: 30") - SPB_ENVIRONMENT=$(curl 
-sS -f -H "X-aws-ec2-metadata-token: $TOKEN" http://169.254.169.254/latest/meta-data/tags/instance/spb:environment) - echo "SPB_ENVIRONMENT=$SPB_ENVIRONMENT" >> "$GITHUB_ENV" - - - name: Start VM - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - id: spin-up-vm - run: | - tools --timestamps vm create --environment "${SPB_ENVIRONMENT}" --retries=2 ${{ inputs.distro-slug }} - - - name: List Free Space - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm ssh ${{ inputs.distro-slug }} -- df -h || true - - - name: Upload Checkout To VM - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm rsync ${{ inputs.distro-slug }} - - - name: Install Dependencies - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm install-dependencies --nox-session=${{ matrix.nox-session }} ${{ inputs.distro-slug }} - - - name: Cleanup .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm pre-archive-cleanup ${{ inputs.distro-slug }} - - - name: Compress .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm compress-dependencies ${{ inputs.distro-slug }} - - - name: Download Compressed .nox Directory - if: steps.nox-dependencies-cache.outputs.cache-hit != 'true' + - name: Generate Package Test Matrix + id: generate-pkg-matrix run: | - tools --timestamps vm download-dependencies ${{ inputs.distro-slug }} + tools ci pkg-matrix ${{ inputs.fips && '--fips ' || '' }}${{ inputs.distro-slug }} \ + ${{ inputs.pkg-type }} --testing-releases ${{ join(fromJSON(inputs.testing-releases), ' ') }} - - name: Destroy VM - if: always() && steps.nox-dependencies-cache.outputs.cache-hit != 'true' - run: | - tools --timestamps vm destroy ${{ inputs.distro-slug }} test: name: Test @@ -176,21 +114,26 @@ jobs: timeout-minutes: 120 # 2 Hours - More than this and 
something is wrong needs: - generate-matrix - - dependencies strategy: fail-fast: false matrix: include: ${{ fromJSON(needs.generate-matrix.outputs.pkg-matrix-include) }} steps: + + - name: "Throttle Builds" + shell: bash + run: | + t=$(python3 -c 'import random, sys; sys.stdout.write(str(random.randint(1, 15)))'); echo "Sleeping $t seconds"; sleep "$t" + - name: Checkout Source Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download Packages uses: actions/download-artifact@v3 with: name: ${{ inputs.package-name }}-${{ inputs.salt-version }}-${{ inputs.arch }}-${{ inputs.pkg-type }} - path: pkg/artifacts/ + path: artifacts/pkg/ - name: Download Onedir Tarball as an Artifact uses: actions/download-artifact@v3 @@ -207,16 +150,22 @@ jobs: - name: List Packages run: | - tree pkg/artifacts + tree artifacts/pkg/ - - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ matrix.nox-session }} - uses: actions/cache@v3 + - name: Download cached nox.${{ inputs.distro-slug }}.tar.* for session ${{ inputs.nox-session }} + uses: actions/cache@v3.3.1 with: path: nox.${{ inputs.distro-slug }}.tar.* - key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.distro-slug }}|${{ matrix.nox-session }}|${{ hashFiles('requirements/**/*.txt', 'cicd/golden-images.json') }} + key: ${{ inputs.cache-prefix }}|testrun-deps|${{ inputs.arch }}|${{ inputs.distro-slug }}|${{ inputs.nox-session }}|${{ inputs.python-version }}|${{ + hashFiles('requirements/**/*.txt', 'cicd/golden-images.json', 'noxfile.py') + }} + # If we get a cache miss here it means the dependencies step failed to save the cache + fail-on-cache-miss: true - name: Setup Python Tools Scripts uses: ./.github/actions/setup-python-tools-scripts + with: + cache-prefix: ${{ inputs.cache-prefix }} - name: Get Salt Project GitHub Actions Bot Environment run: | @@ -241,15 +190,23 @@ jobs: run: | tools --timestamps vm decompress-dependencies ${{ inputs.distro-slug }} + - name: Downgrade 
importlib-metadata + if: ${{ contains(fromJSON('["amazonlinux-2", "centos-7", "debian-10"]'), inputs.distro-slug) && contains(fromJSON('["upgrade-classic", "downgrade-classic"]'), matrix.tests-chunk) }} + run: | + # This step can go away once we stop testing classic packages upgrade/downgrades to/from 3005.x + tools --timestamps vm ssh ${{ inputs.distro-slug }} -- "sudo python3 -m pip install -U 'importlib-metadata<=4.13.0' 'virtualenv<=20.21.1'" + - name: Show System Info & Test Plan run: | tools --timestamps --timeout-secs=1800 vm testplan --skip-requirements-install \ - --nox-session=${{ matrix.nox-session }} ${{ inputs.distro-slug }} + --nox-session=${{ inputs.nox-session }}-pkgs ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \ + ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Run Package Tests run: | - tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install\ - --nox-session=${{ matrix.nox-session }} --rerun-failures ${{ inputs.distro-slug }} + tools --timestamps --no-output-timeout-secs=1800 --timeout-secs=14400 vm test --skip-requirements-install ${{ matrix.fips && '--fips ' || '' }}\ + --nox-session=${{ inputs.nox-session }}-pkgs --rerun-failures ${{ inputs.distro-slug }} -- ${{ matrix.tests-chunk }} \ + ${{ matrix.version && format('--prev-version {0}', matrix.version) || ''}} - name: Download Test Run Artifacts id: download-artifacts-from-vm @@ -264,20 +221,21 @@ jobs: - name: Destroy VM if: always() run: | - tools --timestamps vm destroy ${{ inputs.distro-slug }} || true + tools --timestamps vm destroy --no-wait ${{ inputs.distro-slug }} || true - name: Upload Test Run Artifacts if: always() && steps.download-artifacts-from-vm.outcome == 'success' uses: actions/upload-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.nox-session }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: | artifacts 
+ !artifacts/pkg/* !artifacts/salt/* !artifacts/salt-*.tar.* report: - name: Reports for ${{ inputs.distro-slug }}(${{ matrix.nox-session }}) + name: Report runs-on: - self-hosted - linux @@ -293,13 +251,13 @@ jobs: steps: - name: Checkout Source Code - uses: actions/checkout@v3 + uses: actions/checkout@v4 - name: Download Test Run Artifacts id: download-test-run-artifacts uses: actions/download-artifact@v3 with: - name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.nox-session }} + name: pkg-testrun-artifacts-${{ inputs.distro-slug }}-${{ matrix.tests-chunk }} path: artifacts - name: Show Test Run Artifacts @@ -312,6 +270,6 @@ jobs: # always run even if the previous steps fails if: always() && inputs.skip-junit-reports == false && steps.download-test-run-artifacts.outcome == 'success' with: - check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.nox-session }}) + check_name: Overall Test Results(${{ inputs.distro-slug }} ${{ matrix.tests-chunk }}) report_paths: 'artifacts/xml-unittests-output/*.xml' annotate_only: true diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml index 92468bbc75f0..2971429a4dc3 100644 --- a/.github/workflows/triage.yml +++ b/.github/workflows/triage.yml @@ -22,10 +22,10 @@ jobs: issues: write pull-requests: read # for dawidd6/action-download-artifact to query commit hash name: Triage New Issue - runs-on: ubuntu-latest + runs-on: ${{ github.event.repository.private && fromJSON('["self-hosted", "linux", "x86_64"]') || 'ubuntu-latest' }} steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Set up Python uses: actions/setup-python@v4 diff --git a/.gitignore b/.gitignore index f4076ae84be1..e855a8261544 100644 --- a/.gitignore +++ b/.gitignore @@ -117,7 +117,6 @@ kitchen.local.yml .bundle/ Gemfile.lock /artifacts/ -/pkg/artifacts/ requirements/static/*/py*/*.log # Vim's default session file @@ -128,6 +127,7 @@ Session.vim # Nox requirements archives nox.*.tar.bzip2 
+nox.*.tar.xz # Debian packages /debian @@ -138,8 +138,11 @@ nox.*.tar.bzip2 /pkg/debian/salt-api /pkg/debian/salt-common /pkg/debian/salt-cloud +/pkg/debian/salt-dbg /pkg/debian/salt-master /pkg/debian/salt-minion /pkg/debian/salt-ssh /pkg/debian/salt-syndic /pkg/debian/debhelper-build-stamp + +.tools-venvs diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 719edae7b7b8..0f926d986d5e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -46,32 +46,26 @@ repos: )$ - repo: https://github.com/s0undt3ch/python-tools-scripts - rev: "0.11.1" + rev: "0.18.6" hooks: - id: tools alias: check-changelog-entries name: Check Changelog Entries args: + - pre-commit - changelog - pre-commit-checks - additional_dependencies: - - boto3==1.21.46 - - pyyaml==6.0 - - jinja2==3.1.2 - - packaging==23.0 + - id: tools alias: generate-workflows name: Generate GitHub Workflow Templates - files: ^(tools/pre_commit\.py|.github/workflows/templates/.*)$ + files: ^(cicd/shared-gh-workflows-context\.yml|tools/precommit/workflows\.py|.github/workflows/templates/.*)$ pass_filenames: false args: - pre-commit + - workflows - generate-workflows - additional_dependencies: - - boto3==1.21.46 - - pyyaml==6.0 - - jinja2==3.1.2 - - packaging==23.0 + - id: tools alias: actionlint name: Lint GitHub Actions Workflows @@ -80,18 +74,77 @@ repos: - yaml args: - pre-commit + - workflows - actionlint - additional_dependencies: - - boto3==1.21.46 - - pyyaml==6.0 - - jinja2==3.1.2 - - packaging==23.0 + + - id: tools + alias: check-docs + name: Check Docs + files: ^(salt/.*\.py|doc/ref/.*\.rst)$ + args: + - pre-commit + - docs + - check + + - id: tools + alias: check-docstrings + name: Check docstrings + files: salt/.*\.py$ + exclude: > + (?x)^( + templates/.*| + salt/ext/.*| + )$ + args: + - pre-commit + - docstrings + - check + + - id: tools + alias: check-known-missing-docstrings + name: Check Known Missing Docstrings + stages: [manual] + files: salt/.*\.py$ + exclude: > + (?x)^( 
+ templates/.*| + salt/ext/.*| + )$ + args: + - pre-commit + - docstrings + - check + + - id: tools + alias: loader-check-virtual + name: Check loader modules __virtual__ + files: salt/.*\.py$ + exclude: > + (?x)^( + templates/.*| + salt/ext/.*| + )$ + args: + - pre-commit + - salt-loaders + - check-virtual + + - id: tools + alias: check-filemap + name: Check Filename Map Change Matching + files: ^tests/(filename_map\.yml|.*\.py)$ + pass_filenames: false + args: + - pre-commit + - filemap + - check + + # ----- Packaging Requirements ------------------------------------------------------------------------------------> - repo: https://github.com/saltstack/pip-tools-compile-impersonate rev: "4.8" hooks: - # ----- Packaging Requirements ------------------------------------------------------------------------------------> - id: pip-tools-compile alias: compile-pkg-linux-3.7-zmq-requirements name: Linux Packaging Py3.7 ZeroMQ Requirements @@ -99,10 +152,12 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=linux - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/pkg/linux.in - id: pip-tools-compile @@ -112,10 +167,12 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=linux - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/pkg/linux.in - id: pip-tools-compile @@ -125,10 +182,12 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=linux - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/pkg/linux.in - id: pip-tools-compile @@ -138,10 +197,42 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=linux - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url + - 
requirements/static/pkg/linux.in + + - id: pip-tools-compile + alias: compile-pkg-linux-3.11-zmq-requirements + name: Linux Packaging Py3.11 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(linux\.in|py3\.11/linux\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/pkg/linux.in + + - id: pip-tools-compile + alias: compile-pkg-linux-3.12-zmq-requirements + name: Linux Packaging Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(linux\.in|py3\.12/linux\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/pkg/linux.in - id: pip-tools-compile @@ -151,10 +242,12 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=freebsd - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/pkg/freebsd.in - id: pip-tools-compile @@ -164,10 +257,12 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=freebsd - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/pkg/freebsd.in - id: pip-tools-compile @@ -177,10 +272,12 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=freebsd - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/pkg/freebsd.in - id: pip-tools-compile @@ -190,10 +287,42 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=freebsd - --include=requirements/base.txt - 
--include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/pkg/freebsd.in + + - id: pip-tools-compile + alias: compile-pkg-freebsd-3.11-zmq-requirements + name: FreeBSD Packaging Py3.11 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(freebsd\.in|py3\.11/freebsd\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=freebsd + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/pkg/freebsd.in + + - id: pip-tools-compile + alias: compile-pkg-freebsd-3.12-zmq-requirements + name: FreeBSD Packaging Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto)\.txt|static/pkg/(freebsd\.in|py3\.12/freebsd\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=freebsd + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/pkg/freebsd.in - id: pip-tools-compile @@ -203,9 +332,11 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=darwin - --include=requirements/darwin.txt + - --no-emit-index-url - requirements/static/pkg/darwin.in - id: pip-tools-compile @@ -215,9 +346,39 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=darwin - --include=requirements/darwin.txt + - --no-emit-index-url + - requirements/static/pkg/darwin.in + + - id: pip-tools-compile + alias: compile-pkg-darwin-3.11-zmq-requirements + name: Darwin Packaging Py3.11 ZeroMQ Requirements + files: ^(requirements/((base|zeromq|crypto|darwin)\.txt|static/pkg/(darwin\.in|py3\.11/darwin\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=darwin + - --include=requirements/darwin.txt + - --no-emit-index-url + - requirements/static/pkg/darwin.in + + - id: pip-tools-compile + 
alias: compile-pkg-darwin-3.12-zmq-requirements + name: Darwin Packaging Py3.12 ZeroMQ Requirements + files: ^(requirements/((base|zeromq|crypto|darwin)\.txt|static/pkg/(darwin\.in|py3\.12/darwin\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=darwin + - --include=requirements/darwin.txt + - --no-emit-index-url - requirements/static/pkg/darwin.in - id: pip-tools-compile @@ -227,9 +388,11 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=windows - --include=requirements/windows.txt + - --no-emit-index-url - requirements/static/pkg/windows.in - id: pip-tools-compile @@ -239,9 +402,11 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=windows - --include=requirements/windows.txt + - --no-emit-index-url - requirements/static/pkg/windows.in - id: pip-tools-compile @@ -251,9 +416,11 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=windows - --include=requirements/windows.txt + - --no-emit-index-url - requirements/static/pkg/windows.in - id: pip-tools-compile @@ -263,9 +430,39 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=windows - --include=requirements/windows.txt + - --no-emit-index-url + - requirements/static/pkg/windows.in + + - id: pip-tools-compile + alias: compile-pkg-windows-3.11-zmq-requirements + name: Windows Packaging Py3.11 ZeroMQ Requirements + files: ^requirements/((base|zeromq|crypto|windows)\.txt|static/pkg/(windows\.in|py3\.11/windows\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=windows + - --include=requirements/windows.txt + - --no-emit-index-url + - requirements/static/pkg/windows.in + + - id: pip-tools-compile + alias: compile-pkg-windows-3.12-zmq-requirements + name: Windows Packaging Py3.12 ZeroMQ Requirements + files: 
^requirements/((base|zeromq|crypto|windows)\.txt|static/pkg/(windows\.in|py3\.12/windows\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=windows + - --include=requirements/windows.txt + - --no-emit-index-url - requirements/static/pkg/windows.in # <---- Packaging Requirements ------------------------------------------------------------------------------------- @@ -278,6 +475,7 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=linux - --include=requirements/base.txt @@ -285,16 +483,17 @@ repos: - --include=requirements/pytest.txt - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/linux.in - id: pip-tools-compile alias: compile-ci-linux-3.8-zmq-requirements name: Linux CI Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|pkg/py3\.8/linux\.txt))$ + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.8/linux\.txt))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=linux - --include=requirements/base.txt @@ -302,16 +501,17 @@ repos: - --include=requirements/pytest.txt - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/linux.in - id: pip-tools-compile alias: compile-ci-linux-3.9-zmq-requirements name: Linux CI Py3.9 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|pkg/py3\.9/linux\.txt))$ + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.9/linux\.txt))$ pass_filenames: false args: - -v + - 
--build-isolation - --py-version=3.9 - --platform=linux - --include=requirements/base.txt @@ -319,16 +519,17 @@ repos: - --include=requirements/pytest.txt - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/linux.in - id: pip-tools-compile alias: compile-ci-linux-3.10-zmq-requirements name: Linux CI Py3.10 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|pkg/py3\.10/linux\.txt))$ + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.10/linux\.txt))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=linux - --include=requirements/base.txt @@ -336,7 +537,43 @@ repos: - --include=requirements/pytest.txt - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/linux.txt + - --no-emit-index-url + - requirements/static/ci/linux.in + + - id: pip-tools-compile + alias: compile-ci-linux-3.11-zmq-requirements + name: Linux CI Py3.11 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.11/linux\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/linux.in + + - id: pip-tools-compile + alias: compile-ci-linux-3.12-zmq-requirements + name: Linux CI Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(linux\.in|common\.in)|py3\.12/linux\.txt))$ + pass_filenames: false + 
args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url - requirements/static/ci/linux.in - id: pip-tools-compile @@ -346,10 +583,11 @@ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=linux - --out-prefix=linux - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile @@ -359,10 +597,11 @@ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=linux - --out-prefix=linux - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile @@ -372,10 +611,11 @@ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=linux - --out-prefix=linux - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile @@ -386,10 +626,41 @@ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=linux - --out-prefix=linux - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url + - requirements/static/ci/crypto.in + + - id: pip-tools-compile + alias: compile-ci-linux-crypto-3.11-requirements + name: Linux CI Py3.11 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/linux-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --out-prefix=linux + - --no-emit-index-url + -
requirements/static/ci/crypto.in + + - id: pip-tools-compile + alias: compile-ci-linux-crypto-3.12-requirements + name: Linux CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/linux-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --out-prefix=linux + - --no-emit-index-url - requirements/static/ci/crypto.in @@ -400,6 +671,7 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=freebsd - --include=requirements/base.txt @@ -407,16 +679,17 @@ - --include=requirements/pytest.txt - --include=requirements/static/pkg/freebsd.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/freebsd.txt + - --no-emit-index-url - requirements/static/ci/freebsd.in - id: pip-tools-compile alias: compile-ci-freebsd-3.8-zmq-requirements name: FreeBSD CI Py3.8 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|pkg/py3\.8/freebsd\.txt))$ + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|py3\.8/freebsd\.txt))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=freebsd - --include=requirements/base.txt @@ -424,16 +697,17 @@ - --include=requirements/pytest.txt - --include=requirements/static/pkg/freebsd.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/freebsd.txt + - --no-emit-index-url - requirements/static/ci/freebsd.in - id: pip-tools-compile alias: compile-ci-freebsd-3.9-zmq-requirements name: FreeBSD CI Py3.9 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|pkg/py3\.9/freebsd\.txt))$ + files:
^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|py3\.9/freebsd\.txt))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=freebsd - --include=requirements/base.txt @@ -441,16 +715,17 @@ repos: - --include=requirements/pytest.txt - --include=requirements/static/pkg/freebsd.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/freebsd.txt + - --no-emit-index-url - requirements/static/ci/freebsd.in - id: pip-tools-compile alias: compile-ci-freebsd-3.10-zmq-requirements name: FreeBSD CI Py3.10 ZeroMQ Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|pkg/py3\.10/freebsd\.txt))$ + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|py3\.10/freebsd\.txt))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=freebsd - --include=requirements/base.txt @@ -458,7 +733,43 @@ repos: - --include=requirements/pytest.txt - --include=requirements/static/pkg/freebsd.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/freebsd.txt + - --no-emit-index-url + - requirements/static/ci/freebsd.in + + - id: pip-tools-compile + alias: compile-ci-freebsd-3.11-zmq-requirements + name: FreeBSD CI Py3.11 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|py3\.11/freebsd\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=freebsd + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/freebsd.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/freebsd.in + + - id: pip-tools-compile + alias: compile-ci-freebsd-3.12-zmq-requirements + name: 
FreeBSD CI Py3.12 ZeroMQ Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(freebsd|common)\.in|py3\.12/freebsd\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=freebsd + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/freebsd.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url - requirements/static/ci/freebsd.in - id: pip-tools-compile @@ -468,10 +779,11 @@ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=freebsd - --out-prefix=freebsd - - --pip-args=--constraint=requirements/static/ci/py{py_version}/freebsd.txt + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile @@ -481,10 +793,11 @@ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=freebsd - --out-prefix=freebsd - - --pip-args=--constraint=requirements/static/ci/py{py_version}/freebsd.txt + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile @@ -494,10 +807,11 @@ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=freebsd - --out-prefix=freebsd - - --pip-args=--constraint=requirements/static/ci/py{py_version}/freebsd.txt + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile @@ -508,42 +822,109 @@ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=freebsd - --out-prefix=freebsd - - --pip-args=--constraint=requirements/static/ci/py{py_version}/freebsd.txt + - --no-emit-index-url + - requirements/static/ci/crypto.in + + - id: pip-tools-compile + alias: compile-ci-freebsd-crypto-3.11-requirements + name: FreeBSD CI Py3.11 Crypto Requirements + files:
^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/freebsd-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=freebsd + - --out-prefix=freebsd + - --no-emit-index-url + - requirements/static/ci/crypto.in + + - id: pip-tools-compile + alias: compile-ci-freebsd-crypto-3.12-requirements + name: FreeBSD CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/freebsd-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=freebsd + - --out-prefix=freebsd + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile alias: compile-ci-darwin-3.9-zmq-requirements name: Darwin CI Py3.9 ZeroMQ Requirements - files: ^(requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(darwin|common)\.in|pkg/py3\.9/darwin\.txt)))$ + files: ^(requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(darwin|common)\.in|py3\.9/darwin\.txt)))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=darwin - --include=requirements/darwin.txt - --include=requirements/pytest.txt - --include=requirements/static/pkg/darwin.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/darwin.txt + - --no-emit-index-url - requirements/static/ci/darwin.in - id: pip-tools-compile alias: compile-ci-darwin-3.10-zmq-requirements name: Darwin CI Py3.10 ZeroMQ Requirements - files: ^(requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(darwin|common)\.in|pkg/py3\.10/darwin\.txt)))$ + files: ^(requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(darwin|common)\.in|py3\.10/darwin\.txt)))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=darwin - --include=requirements/darwin.txt - --include=requirements/pytest.txt -
--include=requirements/static/pkg/darwin.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/darwin.txt + - --no-emit-index-url + - requirements/static/ci/darwin.in + + - id: pip-tools-compile + alias: compile-ci-darwin-3.11-zmq-requirements + name: Darwin CI Py3.11 ZeroMQ Requirements + files: ^(requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(darwin|common)\.in|py3\.11/darwin\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=darwin + - --include=requirements/darwin.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/darwin.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/darwin.in + + - id: pip-tools-compile + alias: compile-ci-darwin-3.12-zmq-requirements + name: Darwin CI Py3.12 ZeroMQ Requirements + files: ^(requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(darwin|common)\.in|py3\.12/darwin\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=darwin + - --include=requirements/darwin.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/darwin.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url - requirements/static/ci/darwin.in - id: pip-tools-compile @@ -553,10 +934,11 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=darwin - --out-prefix=darwin - - --pip-args=--constraint=requirements/static/ci/py{py_version}/darwin.txt + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile @@ -566,74 +948,141 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=darwin - --out-prefix=darwin - - --pip-args=--constraint=requirements/static/ci/py{py_version}/darwin.txt + - --no-emit-index-url + - requirements/static/ci/crypto.in + + - id: 
pip-tools-compile + alias: compile-ci-darwin-crypto-3.11-requirements + name: Darwin CI Py3.11 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/darwin-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=darwin + - --out-prefix=darwin + - --no-emit-index-url + - requirements/static/ci/crypto.in + + - id: pip-tools-compile + alias: compile-ci-darwin-crypto-3.12-requirements + name: Darwin CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/darwin-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=darwin + - --out-prefix=darwin + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile alias: compile-ci-windows-3.7-zmq-requirements name: Windows CI Py3.7 ZeroMQ Requirements - files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|pkg/py3\.7/windows\.txt))$ + files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|py3\.7/windows\.txt))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=windows - --include=requirements/windows.txt - --include=requirements/pytest.txt - --include=requirements/static/pkg/windows.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/windows.txt + - --no-emit-index-url - requirements/static/ci/windows.in - id: pip-tools-compile alias: compile-ci-windows-3.8-zmq-requirements name: Windows CI Py3.8 ZeroMQ Requirements - files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|pkg/py3\.8/windows\.txt))$ + files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|py3\.8/windows\.txt))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=windows - 
--include=requirements/windows.txt - --include=requirements/pytest.txt - --include=requirements/static/pkg/windows.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/windows.txt + - --no-emit-index-url - requirements/static/ci/windows.in - id: pip-tools-compile alias: compile-ci-windows-3.9-zmq-requirements name: Windows CI Py3.9 ZeroMQ Requirements - files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|pkg/py3\.9/windows\.txt))$ + files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|py3\.9/windows\.txt))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=windows - --include=requirements/windows.txt - --include=requirements/pytest.txt - --include=requirements/static/pkg/windows.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/windows.txt + - --no-emit-index-url - requirements/static/ci/windows.in - id: pip-tools-compile alias: compile-ci-windows-3.10-zmq-requirements name: Windows CI Py3.10 ZeroMQ Requirements - files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|pkg/py3\.10/windows\.txt))$ + files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|py3\.10/windows\.txt))$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=windows - --include=requirements/windows.txt - --include=requirements/pytest.txt - --include=requirements/static/pkg/windows.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/pkg/py{py_version}/windows.txt + - --no-emit-index-url + - requirements/static/ci/windows.in + + - id: pip-tools-compile + alias: compile-ci-windows-3.11-zmq-requirements + name: Windows CI Py3.11 ZeroMQ Requirements + files: 
requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|py3\.11/windows\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=windows + - --include=requirements/windows.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/windows.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/windows.in + + - id: pip-tools-compile + alias: compile-ci-windows-3.12-zmq-requirements + name: Windows CI Py3.12 ZeroMQ Requirements + files: requirements/((base|zeromq|pytest)\.txt|static/((ci|pkg)/(windows|common)\.in|py3\.12/windows\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=windows + - --include=requirements/windows.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/windows.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url - requirements/static/ci/windows.in - id: pip-tools-compile @@ -643,10 +1092,11 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=windows - --out-prefix=windows - - --pip-args=--constraint=requirements/static/ci/py{py_version}/windows.txt + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile @@ -656,10 +1106,11 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=windows - --out-prefix=windows - - --pip-args=--constraint=requirements/static/ci/py{py_version}/windows.txt + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile @@ -669,10 +1120,11 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=windows - --out-prefix=windows - - --pip-args=--constraint=requirements/static/ci/py{py_version}/windows.txt + - --no-emit-index-url - requirements/static/ci/crypto.in - id: pip-tools-compile @@ -682,10 +1134,39 @@ repos: 
pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=windows - --out-prefix=windows - - --pip-args=--constraint=requirements/static/ci/py{py_version}/windows.txt + - --no-emit-index-url + - requirements/static/ci/crypto.in + + - id: pip-tools-compile + alias: compile-ci-windows-crypto-3.11-requirements + name: Windows CI Py3.11 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.11/windows-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=windows + - --out-prefix=windows + - --no-emit-index-url + - requirements/static/ci/crypto.in + + - id: pip-tools-compile + alias: compile-ci-windows-crypto-3.12-requirements + name: Windows CI Py3.12 Crypto Requirements + files: ^requirements/(crypto\.txt|static/ci/(crypto\.in|py3\.12/windows-crypto\.txt))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=windows + - --out-prefix=windows + - --no-emit-index-url - requirements/static/ci/crypto.in # <---- CI Requirements -------------------------------------------------------------------------------------------- @@ -699,13 +1180,14 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --include=requirements/base.txt - --include=requirements/zeromq.txt - --include=requirements/pytest.txt - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/cloud.in - id: pip-tools-compile @@ -715,13 +1197,14 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --include=requirements/base.txt - --include=requirements/zeromq.txt - --include=requirements/pytest.txt - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/common.in - - 
--pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/cloud.in - id: pip-tools-compile @@ -731,13 +1214,14 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --include=requirements/base.txt - --include=requirements/zeromq.txt - --include=requirements/pytest.txt - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/cloud.in - id: pip-tools-compile @@ -747,13 +1231,48 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --include=requirements/base.txt - --include=requirements/zeromq.txt - --include=requirements/pytest.txt - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url + - requirements/static/ci/cloud.in + + - id: pip-tools-compile + alias: compile-ci-cloud-3.11-requirements + name: Cloud CI Py3.11 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((cloud|common)\.in|py3\.11/cloud\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/cloud.in + + - id: pip-tools-compile + alias: compile-ci-cloud-3.12-requirements + name: Cloud CI Py3.12 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((cloud|common)\.in|py3\.12/cloud\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --include=requirements/base.txt + - 
--include=requirements/zeromq.txt + - --include=requirements/pytest.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url - requirements/static/ci/cloud.in # <---- Cloud CI Requirements -------------------------------------------------------------------------------------- @@ -765,10 +1284,12 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=linux - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/ci/docs.in - id: pip-tools-compile @@ -778,10 +1299,12 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=linux - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/ci/docs.in - id: pip-tools-compile @@ -791,10 +1314,12 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=linux - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/ci/docs.in - id: pip-tools-compile @@ -804,10 +1329,42 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=linux - --include=requirements/base.txt - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/ci/docs.in + + - id: pip-tools-compile + alias: compile-doc-requirements + name: Docs CI Py3.11 Requirements + files: ^requirements/((base|zeromq|pytest)\.txt|static/ci/(docs|common|linux)\.in|static/pkg/linux\.in|static/pkg/.*/linux\.txt)$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url + - requirements/static/ci/docs.in + + - id: pip-tools-compile + alias: compile-doc-requirements + name: Docs CI Py3.12 Requirements + files: 
^requirements/((base|zeromq|pytest)\.txt|static/ci/(docs|common|linux)\.in|static/pkg/linux\.in|static/pkg/.*/linux\.txt)$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --no-emit-index-url - requirements/static/ci/docs.in # <---- Doc CI Requirements ---------------------------------------------------------------------------------------- @@ -820,6 +1377,7 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.7 - --platform=linux - --include=requirements/base.txt @@ -827,7 +1385,7 @@ repos: - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/lint.in - id: pip-tools-compile @@ -837,6 +1395,7 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=linux - --include=requirements/base.txt @@ -844,7 +1403,7 @@ repos: - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/lint.in - id: pip-tools-compile @@ -854,6 +1413,7 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=linux - --include=requirements/base.txt @@ -861,7 +1421,7 @@ repos: - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/lint.in - id: pip-tools-compile @@ -871,6 +1431,7 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 
- --platform=linux - --include=requirements/base.txt @@ -878,7 +1439,43 @@ repos: - --include=requirements/static/pkg/linux.in - --include=requirements/static/ci/linux.in - --include=requirements/static/ci/common.in - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url + - requirements/static/ci/lint.in + + - id: pip-tools-compile + alias: compile-ci-lint-3.11-requirements + name: Lint CI Py3.11 Requirements + files: ^requirements/((base|zeromq)\.txt|static/(pkg/linux\.in|ci/(linux\.in|common\.in|lint\.in|py3\.11/linux\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url + - requirements/static/ci/lint.in + + - id: pip-tools-compile + alias: compile-ci-lint-3.12-requirements + name: Lint CI Py3.12 Requirements + files: ^requirements/((base|zeromq)\.txt|static/(pkg/linux\.in|ci/(linux\.in|common\.in|lint\.in|py3\.12/linux\.txt)))$ + pass_filenames: false + args: + - -v + - --build-isolation + - --py-version=3.12 + - --platform=linux + - --include=requirements/base.txt + - --include=requirements/zeromq.txt + - --include=requirements/static/pkg/linux.in + - --include=requirements/static/ci/linux.in + - --include=requirements/static/ci/common.in + - --no-emit-index-url - requirements/static/ci/lint.in # <---- Lint CI Requirements --------------------------------------------------------------------------------------- @@ -891,9 +1488,10 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.8 - --platform=linux - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/changelog.in - id: pip-tools-compile @@ -903,9 +1501,10 @@ repos: 
pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - --platform=linux - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/changelog.in - id: pip-tools-compile @@ -915,110 +1514,127 @@ repos: pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - --platform=linux - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt + - --no-emit-index-url - requirements/static/ci/changelog.in - # <---- Changelog -------------------------------------------------------------------------------------------------- - # ----- Invoke ----------------------------------------------------------------------------------------------------> - id: pip-tools-compile - alias: compile-ci-invoke-3.7-requirements - name: Linux CI Py3.7 Invoke Requirements - files: ^requirements/static/ci/(invoke\.in|py3.7/(invoke|linux)\.txt)$ + alias: compile-ci-changelog-3.11-requirements + name: Changelog CI Py3.11 Requirements + files: ^requirements/static/ci/(changelog\.in|py3\.11/(changelog|linux)\.txt)$ pass_filenames: false args: - -v - - --py-version=3.7 - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt - - requirements/static/ci/invoke.in + - --build-isolation + - --py-version=3.11 + - --platform=linux + - --no-emit-index-url + - requirements/static/ci/changelog.in - id: pip-tools-compile - alias: compile-ci-invoke-3.8-requirements - name: Linux CI Py3.8 Invoke Requirements - files: ^requirements/static/ci/(invoke\.in|py3.8/(invoke|linux)\.txt)$ + alias: compile-ci-changelog-3.12-requirements + name: Changelog CI Py3.12 Requirements + files: ^requirements/static/ci/(changelog\.in|py3\.12/(changelog|linux)\.txt)$ pass_filenames: false args: - -v - - --py-version=3.8 - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt - - requirements/static/ci/invoke.in + - --build-isolation + - --py-version=3.12 + - --platform=linux + - 
--no-emit-index-url + - requirements/static/ci/changelog.in + # <---- Changelog -------------------------------------------------------------------------------------------------- + # ----- Tools ----------------------------------------------------------------------------------------------------> - id: pip-tools-compile - alias: compile-ci-invoke-3.9-requirements - name: Linux CI Py3.9 Invoke Requirements - files: ^requirements/static/ci/(invoke\.in|py3.9/(invoke|linux)\.txt)$ + alias: compile-ci-tools-3.9-requirements + name: Linux CI Py3.9 Tools Requirements + files: ^requirements/static/ci/(tools\.in|py3.9/(tools|linux)\.txt)$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.9 - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt - - requirements/static/ci/invoke.in + - --no-emit-index-url + - requirements/static/ci/tools.in - id: pip-tools-compile - alias: compile-ci-invoke-3.10-requirements - name: Linux CI Py3.10 Invoke Requirements - files: ^requirements/static/ci/(invoke\.in|py3.10/(invoke|linux)\.txt)$ + alias: compile-ci-tools-3.10-requirements + name: Linux CI Py3.10 Tools Requirements + files: ^requirements/static/ci/(tools\.in|py3.10/(tools|linux)\.txt)$ pass_filenames: false args: - -v + - --build-isolation - --py-version=3.10 - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt - - requirements/static/ci/invoke.in - # <---- Invoke ----------------------------------------------------------------------------------------------------- + - --no-emit-index-url + - requirements/static/ci/tools.in - # <---- PKG ci requirements----------------------------------------------------------------------------------------- - id: pip-tools-compile - alias: compile-ci-pkg-3.10-requirements - name: PKG tests CI Py3.10 Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests|common)\.in|py3\.10/pkgtests\.in)))$ + alias: compile-ci-tools-3.11-requirements + name: 
Linux CI Py3.11 Tools Requirements + files: ^requirements/static/ci/(tools\.in|py3.11/(tools|linux)\.txt)$ pass_filenames: false args: - -v - - --py-version=3.10 - - --platform=linux - - --include=requirements/base.txt - - --include=requirements/zeromq.txt - - requirements/static/ci/pkgtests.in + - --build-isolation + - --py-version=3.11 + - --no-emit-index-url + - requirements/static/ci/tools.in - id: pip-tools-compile - alias: compile-ci-windows-pkg-3.10-requirements - name: PKG tests Windows CI Py3.10 Requirements - files: ^requirements/((base|zeromq|pytest)\.txt|static/(pkg/linux\.in|ci/((pkgtests-windows|common)\.in|py3\.10/pkgtests-windows\.in)))$ + alias: compile-ci-tools-3.12-requirements + name: Linux CI Py3.12 Tools Requirements + files: ^requirements/static/ci/(tools\.in|py3.12/(tools|linux)\.txt)$ pass_filenames: false args: - -v - - --py-version=3.10 - - --platform=windows - - --include=requirements/base.txt - - --include=requirements/zeromq.txt - - requirements/static/ci/pkgtests-windows.in - - # <---- PKG ci requirements----------------------------------------------------------------------------------------- - + - --build-isolation + - --py-version=3.12 + - --no-emit-index-url + - requirements/static/ci/tools.in - # ----- Tools ----------------------------------------------------------------------------------------------------> - id: pip-tools-compile - alias: compile-ci-tools-3.9-requirements - name: Linux CI Py3.9 Tools Requirements - files: ^requirements/static/ci/(tools\.in|py3.9/(tools|linux)\.txt)$ + alias: compile-ci-tools-virustotal-3.9-requirements + name: Linux CI Py3.9 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.9/(tools(-virustotal)?|linux)\.txt)$ pass_filenames: false args: - -v - --py-version=3.9 - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt - - requirements/static/ci/tools.in + - requirements/static/ci/tools-virustotal.in - id: pip-tools-compile - alias: 
compile-ci-tools-3.10-requirements - name: Linux CI Py3.10 Tools Requirements - files: ^requirements/static/ci/(tools\.in|py3.10/(tools|linux)\.txt)$ + alias: compile-ci-tools-virustotal-3.10-requirements + name: Linux CI Py3.10 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.10/(tools(-virustotal)?|linux)\.txt)$ pass_filenames: false args: - -v - --py-version=3.10 - - --pip-args=--constraint=requirements/static/ci/py{py_version}/linux.txt - - requirements/static/ci/tools.in + - requirements/static/ci/tools-virustotal.in + + - id: pip-tools-compile + alias: compile-ci-tools-virustotal-3.11-requirements + name: Linux CI Py3.11 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.11/(tools(-virustotal)?|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.11 + - requirements/static/ci/tools-virustotal.in + + - id: pip-tools-compile + alias: compile-ci-tools-virustotal-3.12-requirements + name: Linux CI Py3.12 Tools virustotal Requirements + files: ^requirements/static/ci/(tools(-virustotal)?\.in|py3.12/(tools(-virustotal)?|linux)\.txt)$ + pass_filenames: false + args: + - -v + - --py-version=3.12 + - requirements/static/ci/tools-virustotal.in # <---- Tools ----------------------------------------------------------------------------------------------------- # ----- Code Formatting -------------------------------------------------------------------------------------------> @@ -1044,7 +1660,7 @@ repos: - repo: https://github.com/s0undt3ch/salt-rewrite # Automatically rewrite code with known rules - rev: 2.0.0 + rev: 2.4.4 hooks: - id: salt-rewrite alias: rewrite-docstrings @@ -1056,10 +1672,6 @@ repos: salt/ext/.* )$ - - repo: https://github.com/s0undt3ch/salt-rewrite - # Automatically rewrite code with known rules - rev: 2.0.0 - hooks: - id: salt-rewrite alias: rewrite-tests name: Rewrite Salt's Test Suite @@ -1132,128 +1744,25 @@ repos: # <---- Security 
--------------------------------------------------------------------------------------------------- # ----- Pre-Commit ------------------------------------------------------------------------------------------------> - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-docs - name: Check Docs - files: ^(salt/.*\.py|doc/ref/.*\.rst)$ - args: - - docs.check - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-filemap - name: Check Filename Map Change Matching - files: ^tests/(filename_map\.yml|.*\.py)$ - pass_filenames: false - args: - - filemap.check - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: loader-check-virtual - name: Check loader modules __virtual__ - files: salt/.*\.py$ - exclude: > - (?x)^( - templates/.*| - salt/ext/.*| - )$ - args: - - loader.check-virtual - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-docstrings - name: Check docstrings - files: salt/.*\.py$ - exclude: > - (?x)^( - templates/.*| - salt/ext/.*| - )$ - args: - - docstrings.check - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - - repo: https://github.com/saltstack/invoke-pre-commit - rev: v1.9.0 - hooks: - - id: invoke - alias: check-known-missing-docstrings - name: Check Known Missing Docstrings - stages: [manual] - 
files: salt/.*\.py$ - exclude: > - (?x)^( - templates/.*| - salt/ext/.*| - )$ - args: - - docstrings.check - - --error-on-known-failures - additional_dependencies: - - blessings==1.7 - - pyyaml==6.0 - - distro==1.7.0 - - jinja2==3.0.3 - - msgpack==1.0.3 - - packaging - - looseversion - - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.0.0 + rev: v1.3.0 hooks: - id: mypy alias: mypy-tools name: Run mypy against tools files: ^tools/.*\.py$ - #args: [--strict] + exclude: > + (?x)^( + templates/.*| + salt/.*| + )$ additional_dependencies: - attrs - rich - types-attrs - types-pyyaml - types-requests + - python-tools-scripts>=0.18.6 - repo: https://github.com/saltstack/mirrors-nox rev: v2021.6.12 diff --git a/.pylintrc b/.pylintrc index 6e57ff6a4f7f..3991b5df08e8 100644 --- a/.pylintrc +++ b/.pylintrc @@ -697,7 +697,9 @@ allowed-3rd-party-modules=msgpack, fnmatch, ptscripts, packaging, - looseversion + looseversion, + pytestskipmarkers, + cryptography [EXCEPTIONS] diff --git a/CHANGELOG.md b/CHANGELOG.md index 0978874e1d47..0fd88914af8c 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,1426 @@ Versions are `MAJOR.PATCH`. 
# Changelog +## 3006.5 (2023-12-12) + + +### Removed + +- Tech Debt - support for pysss removed due to functionality addition in Python 3.3 [#65029](https://github.com/saltstack/salt/issues/65029) + + +### Fixed + +- Improved error message when state arguments are accidentally passed as a string [#38098](https://github.com/saltstack/salt/issues/38098) +- Allow `pip.install` to create a log file that is passed in if the parent directory is writeable [#44722](https://github.com/saltstack/salt/issues/44722) +- Fixed merging of complex pillar overrides with salt-ssh states [#59802](https://github.com/saltstack/salt/issues/59802) +- Fixed gpg pillar rendering with salt-ssh [#60002](https://github.com/saltstack/salt/issues/60002) +- Made salt-ssh states not re-render pillars unnecessarily [#62230](https://github.com/saltstack/salt/issues/62230) +- Made Salt maintain options in Debian package repo definitions [#64130](https://github.com/saltstack/salt/issues/64130) +- Migrated all [`invoke`](https://www.pyinvoke.org/) tasks to [`python-tools-scripts`](https://github.com/s0undt3ch/python-tools-scripts). + + * `tasks/docs.py` -> `tools/precommit/docs.py` + * `tasks/docstrings.py` -> `tools/precommit/docstrings.py` + * `tasks/loader.py` -> `tools/precommit/loader.py` + * `tasks/filemap.py` -> `tools/precommit/filemap.py` [#64374](https://github.com/saltstack/salt/issues/64374) +- Fix salt user login shell path in Debian packages [#64377](https://github.com/saltstack/salt/issues/64377) +- Fill out lsb_distrib_xxxx (best estimate) grains if problems with retrieving lsb_release data [#64473](https://github.com/saltstack/salt/issues/64473) +- Fixed an issue in the ``file.directory`` state where the ``children_only`` keyword + argument was not being respected. 
[#64497](https://github.com/saltstack/salt/issues/64497) +- Move salt.ufw to correct location /etc/ufw/applications.d/ [#64572](https://github.com/saltstack/salt/issues/64572) +- Fixed salt-ssh stacktrace when retcode is not an integer [#64575](https://github.com/saltstack/salt/issues/64575) +- Fixed SSH shell seldomly fails to report any exit code [#64588](https://github.com/saltstack/salt/issues/64588) +- Fixed some issues in x509_v2 execution module private key functions [#64597](https://github.com/saltstack/salt/issues/64597) +- Fixed grp.getgrall() in utils/user.py causing performance issues [#64888](https://github.com/saltstack/salt/issues/64888) +- Fix user.list_groups omits remote groups via sssd, etc. [#64953](https://github.com/saltstack/salt/issues/64953) +- Ensure sync from _grains occurs before attempting pillar compilation in case custom grain used in pillar file [#65027](https://github.com/saltstack/salt/issues/65027) +- Moved gitfs locks to salt working dir to avoid lock wipes [#65086](https://github.com/saltstack/salt/issues/65086) +- Only attempt to create a keys directory when `--gen-keys` is passed to the `salt-key` CLI [#65093](https://github.com/saltstack/salt/issues/65093) +- Fix nonce verification, request server replies do not stomp on eachother. [#65114](https://github.com/saltstack/salt/issues/65114) +- speed up yumpkg list_pkgs by not requiring digest or signature verification on lookup. [#65152](https://github.com/saltstack/salt/issues/65152) +- Fix pkg.latest failing on windows for winrepo packages where the package is already up to date [#65165](https://github.com/saltstack/salt/issues/65165) +- Ensure __kwarg__ is preserved when checking for kwargs. This change affects proxy minions when used with Deltaproxy, which had kwargs popped when targeting multiple minions id. [#65179](https://github.com/saltstack/salt/issues/65179) +- Fixes traceback when state id is an int in a reactor SLS file. 
[#65210](https://github.com/saltstack/salt/issues/65210) +- Install logrotate config as /etc/logrotate.d/salt-common for Debian packages + Remove broken /etc/logrotate.d/salt directory from 3006.3 if it exists. [#65231](https://github.com/saltstack/salt/issues/65231) +- Use ``sha256`` as the default ``hash_type``. It has been the default since Salt v2016.9 [#65287](https://github.com/saltstack/salt/issues/65287) +- Preserve ownership on log rotation [#65288](https://github.com/saltstack/salt/issues/65288) +- Ensure that the correct value of jid_inclue is passed if the argument is included in the passed keyword arguments. [#65302](https://github.com/saltstack/salt/issues/65302) +- Uprade relenv to 0.14.2 + - Update openssl to address CVE-2023-5363. + - Fix bug in openssl setup when openssl binary can't be found. + - Add M1 mac support. [#65316](https://github.com/saltstack/salt/issues/65316) +- Fix regex for filespec adding/deleting fcontext policy in selinux [#65340](https://github.com/saltstack/salt/issues/65340) +- Ensure CLI options take priority over Saltfile options [#65358](https://github.com/saltstack/salt/issues/65358) +- Test mode for state function `saltmod.wheel` no longer set's `result` to `(None,)` [#65372](https://github.com/saltstack/salt/issues/65372) +- Client only process events which tag conforms to an event return. [#65400](https://github.com/saltstack/salt/issues/65400) +- Fixes an issue setting user or machine policy on Windows when the Group Policy + directory is missing [#65411](https://github.com/saltstack/salt/issues/65411) +- Fix regression in file module which was not re-using a file client. [#65450](https://github.com/saltstack/salt/issues/65450) +- pip.installed state will now properly fail when a specified user does not exists [#65458](https://github.com/saltstack/salt/issues/65458) +- Publish channel connect callback method properly closes it's request channel. 
[#65464](https://github.com/saltstack/salt/issues/65464) +- Ensured the pillar in SSH wrapper modules is the same as the one used in template rendering when overrides are passed [#65483](https://github.com/saltstack/salt/issues/65483) +- Fix file.comment ignore_missing not working with multiline char [#65501](https://github.com/saltstack/salt/issues/65501) +- Warn when an un-closed transport client is being garbage collected. [#65554](https://github.com/saltstack/salt/issues/65554) +- Only generate the HMAC's for ``libssl.so.1.1`` and ``libcrypto.so.1.1`` if those files exist. [#65581](https://github.com/saltstack/salt/issues/65581) +- Fixed an issue where Salt Cloud would fail if it could not delete lingering + PAexec binaries [#65584](https://github.com/saltstack/salt/issues/65584) + + +### Added + +- Added Salt support for Debian 12 [#64223](https://github.com/saltstack/salt/issues/64223) +- Added Salt support for Amazon Linux 2023 [#64455](https://github.com/saltstack/salt/issues/64455) + + +### Security + +- Bump to `cryptography==41.0.4` due to https://github.com/advisories/GHSA-v8gr-m533-ghj9 [#65268](https://github.com/saltstack/salt/issues/65268) +- Bump to `cryptography==41.0.7` due to https://github.com/advisories/GHSA-jfhm-5ghh-2f97 [#65643](https://github.com/saltstack/salt/issues/65643) + + +## 3006.4 (2023-10-16) + +### Security + +- Fix CVE-2023-34049 by ensuring we do not use a predictable name for the script and correctly check returncode of scp command. + This only impacts salt-ssh users using the pre-flight option. 
[#cve-2023-34049](https://github.com/saltstack/salt/issues/cve-2023-34049) +- Update to `gitpython>=3.1.35` due to https://github.com/advisories/GHSA-wfm5-v35h-vwf4 and https://github.com/advisories/GHSA-cwvm-v4w8-q58c [#65163](https://github.com/saltstack/salt/issues/65163) +- Bump to `cryptography==41.0.4` due to https://github.com/advisories/GHSA-v8gr-m533-ghj9 [#65268](https://github.com/saltstack/salt/issues/65268) +- Upgrade relenv to 0.13.12 to address CVE-2023-4807 [#65316](https://github.com/saltstack/salt/issues/65316) +- Bump to `urllib3==1.26.17` or `urllib3==2.0.6` due to https://github.com/advisories/GHSA-v845-jxx5-vc9f [#65334](https://github.com/saltstack/salt/issues/65334) +- Bump to `gitpython==3.1.37` due to https://github.com/advisories/GHSA-cwvm-v4w8-q58c [#65383](https://github.com/saltstack/salt/issues/65383) + + +## 3005.4 (2023-10-16) + +### Security + +- Fix CVE-2023-34049 by ensuring we do not use a predictable name for the script and correctly check returncode of scp command. + This only impacts salt-ssh users using the pre-flight option. (cve-2023-34049) +- Bump to `cryptography==41.0.4` due to https://github.com/advisories/GHSA-v8gr-m533-ghj9 (#65267) +- Bump to `urllib3==1.26.17` or `urllib3==2.0.6` due to https://github.com/advisories/GHSA-v845-jxx5-vc9f (#65334) +- Bump to `gitpython==3.1.37` due to https://github.com/advisories/GHSA-cwvm-v4w8-q58c (#65383) + + +## Salt v3005.3 (2023-09-14) + +### Fixed + +- Fix __env__ and improve cache cleaning see more info at pull #65017. 
(#65002) + + +### Security + +- Update to `gitpython>=3.1.35` due to https://github.com/advisories/GHSA-wfm5-v35h-vwf4 and https://github.com/advisories/GHSA-cwvm-v4w8-q58c (#65167) + + +## 3006.3 (2023-09-06) + + +### Removed + +- Fedora 36 support was removed because it reached EOL [#64315](https://github.com/saltstack/salt/issues/64315) +- Handle deprecation warnings: + + * Switch to `FullArgSpec` since Py 3.11 no longer has `ArgSpec`, deprecated since Py 3.0 + * Stop using the deprecated `cgi` module + * Stop using the deprecated `pipes` module + * Stop using the deprecated `imp` module [#64553](https://github.com/saltstack/salt/issues/64553) + + +### Changed + +- Replace libnacl with PyNaCl [#64372](https://github.com/saltstack/salt/issues/64372) +- Don't hardcode the python version on the Salt Package tests and on the `pkg/debian/salt-cloud.postinst` file [#64553](https://github.com/saltstack/salt/issues/64553) +- Some more deprecated code fixes: + + * Stop using the deprecated `locale.getdefaultlocale()` function + * Stop accessing deprecated attributes + * `pathlib.Path.__enter__()` usage is deprecated and not required, a no-op [#64565](https://github.com/saltstack/salt/issues/64565) +- Bump to `pyyaml==6.0.1` due to https://github.com/yaml/pyyaml/issues/601 and address lint issues [#64657](https://github.com/saltstack/salt/issues/64657) + + +### Fixed + +- Fix for assume role when used salt-cloud to create aws ec2. [#52501](https://github.com/saltstack/salt/issues/52501) +- fixes aptpkg module by checking for blank comps. [#58667](https://github.com/saltstack/salt/issues/58667) +- `wheel.file_roots.find` is now able to find files in subdirectories of the roots. [#59800](https://github.com/saltstack/salt/issues/59800) +- pkg.latest no longer fails when multiple versions are reported to be installed (e.g. 
updating the kernel) [#60931](https://github.com/saltstack/salt/issues/60931) +- Do not update the credentials dictionary in `utils/aws.py` while iterating over it, and use the correct delete functionality [#61049](https://github.com/saltstack/salt/issues/61049) +- fixed runner not having a proper exit code when runner modules throw an exception. [#61173](https://github.com/saltstack/salt/issues/61173) +- `pip.list_all_versions` now works with `index_url` and `extra_index_url` [#61610](https://github.com/saltstack/salt/issues/61610) +- speed up file.recurse by using prefix with cp.list_master_dir and remove an un-needed loop. [#61998](https://github.com/saltstack/salt/issues/61998) +- Preserve test=True condition while running sub states. [#62590](https://github.com/saltstack/salt/issues/62590) +- Job returns are only sent to originating master [#62834](https://github.com/saltstack/salt/issues/62834) +- Fixes an issue with failing subsequent state runs with the lgpo state module. + The ``lgpo.get_polcy`` function now returns all boolean settings. [#63296](https://github.com/saltstack/salt/issues/63296) +- Fix SELinux get policy with trailing whitespace [#63336](https://github.com/saltstack/salt/issues/63336) +- Fixes an issue with boolean settings not being reported after being set. The + ``lgpo.get_polcy`` function now returns all boolean settings. [#63473](https://github.com/saltstack/salt/issues/63473) +- Ensure body is returned when salt.utils.http returns something other than 200 with tornado backend. [#63557](https://github.com/saltstack/salt/issues/63557) +- Allow long running pillar and file client requests to finish using request_channel_timeout and request_channel_tries minion config. 
[#63824](https://github.com/saltstack/salt/issues/63824) +- Fix state_queue type checking to allow int values [#64122](https://github.com/saltstack/salt/issues/64122) +- Call global logger when catching pip.list exceptions in states.pip.installed + Rename global logger `log` to `logger` inside pip_state [#64169](https://github.com/saltstack/salt/issues/64169) +- Fixes permissions created by the Debian and RPM packages for the salt user. + + The salt user created by the Debian and RPM packages to run the salt-master process, was previously given ownership of various directories in a way which compromised the benefits of running the salt-master process as a non-root user. + + This fix sets the salt user to only have write access to those files and + directories required for the salt-master process to run. [#64193](https://github.com/saltstack/salt/issues/64193) +- Fix user.present state when groups is unset to ensure the groups are unchanged, as documented. [#64211](https://github.com/saltstack/salt/issues/64211) +- Fixes issue with MasterMinion class loading configuration from `/etc/salt/minion.d/*.conf`. + + The MasterMinion class (used for running orchestrations on master and other functionality) was incorrectly loading configuration from `/etc/salt/minion.d/*.conf`, when it should only load configuration from `/etc/salt/master` and `/etc/salt/master.d/*.conf`. [#64219](https://github.com/saltstack/salt/issues/64219) +- Fixed issue in mac_user.enable_auto_login that caused the user's keychain to be reset at each boot [#64226](https://github.com/saltstack/salt/issues/64226) +- Fixed KeyError in logs when running a state that fails. [#64231](https://github.com/saltstack/salt/issues/64231) +- Fixed x509_v2 `create_private_key`/`create_crl` unknown kwargs: __pub_fun... [#64232](https://github.com/saltstack/salt/issues/64232) +- remove the hard coded python version in error. 
[#64237](https://github.com/saltstack/salt/issues/64237) +- `salt-pip` now properly errors out when being called from a non `onedir` environment. [#64249](https://github.com/saltstack/salt/issues/64249) +- Ensure we return an error when adding the key fails in the pkgrepo state for debian hosts. [#64253](https://github.com/saltstack/salt/issues/64253) +- Fixed file client private attribute reference on `SaltMakoTemplateLookup` [#64280](https://github.com/saltstack/salt/issues/64280) +- Fix pkgrepo.absent failures on apt-based systems when repo either a) contains a + trailing slash, or b) there is an arch mismatch. [#64286](https://github.com/saltstack/salt/issues/64286) +- Fix detection of Salt codename by "salt_version" execution module [#64306](https://github.com/saltstack/salt/issues/64306) +- Ensure selinux values are handled lowercase [#64318](https://github.com/saltstack/salt/issues/64318) +- Remove the `clr.AddReference`, it is causing an `Illegal characters in path` exception [#64339](https://github.com/saltstack/salt/issues/64339) +- Update `pkg.group_installed` state to support repo options [#64348](https://github.com/saltstack/salt/issues/64348) +- Fix salt user login shell path in Debian packages [#64377](https://github.com/saltstack/salt/issues/64377) +- Allow for multiple user's keys presented when authenticating, for example: root, salt, etc. [#64398](https://github.com/saltstack/salt/issues/64398) +- Fixed an issue with ``lgpo_reg`` where existing entries for the same key in + ``Registry.pol`` were being overwritten in subsequent runs if the value name in + the subsequent run was contained in the existing value name. 
For example, a + key named ``SetUpdateNotificationLevel`` would be overwritten by a subsequent + run attempting to set ``UpdateNotificationLevel`` [#64401](https://github.com/saltstack/salt/issues/64401) +- Add search for %ProgramData%\Chocolatey\choco.exe to determine if Chocolatey is installed or not [#64427](https://github.com/saltstack/salt/issues/64427) +- Fix regression for user.present on handling groups with dupe GIDs [#64430](https://github.com/saltstack/salt/issues/64430) +- Fix inconsistent use of args in ssh_auth.managed [#64442](https://github.com/saltstack/salt/issues/64442) +- Ensure we raise an error when the name argument is invalid in pkgrepo.managed state for systems using apt. [#64451](https://github.com/saltstack/salt/issues/64451) +- Fix file.symlink will not replace/update existing symlink [#64477](https://github.com/saltstack/salt/issues/64477) +- Fixed salt-ssh state.* commands returning retcode 0 when state/pillar rendering fails [#64514](https://github.com/saltstack/salt/issues/64514) +- Fix pkg.install when using a port in the url. [#64516](https://github.com/saltstack/salt/issues/64516) +- `win_pkg` Fixes an issue running `pkg.install` with `version=latest` where the + new installer would not be cached if there was already an installer present + with the same name. [#64519](https://github.com/saltstack/salt/issues/64519) +- Added a `test:full` label in the salt repository, which, when selected, will force a full test run. [#64539](https://github.com/saltstack/salt/issues/64539) +- Syndic's async_req_channel uses the asynchronous version of request channel [#64552](https://github.com/saltstack/salt/issues/64552) +- Ensure runners properly save information to job cache. 
[#64570](https://github.com/saltstack/salt/issues/64570) +- Added salt.ufw to salt-master install on Debian and Ubuntu [#64572](https://github.com/saltstack/salt/issues/64572) +- Added support for Chocolatey 2.0.0+ while maintaining support for older versions [#64622](https://github.com/saltstack/salt/issues/64622) +- Updated semanage fcontext to use --modify if context already exists when adding context [#64625](https://github.com/saltstack/salt/issues/64625) +- Preserve request client socket between requests. [#64627](https://github.com/saltstack/salt/issues/64627) +- Show user friendly message when pillars timeout [#64651](https://github.com/saltstack/salt/issues/64651) +- File client timeouts during jobs show user friendly errors instead of tracebacks [#64653](https://github.com/saltstack/salt/issues/64653) +- SaltClientError does not log a traceback on minions, we expect these to happen so a user friendly log is shown. [#64729](https://github.com/saltstack/salt/issues/64729) +- Look in location salt is running from, this accounts for running from an unpacked onedir file that has not been installed. [#64877](https://github.com/saltstack/salt/issues/64877) +- Preserve credentials on spawning platforms, minions no longer re-authenticate + with every job when using `multiprocessing=True`. [#64914](https://github.com/saltstack/salt/issues/64914) +- Fixed uninstaller to not remove the `salt` directory by default. This allows + the `extras-3.##` folder to persist so salt-pip dependencies are not wiped out + during an upgrade. [#64957](https://github.com/saltstack/salt/issues/64957) +- fix msteams by adding the missing header that Microsoft is now enforcing. [#64973](https://github.com/saltstack/salt/issues/64973) +- Fix __env__ and improve cache cleaning see more info at pull #65017. 
[#65002](https://github.com/saltstack/salt/issues/65002) +- Better error message on inconsistent decoded payload [#65020](https://github.com/saltstack/salt/issues/65020) +- Handle permissions access error when calling `lsb_release` with the salt user [#65024](https://github.com/saltstack/salt/issues/65024) +- Allow schedule state module to update schedule when the minion is offline. [#65033](https://github.com/saltstack/salt/issues/65033) +- Fixed creation of wildcard DNS in SAN in `x509_v2` [#65072](https://github.com/saltstack/salt/issues/65072) +- The macOS installer no longer removes the extras directory [#65073](https://github.com/saltstack/salt/issues/65073) + + +### Added + +- Added a script to automate setting up a 2nd minion in a user context on Windows [#64439](https://github.com/saltstack/salt/issues/64439) +- Several fixes to the CI workflow: + + * Don't override the `on` Jinja block on the `ci.yaml` template. This enables reacting to labels getting added/removed + to/from pull requests. + * Switch to using `tools` and re-use the event payload available instead of querying the GH API again to get the pull + request labels + * Concentrate test selection by labels to a single place + * Enable code coverage on pull-requests by setting the `test:coverage` label [#64547](https://github.com/saltstack/salt/issues/64547) + + +### Security + +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. 
+ + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r [#64595](https://github.com/saltstack/salt/issues/64595) +- Bump to `aiohttp==3.8.5` due to https://github.com/advisories/GHSA-45c4-8wx5-qw6w [#64687](https://github.com/saltstack/salt/issues/64687) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) +- Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) +- Update to `gitpython>=3.1.32` due to https://github.com/advisories/GHSA-pr76-5cm5-w9cj [#64988](https://github.com/saltstack/salt/issues/64988) + + +## 3006.2 (2023-08-09) + + +### Fixed + +- In scenarios where PythonNet fails to load, Salt will now fall back to WMI for + gathering grains information [#64897](https://github.com/saltstack/salt/issues/64897) + + +### Security + +- fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection [#cve-2023-20897](https://github.com/saltstack/salt/issues/cve-2023-20897) +- Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. 
[#cve-2023-20898](https://github.com/saltstack/salt/issues/cve-2023-20898) +- Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q [#64336](https://github.com/saltstack/salt/issues/64336) +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. + + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 [#64595](https://github.com/saltstack/salt/issues/64595) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) +- Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) + + +## Salt v3005.2 (2023-07-31) + +### Changed + +- Additional required package upgrades + + * It's now `pyzmq>=20.0.0` on all platforms, and `<=22.0.3` just for windows. + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. (#63757) + + +### Security + +- fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection (cve-2023-20897) +- Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. 
(cve-2023-20898) +- Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q (#64336) +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 (#64595) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 + + Python 3.5 cannot get the updated requirements since certifi no longer supports this python version (#64720) + + +## 3006.1 (2023-05-05) + + +### Fixed + +- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236) +- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477) +- add documentation note about reactor state ids. [#63589](https://github.com/saltstack/salt/issues/63589) +- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785) +- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810) +- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081) +- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082) +- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. 
+ This event is only used when these functions are called via the schedule execution modules. + Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103) +- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109) +- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111) +- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager + whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113) +- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. 
[#64114](https://github.com/saltstack/salt/issues/64114) +- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117) +- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118) +- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126) +- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141) +- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150) +- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158) +- Fixed issue in salt-cloud so that multiple masters specified in the cloud + are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170) +- Make sure the `salt-ssh` CLI calls it's `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184) +- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186) +- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192) +- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200) +- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204) +- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222) + +## 3006.0 (2023-04-18) + + +### Removed + +- Remove and deprecate the __orchestration__ key from salt.runner and salt.wheel return data. 
To get it back, set features.enable_deprecated_orchestration_flag master configuration option to True. The flag will be completely removed in Salt 3008 Argon. [#59917](https://github.com/saltstack/salt/issues/59917) +- Removed distutils and replaced with setuptools, given distutils is deprecated and removed in Python 3.12 [#60476](https://github.com/saltstack/salt/issues/60476) +- Removed ``runtests`` targets from ``noxfile.py`` [#62239](https://github.com/saltstack/salt/issues/62239) +- Removed the PyObjC dependency. + + This addresses problems with building a one dir build for macOS. + It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir(because it would be tied to the macOS version where the onedir was built). + Since it's currently not being used, it's removed. [#62432](https://github.com/saltstack/salt/issues/62432) +- Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. [#63874](https://github.com/saltstack/salt/issues/63874) + + +### Deprecated + +- renamed `keep_jobs`, specifying job cache TTL in hours, to `keep_jobs_seconds`, specifying TTL in seconds. + `keep_jobs` will be removed in the Argon release [#55295](https://github.com/saltstack/salt/issues/55295) +- Removing all references to napalm-base which is no longer supported. [#61542](https://github.com/saltstack/salt/issues/61542) +- The 'ip_bracket' function has been moved from salt/utils/zeromq.py in salt/utils/network.py [#62009](https://github.com/saltstack/salt/issues/62009) +- The `expand_repo_def` function in `salt.modules.aptpkg` is now deprecated. It's only used in `salt.states.pkgrepo` and it has no use of being exposed to the CLI. 
[#62485](https://github.com/saltstack/salt/issues/62485) +- Deprecated defunct Django returner [#62644](https://github.com/saltstack/salt/issues/62644) +- Deprecate core ESXi and associated states and modules, vcenter and vsphere support in favor of Salt VMware Extensions [#62754](https://github.com/saltstack/salt/issues/62754) +- Removing manufacture grain which has been deprecated. [#62914](https://github.com/saltstack/salt/issues/62914) +- Removing deprecated utils/boto3_elasticsearch.py [#62915](https://github.com/saltstack/salt/issues/62915) +- Removing support for the now deprecated _ext_nodes from salt/master.py. [#62917](https://github.com/saltstack/salt/issues/62917) +- Deprecating the Salt Slack engine in favor of the Salt Slack Bolt Engine. [#63095](https://github.com/saltstack/salt/issues/63095) +- `salt.utils.version.StrictVersion` is now deprecated and it's use should be replaced with `salt.utils.version.Version`. [#63383](https://github.com/saltstack/salt/issues/63383) + + +### Changed + +- More intelligent diffing in changes of file.serialize state. [#48609](https://github.com/saltstack/salt/issues/48609) +- Move deprecation of the neutron module to Argon. Please migrate to the neutronng module instead. [#49430](https://github.com/saltstack/salt/issues/49430) +- ``umask`` is now a global state argument, instead of only applying to ``cmd`` + states. [#57803](https://github.com/saltstack/salt/issues/57803) +- Update pillar.obfuscate to accept kwargs in addition to args. This is useful when passing in keyword arguments like saltenv that are then passed along to pillar.items. [#58971](https://github.com/saltstack/salt/issues/58971) +- Improve support for listing macOS brew casks [#59439](https://github.com/saltstack/salt/issues/59439) +- Add missing MariaDB Grants to mysql module. + MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. 
+ Also improved exception handling in `grant_add` which did not log the original error message and replaced it with a generic error. [#61409](https://github.com/saltstack/salt/issues/61409) +- Use VENV_PIP_TARGET environment variable as a default target for pip if present. [#62089](https://github.com/saltstack/salt/issues/62089) +- Disabled FQDNs grains on macOS by default [#62168](https://github.com/saltstack/salt/issues/62168) +- Replaced pyroute2.IPDB with pyroute2.NDB, as the former is deprecated [#62218](https://github.com/saltstack/salt/issues/62218) +- Enhance capture of error messages for Zypper calls in zypperpkg module. [#62346](https://github.com/saltstack/salt/issues/62346) +- Removed GPG_1_3_1 check [#62895](https://github.com/saltstack/salt/issues/62895) +- Requisite state chunks now all consistently contain `__id__`, `__sls__` and `name`. [#63012](https://github.com/saltstack/salt/issues/63012) +- netapi_enable_clients option to allow enabling/disabling of clients in salt-api. + By default all clients will now be disabled. Users of salt-api will need + to update their master config to enable the clients that they use. Not adding + the netapi_enable_clients option with required clients to the master config will + disable salt-api. [#63050](https://github.com/saltstack/salt/issues/63050) +- Stop relying on `salt/_version.py` to write Salt's version. Instead use `salt/_version.txt` which only contains the version string. [#63383](https://github.com/saltstack/salt/issues/63383) +- Set enable_fqdns_grains to be False by default. [#63595](https://github.com/saltstack/salt/issues/63595) +- Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file. 
[#63710](https://github.com/saltstack/salt/issues/63710) +- Upgraded to `relenv==0.9.0` [#63883](https://github.com/saltstack/salt/issues/63883) + + +### Fixed + +- Add kwargs to handle extra parameters for http.query [#36138](https://github.com/saltstack/salt/issues/36138) +- Fix mounted bind mounts getting active mount options added [#39292](https://github.com/saltstack/salt/issues/39292) +- Fix `sysctl.present` converts spaces to tabs. [#40054](https://github.com/saltstack/salt/issues/40054) +- Fixes state pkg.purged to purge removed packages on Debian family systems [#42306](https://github.com/saltstack/salt/issues/42306) +- Fix fun_args missing from syndic returns [#45823](https://github.com/saltstack/salt/issues/45823) +- Fix mount.mounted with 'mount: False' reports unmounted file system as unchanged when running with test=True [#47201](https://github.com/saltstack/salt/issues/47201) +- Issue #49310: Allow users to touch a file with Unix date of birth [#49310](https://github.com/saltstack/salt/issues/49310) +- Do not raise an exception in pkg.info_installed on nonzero return code [#51620](https://github.com/saltstack/salt/issues/51620) +- Passes the value of the force parameter from file.copy to its call to file.remove so that files with the read-only attribute are handled. [#51739](https://github.com/saltstack/salt/issues/51739) +- Fixed x509.certificate_managed creates new certificate every run in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#52167](https://github.com/saltstack/salt/issues/52167) +- Don't check for cached pillar errors on state.apply [#52354](https://github.com/saltstack/salt/issues/52354), [#57180](https://github.com/saltstack/salt/issues/57180), [#59339](https://github.com/saltstack/salt/issues/59339) +- Swapping out args and kwargs for arg and kwarg respectively in the Slack engine when the command passed is a runner. 
[#52400](https://github.com/saltstack/salt/issues/52400) +- Ensure when we're adding chunks to the rules when running aggregation with the iptables state module we use a copy of the chunk otherwise we end up with a recursive mess. [#53353](https://github.com/saltstack/salt/issues/53353) +- When user_create or user_remove fail, return False instead of returning the error. [#53377](https://github.com/saltstack/salt/issues/53377) +- Include sync_roster when sync_all is called. [#53914](https://github.com/saltstack/salt/issues/53914) +- Avoid warning noise in lograte.get [#53988](https://github.com/saltstack/salt/issues/53988) +- Fixed listing revoked keys with gpg.list_keys [#54347](https://github.com/saltstack/salt/issues/54347) +- Fix mount.mounted does not handle blanks properly [#54508](https://github.com/saltstack/salt/issues/54508) +- Fixed grain num_cpus get wrong CPUs count in case of inconsistent CPU numbering. [#54682](https://github.com/saltstack/salt/issues/54682) +- Fix spelling error for python_shell argument in dpkg_lower module [#54907](https://github.com/saltstack/salt/issues/54907) +- Cleaned up bytes response data before sending to non-bytes compatible returners (postgres, mysql) [#55226](https://github.com/saltstack/salt/issues/55226) +- Fixed malformed state return when testing file.managed with unavailable source file [#55269](https://github.com/saltstack/salt/issues/55269) +- Included stdout in error message for Zypper calls in zypperpkg module. [#56016](https://github.com/saltstack/salt/issues/56016) +- Fixed pillar.filter_by with salt-ssh [#56093](https://github.com/saltstack/salt/issues/56093) +- Fix boto_route53 issue with (multiple) VPCs. [#57139](https://github.com/saltstack/salt/issues/57139) +- Remove log from mine runner which was not used. [#57463](https://github.com/saltstack/salt/issues/57463) +- Fixed x509.read_certificate error when reading a Microsoft CA issued certificate in the new cryptography x509 module. 
Please migrate to the new cryptography x509 module for this improvement. [#57535](https://github.com/saltstack/salt/issues/57535) +- Updating Slack engine to use slack_bolt library. [#57842](https://github.com/saltstack/salt/issues/57842) +- Fixed warning about replace=True with x509.certificate_managed in the new cryptography x509 module. [#58165](https://github.com/saltstack/salt/issues/58165) +- Fix salt.modules.pip:is_installed doesn't handle locally installed packages [#58202](https://github.com/saltstack/salt/issues/58202) +- Add missing MariaDB Grants to mysql module. MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. [#58297](https://github.com/saltstack/salt/issues/58297) +- linux_shadow: Fix cases where malformed shadow entries cause `user.present` + states to fail. [#58423](https://github.com/saltstack/salt/issues/58423) +- Fixed salt.utils.compat.cmp to work with dictionaries [#58729](https://github.com/saltstack/salt/issues/58729) +- Fixed formatting for terse output mode [#58953](https://github.com/saltstack/salt/issues/58953) +- Fixed RecursiveDictDiffer with added nested dicts [#59017](https://github.com/saltstack/salt/issues/59017) +- Fixed x509.certificate_managed has DoS effect on master in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59169](https://github.com/saltstack/salt/issues/59169) +- Fixed saltnado websockets disconnecting immediately [#59183](https://github.com/saltstack/salt/issues/59183) +- Fixed x509.certificate_managed rolls certificates every now and then in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. 
[#59315](https://github.com/saltstack/salt/issues/59315) +- Fix postgres_privileges.present not idempotent for functions [#59585](https://github.com/saltstack/salt/issues/59585) +- Fixed influxdb_continuous_query.present state to provide the client args to the underlying module on create. [#59766](https://github.com/saltstack/salt/issues/59766) +- Warn when using insecure (http:// based) key_urls for apt-based systems in pkgrepo.managed, and add a kwarg that determines the validity of such a url. [#59786](https://github.com/saltstack/salt/issues/59786) +- add load balancing policy default option and ensure the module can be executed with arguments from CLI [#59909](https://github.com/saltstack/salt/issues/59909) +- Fix salt-ssh when using imports with extra-filerefs. [#60003](https://github.com/saltstack/salt/issues/60003) +- Fixed cache directory corruption startup error [#60170](https://github.com/saltstack/salt/issues/60170) +- Update docs remove dry_run in docstring of file.blockreplace state. [#60227](https://github.com/saltstack/salt/issues/60227) +- Adds Parrot to OS_Family_Map in grains. [#60249](https://github.com/saltstack/salt/issues/60249) +- Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions [#60365](https://github.com/saltstack/salt/issues/60365) +- Use return code in iptables --check to verify rule exists. [#60467](https://github.com/saltstack/salt/issues/60467) +- Fix regression pip.installed does not pass env_vars when calling pip.list [#60557](https://github.com/saltstack/salt/issues/60557) +- Fix xfs module when additional output included in mkfs.xfs command. [#60853](https://github.com/saltstack/salt/issues/60853) +- Fixed parsing new format of terraform states in roster.terraform [#60915](https://github.com/saltstack/salt/issues/60915) +- Fixed recognizing installed ARMv7 rpm packages in compatible architectures. 
[#60994](https://github.com/saltstack/salt/issues/60994) +- Fixing changes dict in pkg state to be consistent when installing and test=True. [#60995](https://github.com/saltstack/salt/issues/60995) +- Fix cron.present duplicating entries when changing timespec to special. [#60997](https://github.com/saltstack/salt/issues/60997) +- Made salt-ssh respect --wipe again [#61083](https://github.com/saltstack/salt/issues/61083) +- state.orchestrate_single only passes a pillar if it is set to the state + function. This allows it to be used with state functions that don't accept a + pillar keyword argument. [#61092](https://github.com/saltstack/salt/issues/61092) +- Fix ipset state when the comment kwarg is set. [#61122](https://github.com/saltstack/salt/issues/61122) +- Fix issue with archive.unzip where the password was not being encoded for the extract function [#61422](https://github.com/saltstack/salt/issues/61422) +- Some Linux distributions (like AlmaLinux, Astra Linux, Debian, Mendel, Linux + Mint, Pop!_OS, Rocky Linux) report different `oscodename`, `osfullname`, + `osfinger` grains if lsb-release is installed or not. They have been changed to + only derive these OS grains from `/etc/os-release`. [#61618](https://github.com/saltstack/salt/issues/61618) +- Pop!_OS uses the full version (YY.MM) in the osfinger grain now, not just the year. This allows differentiating for example between 20.04 and 20.10. [#61619](https://github.com/saltstack/salt/issues/61619) +- Fix ssh config roster to correctly parse the ssh config files that contain spaces. 
[#61650](https://github.com/saltstack/salt/issues/61650) +- Fix SoftLayer configuration not raising an exception when a domain is missing [#61727](https://github.com/saltstack/salt/issues/61727) +- Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry [#61789](https://github.com/saltstack/salt/issues/61789) +- Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function. [#61805](https://github.com/saltstack/salt/issues/61805) +- Fixed malformed state return when merge-serializing to an improperly formatted file [#61814](https://github.com/saltstack/salt/issues/61814) +- Made cmdmod._run[_all]_quiet work during minion startup on MacOS with runas specified (which fixed mac_service) [#61816](https://github.com/saltstack/salt/issues/61816) +- When deleting the vault cache, also delete from the session cache [#61821](https://github.com/saltstack/salt/issues/61821) +- Ignore errors on reading license info with dpkg_lowpkg to prevent tracebacks on getting package information. 
[#61827](https://github.com/saltstack/salt/issues/61827) +- win_lgpo: Display conflicting policy names when more than one policy is found [#61859](https://github.com/saltstack/salt/issues/61859) +- win_lgpo: Fixed intermittent KeyError when getting policy setting using lgpo.get_policy [#61860](https://github.com/saltstack/salt/issues/61860) +- Fixed listing minions on OpenBSD [#61966](https://github.com/saltstack/salt/issues/61966) +- Make Salt to return an error on "pkg" modules and states when targeting duplicated package names [#62019](https://github.com/saltstack/salt/issues/62019) +- Fix return of REST-returned permissions when auth_list is set [#62022](https://github.com/saltstack/salt/issues/62022) +- Normalize package names once on using pkg.installed/removed with yum to make it possible to install packages with the name containing a part similar to a name of architecture. [#62029](https://github.com/saltstack/salt/issues/62029) +- Fix inconsitency regarding name and pkgs parameters between zypperpkg.upgrade() and yumpkg.upgrade() [#62030](https://github.com/saltstack/salt/issues/62030) +- Fix attr=all handling in pkg.list_pkgs() (yum/zypper). [#62032](https://github.com/saltstack/salt/issues/62032) +- Fixed the humanname being ignored in pkgrepo.managed on openSUSE Leap [#62053](https://github.com/saltstack/salt/issues/62053) +- Fixed issue with some LGPO policies having whitespace at the beginning or end of the element alias [#62058](https://github.com/saltstack/salt/issues/62058) +- Fix ordering of args to libcloud_storage.download_object module [#62074](https://github.com/saltstack/salt/issues/62074) +- Ignore extend declarations in sls files that are excluded. [#62082](https://github.com/saltstack/salt/issues/62082) +- Remove leftover usage of impacket [#62101](https://github.com/saltstack/salt/issues/62101) +- Pass executable path from _get_path_exec() is used when calling the program. + The $HOME env is no longer modified globally. 
+ Only trailing newlines are stripped from the fetched secret. + Pass process arguments are handled in a secure way. [#62120](https://github.com/saltstack/salt/issues/62120) +- Ignore some command return codes in openbsdrcctl_service to prevent spurious errors [#62131](https://github.com/saltstack/salt/issues/62131) +- Fixed extra period in filename output in tls module. Instead of "server.crt." it will now be "server.crt". [#62139](https://github.com/saltstack/salt/issues/62139) +- Make sure lingering PAexec-*.exe files in the Windows directory are cleaned up [#62152](https://github.com/saltstack/salt/issues/62152) +- Restored Salt's DeprecationWarnings [#62185](https://github.com/saltstack/salt/issues/62185) +- Fixed issue with forward slashes on Windows with file.recurse and clean=True [#62197](https://github.com/saltstack/salt/issues/62197) +- Recognize OSMC as Debian-based [#62198](https://github.com/saltstack/salt/issues/62198) +- Fixed Zypper module failing on RPM lock file being temporarily unavailable. [#62204](https://github.com/saltstack/salt/issues/62204) +- Improved error handling and diagnostics in the proxmox salt-cloud driver [#62211](https://github.com/saltstack/salt/issues/62211) +- Added EndeavourOS to the Arch os_family. [#62220](https://github.com/saltstack/salt/issues/62220) +- Fix salt-ssh not detecting `platform-python` as a valid interpreter on EL8 [#62235](https://github.com/saltstack/salt/issues/62235) +- Fix pkg.version_cmp on openEuler and a few other os flavors. [#62248](https://github.com/saltstack/salt/issues/62248) +- Fix localhost detection in glusterfs.peers [#62273](https://github.com/saltstack/salt/issues/62273) +- Fix Salt Package Manager (SPM) exception when calling spm create_repo . 
[#62281](https://github.com/saltstack/salt/issues/62281) +- Fix matcher slowness due to loader invocation [#62283](https://github.com/saltstack/salt/issues/62283) +- Fixes the Puppet module for non-aio Puppet packages for example running the Puppet module on FreeBSD. [#62323](https://github.com/saltstack/salt/issues/62323) +- Issue 62334: Displays a debug log message instead of an error log message when the publisher fails to connect [#62334](https://github.com/saltstack/salt/issues/62334) +- Fix pyobjects renderer access to opts and sls [#62336](https://github.com/saltstack/salt/issues/62336) +- Fix use of random shuffle and sample functions as Jinja filters [#62372](https://github.com/saltstack/salt/issues/62372) +- Fix groups with duplicate GIDs are not returned by get_group_list [#62377](https://github.com/saltstack/salt/issues/62377) +- Fix the "zpool.present" state when enabling zpool features that are already active. [#62390](https://github.com/saltstack/salt/issues/62390) +- Fix ability to execute remote file client methods in saltcheck [#62398](https://github.com/saltstack/salt/issues/62398) +- Update all platforms to use pycparser 2.21 or greater for Py 3.9 or higher, fixes fips fault with openssl v3.x [#62400](https://github.com/saltstack/salt/issues/62400) +- Due to changes in the Netmiko library for the exception paths, need to check the version of Netmiko python library and then import the exceptions from different locations depending on the result. [#62405](https://github.com/saltstack/salt/issues/62405) +- When using preq on a state, then prereq state will first be run with test=True to determine if there are changes. When there are changes, the state with the prereq option will be run prior to the prereq state. If this state fails then the prereq state will not run and the state output uses the test=True run. However, the proposed changes are included for the prereq state are included from the test=True run. 
We should pull those out as there weren't actually changes since the prereq state did not run. [#62408](https://github.com/saltstack/salt/issues/62408) +- Added directory mode for file.copy with makedirs [#62426](https://github.com/saltstack/salt/issues/62426) +- Provide better error handling in the various napalm proxy minion functions when the device is not accessible. [#62435](https://github.com/saltstack/salt/issues/62435) +- When handling aggregation, change the order to ensure that the requisites are aggregated first and then the state functions are aggregated. Caching whether aggregate functions are available for particular states so we don't need to attempt to load them every time. [#62439](https://github.com/saltstack/salt/issues/62439) +- The patch allows bootstrapping kubernetes clusters in the version above 1.13 via salt module [#62451](https://github.com/saltstack/salt/issues/62451) +- sysctl.persist now updates the in-memory value on FreeBSD even if the on-disk value was already correct. [#62461](https://github.com/saltstack/salt/issues/62461) +- Fixed parsing CDROM apt sources [#62474](https://github.com/saltstack/salt/issues/62474) +- Update sanitizing masking for Salt SSH to include additional password like strings. [#62483](https://github.com/saltstack/salt/issues/62483) +- Fix user/group checking on file state functions in the test mode. [#62499](https://github.com/saltstack/salt/issues/62499) +- Fix user.present to allow removing groups using optional_groups parameter and enforcing idempotent group membership. [#62502](https://github.com/saltstack/salt/issues/62502) +- Fix possible tracebacks if a package with '------' or '======' in the description is installed on the Debian based minion. 
[#62519](https://github.com/saltstack/salt/issues/62519) +- Fixed the omitted "pool" parameter when cloning a VM with the proxmox salt-cloud driver [#62521](https://github.com/saltstack/salt/issues/62521) +- Fix rendering of pyobjects states in saltcheck [#62523](https://github.com/saltstack/salt/issues/62523) +- Fixes pillar where a corrupted CacheDisk file forces the pillar to be rebuilt [#62527](https://github.com/saltstack/salt/issues/62527) +- Use str() method instead of repo_line for when python3-apt is installed or not in aptpkg.py. [#62546](https://github.com/saltstack/salt/issues/62546) +- Remove the connection_timeout from netmiko_connection_args before netmiko_connection_args is added to __context__["netmiko_device"]["args"] which is passed along to the Netmiko library. [#62547](https://github.com/saltstack/salt/issues/62547) +- Fix order specific mount.mounted options for persist [#62556](https://github.com/saltstack/salt/issues/62556) +- Fixed salt-cloud cloning a proxmox VM with a specified new vmid. [#62558](https://github.com/saltstack/salt/issues/62558) +- Fix runas with cmd module when using the onedir bundled packages [#62565](https://github.com/saltstack/salt/issues/62565) +- Update setproctitle version for all platforms [#62576](https://github.com/saltstack/salt/issues/62576) +- Fixed missing parameters when cloning a VM with the proxmox salt-cloud driver [#62580](https://github.com/saltstack/salt/issues/62580) +- Handle PermissionError when importing crypt when FIPS is enabled. [#62587](https://github.com/saltstack/salt/issues/62587) +- Correctly reraise exceptions in states.http [#62595](https://github.com/saltstack/salt/issues/62595) +- Fixed syndic eauth. Now jobs will be published when a valid eauth user is targeting allowed minions/functions. [#62618](https://github.com/saltstack/salt/issues/62618) +- updated rest_cherry/app to properly detect arg sent as a string as curl will do when only one arg is supplied. 
[#62624](https://github.com/saltstack/salt/issues/62624) +- Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline [#62633](https://github.com/saltstack/salt/issues/62633) +- Fixed vault ext pillar return data for KV v2 [#62651](https://github.com/saltstack/salt/issues/62651) +- Fix saltcheck _get_top_states doesn't pass saltenv to state.show_top [#62654](https://github.com/saltstack/salt/issues/62654) +- Fix groupadd.* functions hard code relative command name [#62657](https://github.com/saltstack/salt/issues/62657) +- Fixed pdbedit.create trying to use a bytes-like hash as string. [#62670](https://github.com/saltstack/salt/issues/62670) +- Fix depenency on legacy boto module in boto3 modules [#62672](https://github.com/saltstack/salt/issues/62672) +- Modified "_get_flags" function so that it returns regex flags instead of integers [#62676](https://github.com/saltstack/salt/issues/62676) +- Change startup ReqServer log messages from error to info level. [#62728](https://github.com/saltstack/salt/issues/62728) +- Fix kmod.* functions hard code relative command name [#62772](https://github.com/saltstack/salt/issues/62772) +- Remove mako as a dependency in Windows and macOS. [#62785](https://github.com/saltstack/salt/issues/62785) +- Fix mac_brew_pkg to work with null taps [#62793](https://github.com/saltstack/salt/issues/62793) +- Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" items is being treated as a schedule item. [#62795](https://github.com/saltstack/salt/issues/62795) +- Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode [#62817](https://github.com/saltstack/salt/issues/62817) +- Include UID and GID checks in modules.file.check_perms as well as comparing + ownership by username and group name. 
[#62818](https://github.com/saltstack/salt/issues/62818) +- Fix presence events on TCP transport by removing a client's presence when minion disconnects from publish channel correctly [#62826](https://github.com/saltstack/salt/issues/62826) +- Remove Azure deprecation messages from functions that always run w/ salt-cloud [#62845](https://github.com/saltstack/salt/issues/62845) +- Use select instead of iterating over entrypoints as a dictionary for importlib_metadata>=5.0.0 [#62854](https://github.com/saltstack/salt/issues/62854) +- Fixed master job scheduler using when [#62858](https://github.com/saltstack/salt/issues/62858) +- LGPO: Added support for missing domain controller policies: VulnerableChannelAllowList and LdapEnforceChannelBinding [#62873](https://github.com/saltstack/salt/issues/62873) +- Fix unnecessarily complex gce metadata grains code to use Google's metadata service more effectively. [#62878](https://github.com/saltstack/salt/issues/62878) +- Fixed dockermod version_info function for docker-py 6.0.0+ [#62882](https://github.com/saltstack/salt/issues/62882) +- Moving setting the LOAD_BALANCING_POLICY_MAP dictionary into the try except block that determines if the cassandra_cql module should be made available. [#62886](https://github.com/saltstack/salt/issues/62886) +- Updating various MongoDB module functions to work with latest version of pymongo. [#62900](https://github.com/saltstack/salt/issues/62900) +- Restored channel for Syndic minions to send job returns to the Salt master. [#62933](https://github.com/saltstack/salt/issues/62933) +- Removed _resolve_deps as it required a library that is not generally available, and switched to apt-get for everything as that can auto resolve dependencies. 
[#62934](https://github.com/saltstack/salt/issues/62934) +- Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang [#62937](https://github.com/saltstack/salt/issues/62937) +- Allow root user to modify crontab lines for non-root users (except AIX and Solaris). Align crontab line changes with the file ones and also with listing crontab. [#62940](https://github.com/saltstack/salt/issues/62940) +- Fix systemd_service.* functions hard code relative command name [#62942](https://github.com/saltstack/salt/issues/62942) +- Fix file.symlink backupname operation can copy remote contents to local disk [#62953](https://github.com/saltstack/salt/issues/62953) +- Issue #62968: Fix issue where cloud deployments were putting the keys in the wrong location on Windows hosts [#62968](https://github.com/saltstack/salt/issues/62968) +- Fixed gpg_passphrase issue with gpg decrypt/encrypt functions [#62977](https://github.com/saltstack/salt/issues/62977) +- Fix file.tidied FileNotFoundError [#62986](https://github.com/saltstack/salt/issues/62986) +- Fixed bug where module.wait states were detected as running legacy module.run syntax [#62988](https://github.com/saltstack/salt/issues/62988) +- Fixed issue with win_wua module where it wouldn't load if the CryptSvc was set to Manual start [#62993](https://github.com/saltstack/salt/issues/62993) +- The `__opts__` dunder dictionary is now added to the loader's `pack` if not + already present, which makes it accessible via the + `salt.loader.context.NamedLoaderContext` class. 
[#63013](https://github.com/saltstack/salt/issues/63013) +- Issue #63024: Fix issue where grains and config data were being placed in the wrong location on Windows hosts [#63024](https://github.com/saltstack/salt/issues/63024) +- Fix btrfs.subvolume_snapshot command failing [#63025](https://github.com/saltstack/salt/issues/63025) +- Fix file.retention_schedule always reports changes [#63033](https://github.com/saltstack/salt/issues/63033) +- Fix mongo authentication for mongo ext_pillar and mongo returner + + This fix also includes the ability to use the mongo connection string for mongo ext_pillar [#63058](https://github.com/saltstack/salt/issues/63058) +- Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. [#63103](https://github.com/saltstack/salt/issues/63103) +- TCP transport documentation now contains proper master/minion-side filtering information [#63120](https://github.com/saltstack/salt/issues/63120) +- Fixed gpg.verify does not respect gnupghome [#63145](https://github.com/saltstack/salt/issues/63145) +- User responsible for the runner is now correctly reported in the events on the event bus for the runner. [#63148](https://github.com/saltstack/salt/issues/63148) +- Made pillar cache pass extra minion data as well [#63208](https://github.com/saltstack/salt/issues/63208) +- Fix serious performance issues with the file.tidied module [#63231](https://github.com/saltstack/salt/issues/63231) +- Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. [#63317](https://github.com/saltstack/salt/issues/63317) +- Import StrictVersion and LooseVersion from setuptools.distutils.version or setuptools._distutils.version, if first not available [#63350](https://github.com/saltstack/salt/issues/63350) +- ``service.status`` on Windows no longer throws a CommandExecutionError if + the service is not found on the system. It now returns "Not Found" instead. 
[#63577](https://github.com/saltstack/salt/issues/63577) +- When the shell is passed as powershell or pwsh, only wrapper the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended. [#63590](https://github.com/saltstack/salt/issues/63590) +- LGPO: Added support for "Relax minimum password length limits" [#63596](https://github.com/saltstack/salt/issues/63596) +- Fixed the ability to set a scheduled task to auto delete if not scheduled to run again (``delete_after``) [#63650](https://github.com/saltstack/salt/issues/63650) +- When a job is disabled only increase it's _next_fire_time value if the job would have run at the current time, eg. the current _next_fire_time == now. [#63699](https://github.com/saltstack/salt/issues/63699) +- have salt.template.compile_template_str cleanup its temp files. [#63724](https://github.com/saltstack/salt/issues/63724) +- Check file is not empty before attempting to read pillar disk cache file [#63729](https://github.com/saltstack/salt/issues/63729) +- Fixed an issue with generating fingerprints for public keys with different line endings [#63742](https://github.com/saltstack/salt/issues/63742) +- Add `fileserver_interval` and `maintenance_interval` master configuration options. These options control how often to restart the FileServerUpdate and Maintenance processes. Some file server and pillar configurations are known to cause memory leaks over time. A notable example of this are configurations that use pygit2. Salt can not guarantee dependency libraries like pygit2 won't leak memory. Restarting any long running processes that use pygit2 guarantees we can keep the master's memory usage in check. [#63747](https://github.com/saltstack/salt/issues/63747) +- mac_xattr.list and mac_xattr.read will replace undecode-able bytes to avoid raising CommandExecutionError. 
[#63779](https://github.com/saltstack/salt/issues/63779) +- Change default GPG keyserver from pgp.mit.edu to keys.openpgp.org. [#63806](https://github.com/saltstack/salt/issues/63806) +- fix cherrypy 400 error output to be less generic. [#63835](https://github.com/saltstack/salt/issues/63835) +- Ensure kwargs is passed along to _call_apt when passed into install function. [#63847](https://github.com/saltstack/salt/issues/63847) +- remove eval and update logging to be more informative on bad config [#63879](https://github.com/saltstack/salt/issues/63879) +- add linux_distribution to util to stop dep warning [#63904](https://github.com/saltstack/salt/issues/63904) +- Fix ValueError when trying to close fileclient. Remove usage of __del__ and close the fileclient properly. [#63920](https://github.com/saltstack/salt/issues/63920) +- Handle the situation when a sub proxy minion does not init properly, eg. an exception happens, and the sub proxy object is not available. [#63923](https://github.com/saltstack/salt/issues/63923) +- Clarifying documentation for extension_modules configuration option. [#63929](https://github.com/saltstack/salt/issues/63929) +- Windows pkg module now properly handles versions containing strings [#63935](https://github.com/saltstack/salt/issues/63935) +- Handle the scenario when the check_cmd requisite is used with a state function when the state has a local check_cmd function but that function isn't used by that function. [#63948](https://github.com/saltstack/salt/issues/63948) +- Issue #63981: Allow users to pass verify_ssl to pkg.install/pkg.installed on Windows [#63981](https://github.com/saltstack/salt/issues/63981) +- Hardened permissions on workers.ipc and master_event_pub.ipc. 
[#64063](https://github.com/saltstack/salt/issues/64063) + + +### Added + +- Introduce a `LIB_STATE_DIR` syspaths variable which defaults to `CONFIG_DIR`, + but can be individually customized during installation by specifying + `--salt-lib-state-dir` during installation. Change the default `pki_dir` to + `/pki/master` (for the master) and `/pki/minion` + (for the minion). [#3396](https://github.com/saltstack/salt/issues/3396) +- Allow users to enable 'queue=True' for all state runs via config file [#31468](https://github.com/saltstack/salt/issues/31468) +- Added pillar templating to vault policies [#43287](https://github.com/saltstack/salt/issues/43287) +- Add support for NVMeF as a transport protocol for hosts in a Pure Storage FlashArray [#51088](https://github.com/saltstack/salt/issues/51088) +- A new salt-ssh roster that generates a roster by parses a known_hosts file. [#54679](https://github.com/saltstack/salt/issues/54679) +- Added Windows Event Viewer support [#54713](https://github.com/saltstack/salt/issues/54713) +- Added the win_lgpo_reg state and execution modules which will allow registry based group policy to be set directly in the Registry.pol file [#56013](https://github.com/saltstack/salt/issues/56013) +- Added resource tagging functions to boto_dynamodb execution module [#57500](https://github.com/saltstack/salt/issues/57500) +- Added `openvswitch_db` state module and functions `bridge_to_parent`, + `bridge_to_vlan`, `db_get`, and `db_set` to the `openvswitch` execution module. + Also added optional `parent` and `vlan` parameters to the + `openvswitch_bridge.present` state module function and the + `openvswitch.bridge_create` execution module function. [#58986](https://github.com/saltstack/salt/issues/58986) +- State module to manage SysFS attributes [#60154](https://github.com/saltstack/salt/issues/60154) +- Added ability for `salt.wait_for_event` to handle `event_id`s that have a list value. 
[#60430](https://github.com/saltstack/salt/issues/60430) +- Added suport for Linux ppc64le core grains (cpu_model, virtual, productname, manufacturer, serialnumber) and arm core grains (serialnumber, productname) [#60518](https://github.com/saltstack/salt/issues/60518) +- Added autostart option to virt.defined and virt.running states, along with virt.update execution modules. [#60700](https://github.com/saltstack/salt/issues/60700) +- Added .0 back to our versioning scheme for future versions (e.g. 3006.0) [#60722](https://github.com/saltstack/salt/issues/60722) +- Initial work to allow parallel startup of proxy minions when used as sub proxies with Deltaproxy. [#61153](https://github.com/saltstack/salt/issues/61153) +- Added node label support for GCE [#61245](https://github.com/saltstack/salt/issues/61245) +- Support the --priority flag when adding sources to Chocolatey. [#61319](https://github.com/saltstack/salt/issues/61319) +- Add namespace option to ext_pillar.http_json [#61335](https://github.com/saltstack/salt/issues/61335) +- Added a filter function to ps module to get a list of processes on a minion according to their state. [#61420](https://github.com/saltstack/salt/issues/61420) +- Add postgres.timeout option to postgres module for limiting postgres query times [#61433](https://github.com/saltstack/salt/issues/61433) +- Added new optional vault option, ``config_location``. This can be either ``master`` or ``local`` and defines where vault will look for connection details, either requesting them from the master or using the local config. [#61857](https://github.com/saltstack/salt/issues/61857) +- Add ipwrap() jinja filter to wrap IPv6 addresses with brackets. [#61931](https://github.com/saltstack/salt/issues/61931) +- 'tcp' transport is now available in ipv6-only network [#62009](https://github.com/saltstack/salt/issues/62009) +- Add `diff_attr` parameter to pkg.upgrade() (zypper/yum). 
[#62031](https://github.com/saltstack/salt/issues/62031) +- Config option pass_variable_prefix allows to distinguish variables that contain paths to pass secrets. + Config option pass_strict_fetch allows to error out when a secret cannot be fetched from pass. + Config option pass_dir allows setting the PASSWORD_STORE_DIR env for pass. + Config option pass_gnupghome allows setting the $GNUPGHOME env for pass. [#62120](https://github.com/saltstack/salt/issues/62120) +- Add file.pruned state and expanded file.rmdir exec module functionality [#62178](https://github.com/saltstack/salt/issues/62178) +- Added "dig.PTR" function to resolve PTR records for IPs, as well as tests and documentation [#62275](https://github.com/saltstack/salt/issues/62275) +- Added the ability to remove a KB using the DISM state/execution modules [#62366](https://github.com/saltstack/salt/issues/62366) +- Add " python" subcommand to allow execution or arbitrary scripts via bundled Python runtime [#62381](https://github.com/saltstack/salt/issues/62381) +- Add ability to provide conditions which convert normal state actions to no-op when true [#62446](https://github.com/saltstack/salt/issues/62446) +- Added debug log messages displaying the command being run when installing packages on Windows [#62480](https://github.com/saltstack/salt/issues/62480) +- Add biosvendor grain [#62496](https://github.com/saltstack/salt/issues/62496) +- Add ifelse Jinja function as found in CFEngine [#62508](https://github.com/saltstack/salt/issues/62508) +- Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. 
[#62539](https://github.com/saltstack/salt/issues/62539) +- Adds __env__substitution to ext_pillar.stack; followup of #61531, improved exception handling for stacked template (jinja) template rendering and yaml parsing in ext_pillar.stack [#62578](https://github.com/saltstack/salt/issues/62578) +- Increase file.tidied flexibility with regard to age and size [#62678](https://github.com/saltstack/salt/issues/62678) +- Added "connected_devices" feature to netbox pillar module. It contains extra information about devices connected to the minion [#62761](https://github.com/saltstack/salt/issues/62761) +- Add atomic file operation for symlink changes [#62768](https://github.com/saltstack/salt/issues/62768) +- Add password/account locking/unlocking in user.present state on supported operating systems [#62856](https://github.com/saltstack/salt/issues/62856) +- Added onchange configuration for script engine [#62867](https://github.com/saltstack/salt/issues/62867) +- Added output and bare functionality to export_key gpg module function [#62978](https://github.com/saltstack/salt/issues/62978) +- Add keyvalue serializer for environment files [#62983](https://github.com/saltstack/salt/issues/62983) +- Add ability to ignore symlinks in file.tidied [#63042](https://github.com/saltstack/salt/issues/63042) +- salt-cloud support IMDSv2 tokens when using 'use-instance-role-credentials' [#63067](https://github.com/saltstack/salt/issues/63067) +- Fix running fast tests twice and add git labels to suite. 
[#63081](https://github.com/saltstack/salt/issues/63081) +- Add ability for file.symlink to not set ownership on existing links [#63093](https://github.com/saltstack/salt/issues/63093) +- Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation [#63095](https://github.com/saltstack/salt/issues/63095) +- Add functions that will return the underlying block device, mount point, and filesystem type for a given path [#63098](https://github.com/saltstack/salt/issues/63098) +- Add ethtool execution and state module functions for pause [#63128](https://github.com/saltstack/salt/issues/63128) +- Add boardname grain [#63131](https://github.com/saltstack/salt/issues/63131) +- Added management of ECDSA/EdDSA private keys with x509 modules in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63248](https://github.com/saltstack/salt/issues/63248) +- Added x509 modules support for different output formats in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63249](https://github.com/saltstack/salt/issues/63249) +- Added deprecation_warning test state for ensuring that deprecation warnings are correctly emitted. [#63315](https://github.com/saltstack/salt/issues/63315) +- Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id. + This allows users to enable state_events on a per use basis rather than having to + enable them globally for all state runs. [#63316](https://github.com/saltstack/salt/issues/63316) +- Allow max queue size setting for state runs to prevent performance problems from queue growth [#63356](https://github.com/saltstack/salt/issues/63356) +- Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) +- Include the version of `relenv` in the versions report. 
[#63827](https://github.com/saltstack/salt/issues/63827) +- Added debug log messages displaying the command being run when removing packages on Windows [#63866](https://github.com/saltstack/salt/issues/63866) +- Adding the ability to exclude arguments from a state that end up passed to cmd.retcode when requisites such as onlyif or unless are used. [#63956](https://github.com/saltstack/salt/issues/63956) +- Add --next-release argument to salt/version.py, which prints the next upcoming release. [#64023](https://github.com/saltstack/salt/issues/64023) + + +### Security + +- Upgrade Requirements Due to Security Issues. + + * Upgrade to `cryptography>=39.0.1` due to: + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. + * Update to `markdown-it-py==2.2.0` due to: + * https://github.com/advisories/GHSA-jrwr-5x3p-hvc3 + * https://github.com/advisories/GHSA-vrjv-mxr7-vjf8 [#63882](https://github.com/saltstack/salt/issues/63882) + + +## 3006.0rc3 (2023-03-29) + + +### Removed + +- Remove and deprecate the __orchestration__ key from salt.runner and salt.wheel return data. To get it back, set features.enable_deprecated_orchestration_flag master configuration option to True. The flag will be completely removed in Salt 3008 Argon. [#59917](https://github.com/saltstack/salt/issues/59917) +- Removed distutils and replaced with setuptools, given distutils is deprecated and removed in Python 3.12 [#60476](https://github.com/saltstack/salt/issues/60476) +- Removed ``runtests`` targets from ``noxfile.py`` [#62239](https://github.com/saltstack/salt/issues/62239) +- Removed the PyObjC dependency. + + This addresses problems with building a one dir build for macOS. 
+ It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir(because it would be tied to the macOS version where the onedir was built). + Since it's currently not being used, it's removed. [#62432](https://github.com/saltstack/salt/issues/62432) +- Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. [#63874](https://github.com/saltstack/salt/issues/63874) + + +### Deprecated + +- renamed `keep_jobs`, specifying job cache TTL in hours, to `keep_jobs_seconds`, specifying TTL in seconds. + `keep_jobs` will be removed in the Argon release [#55295](https://github.com/saltstack/salt/issues/55295) +- Removing all references to napalm-base which is no longer supported. [#61542](https://github.com/saltstack/salt/issues/61542) +- The 'ip_bracket' function has been moved from salt/utils/zeromq.py in salt/utils/network.py [#62009](https://github.com/saltstack/salt/issues/62009) +- The `expand_repo_def` function in `salt.modules.aptpkg` is now deprecated. It's only used in `salt.states.pkgrepo` and it has no use of being exposed to the CLI. [#62485](https://github.com/saltstack/salt/issues/62485) +- Deprecated defunct Django returner [#62644](https://github.com/saltstack/salt/issues/62644) +- Deprecate core ESXi and associated states and modules, vcenter and vsphere support in favor of Salt VMware Extensions [#62754](https://github.com/saltstack/salt/issues/62754) +- Removing manufacture grain which has been deprecated. [#62914](https://github.com/saltstack/salt/issues/62914) +- Removing deprecated utils/boto3_elasticsearch.py [#62915](https://github.com/saltstack/salt/issues/62915) +- Removing support for the now deprecated _ext_nodes from salt/master.py. [#62917](https://github.com/saltstack/salt/issues/62917) +- Deprecating the Salt Slack engine in favor of the Salt Slack Bolt Engine. 
[#63095](https://github.com/saltstack/salt/issues/63095) +- `salt.utils.version.StrictVersion` is now deprecated and it's use should be replaced with `salt.utils.version.Version`. [#63383](https://github.com/saltstack/salt/issues/63383) + + +### Changed + +- More intelligent diffing in changes of file.serialize state. [#48609](https://github.com/saltstack/salt/issues/48609) +- Move deprecation of the neutron module to Argon. Please migrate to the neutronng module instead. [#49430](https://github.com/saltstack/salt/issues/49430) +- ``umask`` is now a global state argument, instead of only applying to ``cmd`` + states. [#57803](https://github.com/saltstack/salt/issues/57803) +- Update pillar.obfuscate to accept kwargs in addition to args. This is useful when passing in keyword arguments like saltenv that are then passed along to pillar.items. [#58971](https://github.com/saltstack/salt/issues/58971) +- Improve support for listing macOS brew casks [#59439](https://github.com/saltstack/salt/issues/59439) +- Add missing MariaDB Grants to mysql module. + MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. + Also improved exception handling in `grant_add` which did not log the original error message and replaced it with a generic error. [#61409](https://github.com/saltstack/salt/issues/61409) +- Use VENV_PIP_TARGET environment variable as a default target for pip if present. [#62089](https://github.com/saltstack/salt/issues/62089) +- Disabled FQDNs grains on macOS by default [#62168](https://github.com/saltstack/salt/issues/62168) +- Replaced pyroute2.IPDB with pyroute2.NDB, as the former is deprecated [#62218](https://github.com/saltstack/salt/issues/62218) +- Enhance capture of error messages for Zypper calls in zypperpkg module. 
[#62346](https://github.com/saltstack/salt/issues/62346) +- Removed GPG_1_3_1 check [#62895](https://github.com/saltstack/salt/issues/62895) +- Requisite state chunks now all consistently contain `__id__`, `__sls__` and `name`. [#63012](https://github.com/saltstack/salt/issues/63012) +- netapi_enable_clients option to allow enabling/disabling of clients in salt-api. + By default all clients will now be disabled. Users of salt-api will need + to update their master config to enable the clients that they use. Not adding + the netapi_enable_clients option with required clients to the master config will + disable salt-api. [#63050](https://github.com/saltstack/salt/issues/63050) +- Stop relying on `salt/_version.py` to write Salt's version. Instead use `salt/_version.txt` which only contains the version string. [#63383](https://github.com/saltstack/salt/issues/63383) +- Set enable_fqdns_grains to be False by default. [#63595](https://github.com/saltstack/salt/issues/63595) +- Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file. [#63710](https://github.com/saltstack/salt/issues/63710) +- Upgraded to `relenv==0.9.0` [#63883](https://github.com/saltstack/salt/issues/63883) + + +### Fixed + +- Add kwargs to handle extra parameters for http.query [#36138](https://github.com/saltstack/salt/issues/36138) +- Fix mounted bind mounts getting active mount options added [#39292](https://github.com/saltstack/salt/issues/39292) +- Fix `sysctl.present` converts spaces to tabs. 
[#40054](https://github.com/saltstack/salt/issues/40054) +- Fixes state pkg.purged to purge removed packages on Debian family systems [#42306](https://github.com/saltstack/salt/issues/42306) +- Fix fun_args missing from syndic returns [#45823](https://github.com/saltstack/salt/issues/45823) +- Fix mount.mounted with 'mount: False' reports unmounted file system as unchanged when running with test=True [#47201](https://github.com/saltstack/salt/issues/47201) +- Issue #49310: Allow users to touch a file with Unix date of birth [#49310](https://github.com/saltstack/salt/issues/49310) +- Do not raise an exception in pkg.info_installed on nonzero return code [#51620](https://github.com/saltstack/salt/issues/51620) +- Passes the value of the force parameter from file.copy to its call to file.remove so that files with the read-only attribute are handled. [#51739](https://github.com/saltstack/salt/issues/51739) +- Fixed x509.certificate_managed creates new certificate every run in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#52167](https://github.com/saltstack/salt/issues/52167) +- Don't check for cached pillar errors on state.apply [#52354](https://github.com/saltstack/salt/issues/52354), [#57180](https://github.com/saltstack/salt/issues/57180), [#59339](https://github.com/saltstack/salt/issues/59339) +- Swapping out args and kwargs for arg and kwarg respectively in the Slack engine when the command passed is a runner. [#52400](https://github.com/saltstack/salt/issues/52400) +- Ensure when we're adding chunks to the rules when running aggregation with the iptables state module we use a copy of the chunk otherwise we end up with a recursive mess. [#53353](https://github.com/saltstack/salt/issues/53353) +- When user_create or user_remove fail, return False instead of returning the error. [#53377](https://github.com/saltstack/salt/issues/53377) +- Include sync_roster when sync_all is called. 
[#53914](https://github.com/saltstack/salt/issues/53914) +- Avoid warning noise in lograte.get [#53988](https://github.com/saltstack/salt/issues/53988) +- Fixed listing revoked keys with gpg.list_keys [#54347](https://github.com/saltstack/salt/issues/54347) +- Fix mount.mounted does not handle blanks properly [#54508](https://github.com/saltstack/salt/issues/54508) +- Fixed grain num_cpus get wrong CPUs count in case of inconsistent CPU numbering. [#54682](https://github.com/saltstack/salt/issues/54682) +- Fix spelling error for python_shell argument in dpkg_lower module [#54907](https://github.com/saltstack/salt/issues/54907) +- Cleaned up bytes response data before sending to non-bytes compatible returners (postgres, mysql) [#55226](https://github.com/saltstack/salt/issues/55226) +- Fixed malformed state return when testing file.managed with unavailable source file [#55269](https://github.com/saltstack/salt/issues/55269) +- Included stdout in error message for Zypper calls in zypperpkg module. [#56016](https://github.com/saltstack/salt/issues/56016) +- Fixed pillar.filter_by with salt-ssh [#56093](https://github.com/saltstack/salt/issues/56093) +- Fix boto_route53 issue with (multiple) VPCs. [#57139](https://github.com/saltstack/salt/issues/57139) +- Remove log from mine runner which was not used. [#57463](https://github.com/saltstack/salt/issues/57463) +- Fixed x509.read_certificate error when reading a Microsoft CA issued certificate in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#57535](https://github.com/saltstack/salt/issues/57535) +- Updating Slack engine to use slack_bolt library. [#57842](https://github.com/saltstack/salt/issues/57842) +- Fixed warning about replace=True with x509.certificate_managed in the new cryptography x509 module. 
[#58165](https://github.com/saltstack/salt/issues/58165) +- Fix salt.modules.pip:is_installed doesn't handle locally installed packages [#58202](https://github.com/saltstack/salt/issues/58202) +- Add missing MariaDB Grants to mysql module. MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. [#58297](https://github.com/saltstack/salt/issues/58297) +- linux_shadow: Fix cases where malformed shadow entries cause `user.present` + states to fail. [#58423](https://github.com/saltstack/salt/issues/58423) +- Fixed salt.utils.compat.cmp to work with dictionaries [#58729](https://github.com/saltstack/salt/issues/58729) +- Fixed formatting for terse output mode [#58953](https://github.com/saltstack/salt/issues/58953) +- Fixed RecursiveDictDiffer with added nested dicts [#59017](https://github.com/saltstack/salt/issues/59017) +- Fixed x509.certificate_managed has DoS effect on master in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59169](https://github.com/saltstack/salt/issues/59169) +- Fixed saltnado websockets disconnecting immediately [#59183](https://github.com/saltstack/salt/issues/59183) +- Fixed x509.certificate_managed rolls certificates every now and then in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59315](https://github.com/saltstack/salt/issues/59315) +- Fix postgres_privileges.present not idempotent for functions [#59585](https://github.com/saltstack/salt/issues/59585) +- Fixed influxdb_continuous_query.present state to provide the client args to the underlying module on create. [#59766](https://github.com/saltstack/salt/issues/59766) +- Warn when using insecure (http:// based) key_urls for apt-based systems in pkgrepo.managed, and add a kwarg that determines the validity of such a url. 
[#59786](https://github.com/saltstack/salt/issues/59786) +- add load balancing policy default option and ensure the module can be executed with arguments from CLI [#59909](https://github.com/saltstack/salt/issues/59909) +- Fix salt-ssh when using imports with extra-filerefs. [#60003](https://github.com/saltstack/salt/issues/60003) +- Fixed cache directory corruption startup error [#60170](https://github.com/saltstack/salt/issues/60170) +- Update docs remove dry_run in docstring of file.blockreplace state. [#60227](https://github.com/saltstack/salt/issues/60227) +- Adds Parrot to OS_Family_Map in grains. [#60249](https://github.com/saltstack/salt/issues/60249) +- Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions [#60365](https://github.com/saltstack/salt/issues/60365) +- Use return code in iptables --check to verify rule exists. [#60467](https://github.com/saltstack/salt/issues/60467) +- Fix regression pip.installed does not pass env_vars when calling pip.list [#60557](https://github.com/saltstack/salt/issues/60557) +- Fix xfs module when additional output included in mkfs.xfs command. [#60853](https://github.com/saltstack/salt/issues/60853) +- Fixed parsing new format of terraform states in roster.terraform [#60915](https://github.com/saltstack/salt/issues/60915) +- Fixed recognizing installed ARMv7 rpm packages in compatible architectures. [#60994](https://github.com/saltstack/salt/issues/60994) +- Fixing changes dict in pkg state to be consistent when installing and test=True. [#60995](https://github.com/saltstack/salt/issues/60995) +- Fix cron.present duplicating entries when changing timespec to special. [#60997](https://github.com/saltstack/salt/issues/60997) +- Made salt-ssh respect --wipe again [#61083](https://github.com/saltstack/salt/issues/61083) +- state.orchestrate_single only passes a pillar if it is set to the state + function. 
This allows it to be used with state functions that don't accept a + pillar keyword argument. [#61092](https://github.com/saltstack/salt/issues/61092) +- Fix ipset state when the comment kwarg is set. [#61122](https://github.com/saltstack/salt/issues/61122) +- Fix issue with archive.unzip where the password was not being encoded for the extract function [#61422](https://github.com/saltstack/salt/issues/61422) +- Some Linux distributions (like AlmaLinux, Astra Linux, Debian, Mendel, Linux + Mint, Pop!_OS, Rocky Linux) report different `oscodename`, `osfullname`, + `osfinger` grains if lsb-release is installed or not. They have been changed to + only derive these OS grains from `/etc/os-release`. [#61618](https://github.com/saltstack/salt/issues/61618) +- Pop!_OS uses the full version (YY.MM) in the osfinger grain now, not just the year. This allows differentiating for example between 20.04 and 20.10. [#61619](https://github.com/saltstack/salt/issues/61619) +- Fix ssh config roster to correctly parse the ssh config files that contain spaces. [#61650](https://github.com/saltstack/salt/issues/61650) +- Fix SoftLayer configuration not raising an exception when a domain is missing [#61727](https://github.com/saltstack/salt/issues/61727) +- Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry [#61789](https://github.com/saltstack/salt/issues/61789) +- Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function. 
[#61805](https://github.com/saltstack/salt/issues/61805) +- Fixed malformed state return when merge-serializing to an improperly formatted file [#61814](https://github.com/saltstack/salt/issues/61814) +- Made cmdmod._run[_all]_quiet work during minion startup on MacOS with runas specified (which fixed mac_service) [#61816](https://github.com/saltstack/salt/issues/61816) +- When deleting the vault cache, also delete from the session cache [#61821](https://github.com/saltstack/salt/issues/61821) +- Ignore errors on reading license info with dpkg_lowpkg to prevent tracebacks on getting package information. [#61827](https://github.com/saltstack/salt/issues/61827) +- win_lgpo: Display conflicting policy names when more than one policy is found [#61859](https://github.com/saltstack/salt/issues/61859) +- win_lgpo: Fixed intermittent KeyError when getting policy setting using lgpo.get_policy [#61860](https://github.com/saltstack/salt/issues/61860) +- Fixed listing minions on OpenBSD [#61966](https://github.com/saltstack/salt/issues/61966) +- Make Salt to return an error on "pkg" modules and states when targeting duplicated package names [#62019](https://github.com/saltstack/salt/issues/62019) +- Fix return of REST-returned permissions when auth_list is set [#62022](https://github.com/saltstack/salt/issues/62022) +- Normalize package names once on using pkg.installed/removed with yum to make it possible to install packages with the name containing a part similar to a name of architecture. [#62029](https://github.com/saltstack/salt/issues/62029) +- Fix inconsitency regarding name and pkgs parameters between zypperpkg.upgrade() and yumpkg.upgrade() [#62030](https://github.com/saltstack/salt/issues/62030) +- Fix attr=all handling in pkg.list_pkgs() (yum/zypper). 
[#62032](https://github.com/saltstack/salt/issues/62032) +- Fixed the humanname being ignored in pkgrepo.managed on openSUSE Leap [#62053](https://github.com/saltstack/salt/issues/62053) +- Fixed issue with some LGPO policies having whitespace at the beginning or end of the element alias [#62058](https://github.com/saltstack/salt/issues/62058) +- Fix ordering of args to libcloud_storage.download_object module [#62074](https://github.com/saltstack/salt/issues/62074) +- Ignore extend declarations in sls files that are excluded. [#62082](https://github.com/saltstack/salt/issues/62082) +- Remove leftover usage of impacket [#62101](https://github.com/saltstack/salt/issues/62101) +- Pass executable path from _get_path_exec() is used when calling the program. + The $HOME env is no longer modified globally. + Only trailing newlines are stripped from the fetched secret. + Pass process arguments are handled in a secure way. [#62120](https://github.com/saltstack/salt/issues/62120) +- Ignore some command return codes in openbsdrcctl_service to prevent spurious errors [#62131](https://github.com/saltstack/salt/issues/62131) +- Fixed extra period in filename output in tls module. Instead of "server.crt." it will now be "server.crt". [#62139](https://github.com/saltstack/salt/issues/62139) +- Make sure lingering PAexec-*.exe files in the Windows directory are cleaned up [#62152](https://github.com/saltstack/salt/issues/62152) +- Restored Salt's DeprecationWarnings [#62185](https://github.com/saltstack/salt/issues/62185) +- Fixed issue with forward slashes on Windows with file.recurse and clean=True [#62197](https://github.com/saltstack/salt/issues/62197) +- Recognize OSMC as Debian-based [#62198](https://github.com/saltstack/salt/issues/62198) +- Fixed Zypper module failing on RPM lock file being temporarily unavailable. 
[#62204](https://github.com/saltstack/salt/issues/62204) +- Improved error handling and diagnostics in the proxmox salt-cloud driver [#62211](https://github.com/saltstack/salt/issues/62211) +- Added EndeavourOS to the Arch os_family. [#62220](https://github.com/saltstack/salt/issues/62220) +- Fix salt-ssh not detecting `platform-python` as a valid interpreter on EL8 [#62235](https://github.com/saltstack/salt/issues/62235) +- Fix pkg.version_cmp on openEuler and a few other os flavors. [#62248](https://github.com/saltstack/salt/issues/62248) +- Fix localhost detection in glusterfs.peers [#62273](https://github.com/saltstack/salt/issues/62273) +- Fix Salt Package Manager (SPM) exception when calling spm create_repo . [#62281](https://github.com/saltstack/salt/issues/62281) +- Fix matcher slowness due to loader invocation [#62283](https://github.com/saltstack/salt/issues/62283) +- Fixes the Puppet module for non-aio Puppet packages for example running the Puppet module on FreeBSD. [#62323](https://github.com/saltstack/salt/issues/62323) +- Issue 62334: Displays a debug log message instead of an error log message when the publisher fails to connect [#62334](https://github.com/saltstack/salt/issues/62334) +- Fix pyobjects renderer access to opts and sls [#62336](https://github.com/saltstack/salt/issues/62336) +- Fix use of random shuffle and sample functions as Jinja filters [#62372](https://github.com/saltstack/salt/issues/62372) +- Fix groups with duplicate GIDs are not returned by get_group_list [#62377](https://github.com/saltstack/salt/issues/62377) +- Fix the "zpool.present" state when enabling zpool features that are already active. 
[#62390](https://github.com/saltstack/salt/issues/62390) +- Fix ability to execute remote file client methods in saltcheck [#62398](https://github.com/saltstack/salt/issues/62398) +- Update all platforms to use pycparser 2.21 or greater for Py 3.9 or higher, fixes fips fault with openssl v3.x [#62400](https://github.com/saltstack/salt/issues/62400) +- Due to changes in the Netmiko library for the exception paths, need to check the version of Netmiko python library and then import the exceptions from different locations depending on the result. [#62405](https://github.com/saltstack/salt/issues/62405) +- When using preq on a state, then prereq state will first be run with test=True to determine if there are changes. When there are changes, the state with the prereq option will be run prior to the prereq state. If this state fails then the prereq state will not run and the state output uses the test=True run. However, the proposed changes are included for the prereq state are included from the test=True run. We should pull those out as there weren't actually changes since the prereq state did not run. [#62408](https://github.com/saltstack/salt/issues/62408) +- Added directory mode for file.copy with makedirs [#62426](https://github.com/saltstack/salt/issues/62426) +- Provide better error handling in the various napalm proxy minion functions when the device is not accessible. [#62435](https://github.com/saltstack/salt/issues/62435) +- When handling aggregation, change the order to ensure that the requisites are aggregated first and then the state functions are aggregated. Caching whether aggregate functions are available for particular states so we don't need to attempt to load them everytime. 
[#62439](https://github.com/saltstack/salt/issues/62439) +- The patch allows to bootstrap kubernetes clusters in the version above 1.13 via salt module [#62451](https://github.com/saltstack/salt/issues/62451) +- sysctl.persist now updates the in-memory value on FreeBSD even if the on-disk value was already correct. [#62461](https://github.com/saltstack/salt/issues/62461) +- Fixed parsing CDROM apt sources [#62474](https://github.com/saltstack/salt/issues/62474) +- Update sanitizing masking for Salt SSH to include additional password like strings. [#62483](https://github.com/saltstack/salt/issues/62483) +- Fix user/group checking on file state functions in the test mode. [#62499](https://github.com/saltstack/salt/issues/62499) +- Fix user.present to allow removing groups using optional_groups parameter and enforcing idempotent group membership. [#62502](https://github.com/saltstack/salt/issues/62502) +- Fix possible tracebacks if a package with '------' or '======' in the description is installed on the Debian based minion. [#62519](https://github.com/saltstack/salt/issues/62519) +- Fixed the omitted "pool" parameter when cloning a VM with the proxmox salt-cloud driver [#62521](https://github.com/saltstack/salt/issues/62521) +- Fix rendering of pyobjects states in saltcheck [#62523](https://github.com/saltstack/salt/issues/62523) +- Fixes pillar where a corrupted CacheDisk file forces the pillar to be rebuilt [#62527](https://github.com/saltstack/salt/issues/62527) +- Use str() method instead of repo_line for when python3-apt is installed or not in aptpkg.py. [#62546](https://github.com/saltstack/salt/issues/62546) +- Remove the connection_timeout from netmiko_connection_args before netmiko_connection_args is added to __context__["netmiko_device"]["args"] which is passed along to the Netmiko library.
[#62547](https://github.com/saltstack/salt/issues/62547) +- Fix order specific mount.mounted options for persist [#62556](https://github.com/saltstack/salt/issues/62556) +- Fixed salt-cloud cloning a proxmox VM with a specified new vmid. [#62558](https://github.com/saltstack/salt/issues/62558) +- Fix runas with cmd module when using the onedir bundled packages [#62565](https://github.com/saltstack/salt/issues/62565) +- Update setproctitle version for all platforms [#62576](https://github.com/saltstack/salt/issues/62576) +- Fixed missing parameters when cloning a VM with the proxmox salt-cloud driver [#62580](https://github.com/saltstack/salt/issues/62580) +- Handle PermissionError when importing crypt when FIPS is enabled. [#62587](https://github.com/saltstack/salt/issues/62587) +- Correctly reraise exceptions in states.http [#62595](https://github.com/saltstack/salt/issues/62595) +- Fixed syndic eauth. Now jobs will be published when a valid eauth user is targeting allowed minions/functions. [#62618](https://github.com/saltstack/salt/issues/62618) +- updated rest_cherry/app to properly detect arg sent as a string as curl will do when only one arg is supplied. [#62624](https://github.com/saltstack/salt/issues/62624) +- Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline [#62633](https://github.com/saltstack/salt/issues/62633) +- Fixed vault ext pillar return data for KV v2 [#62651](https://github.com/saltstack/salt/issues/62651) +- Fix saltcheck _get_top_states doesn't pass saltenv to state.show_top [#62654](https://github.com/saltstack/salt/issues/62654) +- Fix groupadd.* functions hard code relative command name [#62657](https://github.com/saltstack/salt/issues/62657) +- Fixed pdbedit.create trying to use a bytes-like hash as string. 
[#62670](https://github.com/saltstack/salt/issues/62670) +- Fix dependency on legacy boto module in boto3 modules [#62672](https://github.com/saltstack/salt/issues/62672) +- Modified "_get_flags" function so that it returns regex flags instead of integers [#62676](https://github.com/saltstack/salt/issues/62676) +- Change startup ReqServer log messages from error to info level. [#62728](https://github.com/saltstack/salt/issues/62728) +- Fix kmod.* functions hard code relative command name [#62772](https://github.com/saltstack/salt/issues/62772) +- Fix mac_brew_pkg to work with null taps [#62793](https://github.com/saltstack/salt/issues/62793) +- Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" items are being treated as a schedule item. [#62795](https://github.com/saltstack/salt/issues/62795) +- Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode [#62817](https://github.com/saltstack/salt/issues/62817) +- Include UID and GID checks in modules.file.check_perms as well as comparing + ownership by username and group name.
[#62818](https://github.com/saltstack/salt/issues/62818) +- Fix presence events on TCP transport by removing a client's presence when minion disconnects from publish channel correctly [#62826](https://github.com/saltstack/salt/issues/62826) +- Remove Azure deprecation messages from functions that always run w/ salt-cloud [#62845](https://github.com/saltstack/salt/issues/62845) +- Use select instead of iterating over entrypoints as a dictionary for importlib_metadata>=5.0.0 [#62854](https://github.com/saltstack/salt/issues/62854) +- Fixed master job scheduler using when [#62858](https://github.com/saltstack/salt/issues/62858) +- LGPO: Added support for missing domain controller policies: VulnerableChannelAllowList and LdapEnforceChannelBinding [#62873](https://github.com/saltstack/salt/issues/62873) +- Fix unnecessarily complex gce metadata grains code to use googles metadata service more effectively. [#62878](https://github.com/saltstack/salt/issues/62878) +- Fixed dockermod version_info function for docker-py 6.0.0+ [#62882](https://github.com/saltstack/salt/issues/62882) +- Moving setting the LOAD_BALANCING_POLICY_MAP dictionary into the try except block that determines if the cassandra_cql module should be made available. [#62886](https://github.com/saltstack/salt/issues/62886) +- Updating various MongoDB module functions to work with latest version of pymongo. [#62900](https://github.com/saltstack/salt/issues/62900) +- Restored channel for Syndic minions to send job returns to the Salt master. [#62933](https://github.com/saltstack/salt/issues/62933) +- removed _resolve_deps as it required a library that is not generally available, and switched to apt-get for everything as that can auto resolve dependencies.
[#62934](https://github.com/saltstack/salt/issues/62934) +- Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang [#62937](https://github.com/saltstack/salt/issues/62937) +- Allow root user to modify crontab lines for non-root users (except AIX and Solaris). Align crontab line changes with the file ones and also with listing crontab. [#62940](https://github.com/saltstack/salt/issues/62940) +- Fix systemd_service.* functions hard code relative command name [#62942](https://github.com/saltstack/salt/issues/62942) +- Fix file.symlink backupname operation can copy remote contents to local disk [#62953](https://github.com/saltstack/salt/issues/62953) +- Issue #62968: Fix issue where cloud deployments were putting the keys in the wrong location on Windows hosts [#62968](https://github.com/saltstack/salt/issues/62968) +- Fixed gpg_passphrase issue with gpg decrypt/encrypt functions [#62977](https://github.com/saltstack/salt/issues/62977) +- Fix file.tidied FileNotFoundError [#62986](https://github.com/saltstack/salt/issues/62986) +- Fixed bug where module.wait states were detected as running legacy module.run syntax [#62988](https://github.com/saltstack/salt/issues/62988) +- Fixed issue with win_wua module where it wouldn't load if the CryptSvc was set to Manual start [#62993](https://github.com/saltstack/salt/issues/62993) +- The `__opts__` dunder dictionary is now added to the loader's `pack` if not + already present, which makes it accessible via the + `salt.loader.context.NamedLoaderContext` class. 
[#63013](https://github.com/saltstack/salt/issues/63013) +- Issue #63024: Fix issue where grains and config data were being place in the wrong location on Windows hosts [#63024](https://github.com/saltstack/salt/issues/63024) +- Fix btrfs.subvolume_snapshot command failing [#63025](https://github.com/saltstack/salt/issues/63025) +- Fix file.retention_schedule always reports changes [#63033](https://github.com/saltstack/salt/issues/63033) +- Fix mongo authentication for mongo ext_pillar and mongo returner + + This fix also include the ability to use the mongo connection string for mongo ext_pillar [#63058](https://github.com/saltstack/salt/issues/63058) +- Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. [#63103](https://github.com/saltstack/salt/issues/63103) +- TCP transport documentation now contains proper master/minion-side filtering information [#63120](https://github.com/saltstack/salt/issues/63120) +- Fixed gpg.verify does not respect gnupghome [#63145](https://github.com/saltstack/salt/issues/63145) +- Made pillar cache pass extra minion data as well [#63208](https://github.com/saltstack/salt/issues/63208) +- Fix serious performance issues with the file.tidied module [#63231](https://github.com/saltstack/salt/issues/63231) +- Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. [#63317](https://github.com/saltstack/salt/issues/63317) +- Import StrictVersion and LooseVersion from setuptools.distutils.verison or setuptools._distutils.version, if first not available [#63350](https://github.com/saltstack/salt/issues/63350) +- When the shell is passed as powershell or pwsh, only wrapper the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended. 
[#63590](https://github.com/saltstack/salt/issues/63590) +- LGPO: Added support for "Relax minimum password length limits" [#63596](https://github.com/saltstack/salt/issues/63596) +- Fixed the ability to set a scheduled task to auto delete if not scheduled to run again (``delete_after``) [#63650](https://github.com/saltstack/salt/issues/63650) +- When a job is disabled only increase it's _next_fire_time value if the job would have run at the current time, eg. the current _next_fire_time == now. [#63699](https://github.com/saltstack/salt/issues/63699) +- have salt.template.compile_template_str cleanup its temp files. [#63724](https://github.com/saltstack/salt/issues/63724) +- Check file is not empty before attempting to read pillar disk cache file [#63729](https://github.com/saltstack/salt/issues/63729) +- Fixed an issue with generating fingerprints for public keys with different line endings [#63742](https://github.com/saltstack/salt/issues/63742) +- Change default GPG keyserver from pgp.mit.edu to keys.openpgp.org. [#63806](https://github.com/saltstack/salt/issues/63806) +- fix cherrypy 400 error output to be less generic. [#63835](https://github.com/saltstack/salt/issues/63835) +- Ensure kwargs is passed along to _call_apt when passed into install function. [#63847](https://github.com/saltstack/salt/issues/63847) +- remove eval and update logging to be more informative on bad config [#63879](https://github.com/saltstack/salt/issues/63879) +- add linux_distribution to util to stop dep warning [#63904](https://github.com/saltstack/salt/issues/63904) +- Handle the situation when a sub proxy minion does not init properly, eg. an exception happens, and the sub proxy object is not available. [#63923](https://github.com/saltstack/salt/issues/63923) +- Clarifying documentation for extension_modules configuration option. 
[#63929](https://github.com/saltstack/salt/issues/63929) +- Windows pkg module now properly handles versions containing strings [#63935](https://github.com/saltstack/salt/issues/63935) +- Handle the scenario when the check_cmd requisite is used with a state function when the state has a local check_cmd function but that function isn't used by that function. [#63948](https://github.com/saltstack/salt/issues/63948) +- Issue #63981: Allow users to pass verify_ssl to pkg.install/pkg.installed on Windows [#63981](https://github.com/saltstack/salt/issues/63981) + + +### Added + +- Introduce a `LIB_STATE_DIR` syspaths variable which defaults to `CONFIG_DIR`, + but can be individually customized during installation by specifying + `--salt-lib-state-dir` during installation. Change the default `pki_dir` to + `/pki/master` (for the master) and `/pki/minion` + (for the minion). [#3396](https://github.com/saltstack/salt/issues/3396) +- Allow users to enable 'queue=True' for all state runs via config file [#31468](https://github.com/saltstack/salt/issues/31468) +- Added pillar templating to vault policies [#43287](https://github.com/saltstack/salt/issues/43287) +- Add support for NVMeF as a transport protocol for hosts in a Pure Storage FlashArray [#51088](https://github.com/saltstack/salt/issues/51088) +- A new salt-ssh roster that generates a roster by parsing a known_hosts file.
[#54679](https://github.com/saltstack/salt/issues/54679) +- Added Windows Event Viewer support [#54713](https://github.com/saltstack/salt/issues/54713) +- Added the win_lgpo_reg state and execution modules which will allow registry based group policy to be set directly in the Registry.pol file [#56013](https://github.com/saltstack/salt/issues/56013) +- Added resource tagging functions to boto_dynamodb execution module [#57500](https://github.com/saltstack/salt/issues/57500) +- Added `openvswitch_db` state module and functions `bridge_to_parent`, + `bridge_to_vlan`, `db_get`, and `db_set` to the `openvswitch` execution module. + Also added optional `parent` and `vlan` parameters to the + `openvswitch_bridge.present` state module function and the + `openvswitch.bridge_create` execution module function. [#58986](https://github.com/saltstack/salt/issues/58986) +- State module to manage SysFS attributes [#60154](https://github.com/saltstack/salt/issues/60154) +- Added ability for `salt.wait_for_event` to handle `event_id`s that have a list value. [#60430](https://github.com/saltstack/salt/issues/60430) +- Added support for Linux ppc64le core grains (cpu_model, virtual, productname, manufacturer, serialnumber) and arm core grains (serialnumber, productname) [#60518](https://github.com/saltstack/salt/issues/60518) +- Added autostart option to virt.defined and virt.running states, along with virt.update execution modules. [#60700](https://github.com/saltstack/salt/issues/60700) +- Added .0 back to our versioning scheme for future versions (e.g. 3006.0) [#60722](https://github.com/saltstack/salt/issues/60722) +- Initial work to allow parallel startup of proxy minions when used as sub proxies with Deltaproxy. [#61153](https://github.com/saltstack/salt/issues/61153) +- Added node label support for GCE [#61245](https://github.com/saltstack/salt/issues/61245) +- Support the --priority flag when adding sources to Chocolatey. 
[#61319](https://github.com/saltstack/salt/issues/61319) +- Add namespace option to ext_pillar.http_json [#61335](https://github.com/saltstack/salt/issues/61335) +- Added a filter function to ps module to get a list of processes on a minion according to their state. [#61420](https://github.com/saltstack/salt/issues/61420) +- Add postgres.timeout option to postgres module for limiting postgres query times [#61433](https://github.com/saltstack/salt/issues/61433) +- Added new optional vault option, ``config_location``. This can be either ``master`` or ``local`` and defines where vault will look for connection details, either requesting them from the master or using the local config. [#61857](https://github.com/saltstack/salt/issues/61857) +- Add ipwrap() jinja filter to wrap IPv6 addresses with brackets. [#61931](https://github.com/saltstack/salt/issues/61931) +- 'tcp' transport is now available in ipv6-only network [#62009](https://github.com/saltstack/salt/issues/62009) +- Add `diff_attr` parameter to pkg.upgrade() (zypper/yum). [#62031](https://github.com/saltstack/salt/issues/62031) +- Config option pass_variable_prefix allows to distinguish variables that contain paths to pass secrets. + Config option pass_strict_fetch allows to error out when a secret cannot be fetched from pass. + Config option pass_dir allows setting the PASSWORD_STORE_DIR env for pass. + Config option pass_gnupghome allows setting the $GNUPGHOME env for pass. 
[#62120](https://github.com/saltstack/salt/issues/62120) +- Add file.pruned state and expanded file.rmdir exec module functionality [#62178](https://github.com/saltstack/salt/issues/62178) +- Added "dig.PTR" function to resolve PTR records for IPs, as well as tests and documentation [#62275](https://github.com/saltstack/salt/issues/62275) +- Added the ability to remove a KB using the DISM state/execution modules [#62366](https://github.com/saltstack/salt/issues/62366) +- Add " python" subcommand to allow execution of arbitrary scripts via bundled Python runtime [#62381](https://github.com/saltstack/salt/issues/62381) +- Add ability to provide conditions which convert normal state actions to no-op when true [#62446](https://github.com/saltstack/salt/issues/62446) +- Added debug log messages displaying the command being run when installing packages on Windows [#62480](https://github.com/saltstack/salt/issues/62480) +- Add biosvendor grain [#62496](https://github.com/saltstack/salt/issues/62496) +- Add ifelse Jinja function as found in CFEngine [#62508](https://github.com/saltstack/salt/issues/62508) +- Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. [#62539](https://github.com/saltstack/salt/issues/62539) +- Adds __env__substitution to ext_pillar.stack; followup of #61531, improved exception handling for stacked template (jinja) template rendering and yaml parsing in ext_pillar.stack [#62578](https://github.com/saltstack/salt/issues/62578) +- Increase file.tidied flexibility with regard to age and size [#62678](https://github.com/saltstack/salt/issues/62678) +- Added "connected_devices" feature to netbox pillar module. 
It contains extra information about devices connected to the minion [#62761](https://github.com/saltstack/salt/issues/62761) +- Add atomic file operation for symlink changes [#62768](https://github.com/saltstack/salt/issues/62768) +- Add password/account locking/unlocking in user.present state on supported operating systems [#62856](https://github.com/saltstack/salt/issues/62856) +- Added onchange configuration for script engine [#62867](https://github.com/saltstack/salt/issues/62867) +- Added output and bare functionality to export_key gpg module function [#62978](https://github.com/saltstack/salt/issues/62978) +- Add keyvalue serializer for environment files [#62983](https://github.com/saltstack/salt/issues/62983) +- Add ability to ignore symlinks in file.tidied [#63042](https://github.com/saltstack/salt/issues/63042) +- salt-cloud support IMDSv2 tokens when using 'use-instance-role-credentials' [#63067](https://github.com/saltstack/salt/issues/63067) +- Add ability for file.symlink to not set ownership on existing links [#63093](https://github.com/saltstack/salt/issues/63093) +- Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation [#63095](https://github.com/saltstack/salt/issues/63095) +- Add functions that will return the underlying block device, mount point, and filesystem type for a given path [#63098](https://github.com/saltstack/salt/issues/63098) +- Add ethtool execution and state module functions for pause [#63128](https://github.com/saltstack/salt/issues/63128) +- Add boardname grain [#63131](https://github.com/saltstack/salt/issues/63131) +- Added management of ECDSA/EdDSA private keys with x509 modules in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63248](https://github.com/saltstack/salt/issues/63248) +- Added x509 modules support for different output formats in the new cryptography x509 module. 
Please migrate to the new cryptography x509 module for this improvement. [#63249](https://github.com/saltstack/salt/issues/63249) +- Added deprecation_warning test state for ensuring that deprecation warnings are correctly emitted. [#63315](https://github.com/saltstack/salt/issues/63315) +- Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id. + This allows users to enable state_events on a per use basis rather than having to + enable them globally for all state runs. [#63316](https://github.com/saltstack/salt/issues/63316) +- Allow max queue size setting for state runs to prevent performance problems from queue growth [#63356](https://github.com/saltstack/salt/issues/63356) +- Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) +- Include the version of `relenv` in the versions report. [#63827](https://github.com/saltstack/salt/issues/63827) +- Added debug log messages displaying the command being run when removing packages on Windows [#63866](https://github.com/saltstack/salt/issues/63866) + + +### Security + +- Upgrade Requirements Due to Security Issues. + + * Upgrade to `cryptography>=39.0.1` due to: + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. + * Update to `markdown-it-py==2.2.0` due to: + * https://github.com/advisories/GHSA-jrwr-5x3p-hvc3 + * https://github.com/advisories/GHSA-vrjv-mxr7-vjf8 [#63882](https://github.com/saltstack/salt/issues/63882) + + +## 3006.0rc2 (2023-03-19) + + +### Removed + +- Remove and deprecate the __orchestration__ key from salt.runner and salt.wheel return data. To get it back, set features.enable_deprecated_orchestration_flag master configuration option to True. The flag will be completely removed in Salt 3008 Argon. 
[#59917](https://github.com/saltstack/salt/issues/59917) +- Removed distutils and replaced with setuptools, given distutils is deprecated and removed in Python 3.12 [#60476](https://github.com/saltstack/salt/issues/60476) +- Removed ``runtests`` targets from ``noxfile.py`` [#62239](https://github.com/saltstack/salt/issues/62239) +- Removed the PyObjC dependency. + + This addresses problems with building a one dir build for macOS. + It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir (because it would be tied to the macOS version where the onedir was built). + Since it's currently not being used, it's removed. [#62432](https://github.com/saltstack/salt/issues/62432) +- Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. [#63874](https://github.com/saltstack/salt/issues/63874) + + +### Deprecated + +- renamed `keep_jobs`, specifying job cache TTL in hours, to `keep_jobs_seconds`, specifying TTL in seconds. + `keep_jobs` will be removed in the Argon release [#55295](https://github.com/saltstack/salt/issues/55295) +- Removing all references to napalm-base which is no longer supported. [#61542](https://github.com/saltstack/salt/issues/61542) +- The 'ip_bracket' function has been moved from salt/utils/zeromq.py to salt/utils/network.py [#62009](https://github.com/saltstack/salt/issues/62009) +- The `expand_repo_def` function in `salt.modules.aptpkg` is now deprecated. It's only used in `salt.states.pkgrepo` and it has no use of being exposed to the CLI. 
[#62485](https://github.com/saltstack/salt/issues/62485) +- Deprecated defunct Django returner [#62644](https://github.com/saltstack/salt/issues/62644) +- Deprecate core ESXi and associated states and modules, vcenter and vsphere support in favor of Salt VMware Extensions [#62754](https://github.com/saltstack/salt/issues/62754) +- Removing manufacture grain which has been deprecated. [#62914](https://github.com/saltstack/salt/issues/62914) +- Removing deprecated utils/boto3_elasticsearch.py [#62915](https://github.com/saltstack/salt/issues/62915) +- Removing support for the now deprecated _ext_nodes from salt/master.py. [#62917](https://github.com/saltstack/salt/issues/62917) +- Deprecating the Salt Slack engine in favor of the Salt Slack Bolt Engine. [#63095](https://github.com/saltstack/salt/issues/63095) +- `salt.utils.version.StrictVersion` is now deprecated and its use should be replaced with `salt.utils.version.Version`. [#63383](https://github.com/saltstack/salt/issues/63383) + + +### Changed + +- More intelligent diffing in changes of file.serialize state. [#48609](https://github.com/saltstack/salt/issues/48609) +- Move deprecation of the neutron module to Argon. Please migrate to the neutronng module instead. [#49430](https://github.com/saltstack/salt/issues/49430) +- ``umask`` is now a global state argument, instead of only applying to ``cmd`` + states. [#57803](https://github.com/saltstack/salt/issues/57803) +- Update pillar.obfuscate to accept kwargs in addition to args. This is useful when passing in keyword arguments like saltenv that are then passed along to pillar.items. [#58971](https://github.com/saltstack/salt/issues/58971) +- Improve support for listing macOS brew casks [#59439](https://github.com/saltstack/salt/issues/59439) +- Add missing MariaDB Grants to mysql module. + MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. 
+ Also improved exception handling in `grant_add` which did not log the original error message and replaced it with a generic error. [#61409](https://github.com/saltstack/salt/issues/61409) +- Use VENV_PIP_TARGET environment variable as a default target for pip if present. [#62089](https://github.com/saltstack/salt/issues/62089) +- Disabled FQDNs grains on macOS by default [#62168](https://github.com/saltstack/salt/issues/62168) +- Replaced pyroute2.IPDB with pyroute2.NDB, as the former is deprecated [#62218](https://github.com/saltstack/salt/issues/62218) +- Enhance capture of error messages for Zypper calls in zypperpkg module. [#62346](https://github.com/saltstack/salt/issues/62346) +- Removed GPG_1_3_1 check [#62895](https://github.com/saltstack/salt/issues/62895) +- Requisite state chunks now all consistently contain `__id__`, `__sls__` and `name`. [#63012](https://github.com/saltstack/salt/issues/63012) +- netapi_enable_clients option to allow enabling/disabling of clients in salt-api. + By default all clients will now be disabled. Users of salt-api will need + to update their master config to enable the clients that they use. Not adding + the netapi_enable_clients option with required clients to the master config will + disable salt-api. [#63050](https://github.com/saltstack/salt/issues/63050) +- Stop relying on `salt/_version.py` to write Salt's version. Instead use `salt/_version.txt` which only contains the version string. [#63383](https://github.com/saltstack/salt/issues/63383) +- Set enable_fqdns_grains to be False by default. [#63595](https://github.com/saltstack/salt/issues/63595) +- Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file. 
[#63710](https://github.com/saltstack/salt/issues/63710) + + +### Fixed + +- Add kwargs to handle extra parameters for http.query [#36138](https://github.com/saltstack/salt/issues/36138) +- Fix mounted bind mounts getting active mount options added [#39292](https://github.com/saltstack/salt/issues/39292) +- Fix `sysctl.present` converts spaces to tabs. [#40054](https://github.com/saltstack/salt/issues/40054) +- Fixes state pkg.purged to purge removed packages on Debian family systems [#42306](https://github.com/saltstack/salt/issues/42306) +- Fix fun_args missing from syndic returns [#45823](https://github.com/saltstack/salt/issues/45823) +- Fix mount.mounted with 'mount: False' reports unmounted file system as unchanged when running with test=True [#47201](https://github.com/saltstack/salt/issues/47201) +- Issue #49310: Allow users to touch a file with Unix date of birth [#49310](https://github.com/saltstack/salt/issues/49310) +- Do not raise an exception in pkg.info_installed on nonzero return code [#51620](https://github.com/saltstack/salt/issues/51620) +- Passes the value of the force parameter from file.copy to its call to file.remove so that files with the read-only attribute are handled. [#51739](https://github.com/saltstack/salt/issues/51739) +- Fixed x509.certificate_managed creates new certificate every run in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#52167](https://github.com/saltstack/salt/issues/52167) +- Don't check for cached pillar errors on state.apply [#52354](https://github.com/saltstack/salt/issues/52354), [#57180](https://github.com/saltstack/salt/issues/57180), [#59339](https://github.com/saltstack/salt/issues/59339) +- Swapping out args and kwargs for arg and kwarg respectively in the Slack engine when the command passed is a runner. 
[#52400](https://github.com/saltstack/salt/issues/52400) +- Ensure when we're adding chunks to the rules when running aggregation with the iptables state module we use a copy of the chunk otherwise we end up with a recursive mess. [#53353](https://github.com/saltstack/salt/issues/53353) +- When user_create or user_remove fail, return False instead of returning the error. [#53377](https://github.com/saltstack/salt/issues/53377) +- Include sync_roster when sync_all is called. [#53914](https://github.com/saltstack/salt/issues/53914) +- Avoid warning noise in lograte.get [#53988](https://github.com/saltstack/salt/issues/53988) +- Fixed listing revoked keys with gpg.list_keys [#54347](https://github.com/saltstack/salt/issues/54347) +- Fix mount.mounted does not handle blanks properly [#54508](https://github.com/saltstack/salt/issues/54508) +- Fixed grain num_cpus get wrong CPUs count in case of inconsistent CPU numbering. [#54682](https://github.com/saltstack/salt/issues/54682) +- Fix spelling error for python_shell argument in dpkg_lower module [#54907](https://github.com/saltstack/salt/issues/54907) +- Cleaned up bytes response data before sending to non-bytes compatible returners (postgres, mysql) [#55226](https://github.com/saltstack/salt/issues/55226) +- Fixed malformed state return when testing file.managed with unavailable source file [#55269](https://github.com/saltstack/salt/issues/55269) +- Included stdout in error message for Zypper calls in zypperpkg module. [#56016](https://github.com/saltstack/salt/issues/56016) +- Fixed pillar.filter_by with salt-ssh [#56093](https://github.com/saltstack/salt/issues/56093) +- Fix boto_route53 issue with (multiple) VPCs. [#57139](https://github.com/saltstack/salt/issues/57139) +- Remove log from mine runner which was not used. [#57463](https://github.com/saltstack/salt/issues/57463) +- Fixed x509.read_certificate error when reading a Microsoft CA issued certificate in the new cryptography x509 module. 
Please migrate to the new cryptography x509 module for this improvement. [#57535](https://github.com/saltstack/salt/issues/57535) +- Updating Slack engine to use slack_bolt library. [#57842](https://github.com/saltstack/salt/issues/57842) +- Fixed warning about replace=True with x509.certificate_managed in the new cryptography x509 module. [#58165](https://github.com/saltstack/salt/issues/58165) +- Fix salt.modules.pip:is_installed doesn't handle locally installed packages [#58202](https://github.com/saltstack/salt/issues/58202) +- Add missing MariaDB Grants to mysql module. MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. [#58297](https://github.com/saltstack/salt/issues/58297) +- linux_shadow: Fix cases where malformed shadow entries cause `user.present` + states to fail. [#58423](https://github.com/saltstack/salt/issues/58423) +- Fixed salt.utils.compat.cmp to work with dictionaries [#58729](https://github.com/saltstack/salt/issues/58729) +- Fixed formatting for terse output mode [#58953](https://github.com/saltstack/salt/issues/58953) +- Fixed RecursiveDictDiffer with added nested dicts [#59017](https://github.com/saltstack/salt/issues/59017) +- Fixed x509.certificate_managed has DoS effect on master in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59169](https://github.com/saltstack/salt/issues/59169) +- Fixed saltnado websockets disconnecting immediately [#59183](https://github.com/saltstack/salt/issues/59183) +- Fixed x509.certificate_managed rolls certificates every now and then in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. 
[#59315](https://github.com/saltstack/salt/issues/59315) +- Fix postgres_privileges.present not idempotent for functions [#59585](https://github.com/saltstack/salt/issues/59585) +- Fixed influxdb_continuous_query.present state to provide the client args to the underlying module on create. [#59766](https://github.com/saltstack/salt/issues/59766) +- Warn when using insecure (http:// based) key_urls for apt-based systems in pkgrepo.managed, and add a kwarg that determines the validity of such a url. [#59786](https://github.com/saltstack/salt/issues/59786) +- add load balancing policy default option and ensure the module can be executed with arguments from CLI [#59909](https://github.com/saltstack/salt/issues/59909) +- Fix salt-ssh when using imports with extra-filerefs. [#60003](https://github.com/saltstack/salt/issues/60003) +- Fixed cache directory corruption startup error [#60170](https://github.com/saltstack/salt/issues/60170) +- Update docs remove dry_run in docstring of file.blockreplace state. [#60227](https://github.com/saltstack/salt/issues/60227) +- Adds Parrot to OS_Family_Map in grains. [#60249](https://github.com/saltstack/salt/issues/60249) +- Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions [#60365](https://github.com/saltstack/salt/issues/60365) +- Use return code in iptables --check to verify rule exists. [#60467](https://github.com/saltstack/salt/issues/60467) +- Fix regression pip.installed does not pass env_vars when calling pip.list [#60557](https://github.com/saltstack/salt/issues/60557) +- Fix xfs module when additional output included in mkfs.xfs command. [#60853](https://github.com/saltstack/salt/issues/60853) +- Fixed parsing new format of terraform states in roster.terraform [#60915](https://github.com/saltstack/salt/issues/60915) +- Fixed recognizing installed ARMv7 rpm packages in compatible architectures. 
[#60994](https://github.com/saltstack/salt/issues/60994) +- Fixing changes dict in pkg state to be consistent when installing and test=True. [#60995](https://github.com/saltstack/salt/issues/60995) +- Fix cron.present duplicating entries when changing timespec to special. [#60997](https://github.com/saltstack/salt/issues/60997) +- Made salt-ssh respect --wipe again [#61083](https://github.com/saltstack/salt/issues/61083) +- state.orchestrate_single only passes a pillar if it is set to the state + function. This allows it to be used with state functions that don't accept a + pillar keyword argument. [#61092](https://github.com/saltstack/salt/issues/61092) +- Fix ipset state when the comment kwarg is set. [#61122](https://github.com/saltstack/salt/issues/61122) +- Fix issue with archive.unzip where the password was not being encoded for the extract function [#61422](https://github.com/saltstack/salt/issues/61422) +- Some Linux distributions (like AlmaLinux, Astra Linux, Debian, Mendel, Linux + Mint, Pop!_OS, Rocky Linux) report different `oscodename`, `osfullname`, + `osfinger` grains if lsb-release is installed or not. They have been changed to + only derive these OS grains from `/etc/os-release`. [#61618](https://github.com/saltstack/salt/issues/61618) +- Pop!_OS uses the full version (YY.MM) in the osfinger grain now, not just the year. This allows differentiating for example between 20.04 and 20.10. [#61619](https://github.com/saltstack/salt/issues/61619) +- Fix ssh config roster to correctly parse the ssh config files that contain spaces. 
[#61650](https://github.com/saltstack/salt/issues/61650) +- Fix SoftLayer configuration not raising an exception when a domain is missing [#61727](https://github.com/saltstack/salt/issues/61727) +- Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry [#61789](https://github.com/saltstack/salt/issues/61789) +- Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function. [#61805](https://github.com/saltstack/salt/issues/61805) +- Fixed malformed state return when merge-serializing to an improperly formatted file [#61814](https://github.com/saltstack/salt/issues/61814) +- Made cmdmod._run[_all]_quiet work during minion startup on MacOS with runas specified (which fixed mac_service) [#61816](https://github.com/saltstack/salt/issues/61816) +- When deleting the vault cache, also delete from the session cache [#61821](https://github.com/saltstack/salt/issues/61821) +- Ignore errors on reading license info with dpkg_lowpkg to prevent tracebacks on getting package information. 
[#61827](https://github.com/saltstack/salt/issues/61827) +- win_lgpo: Display conflicting policy names when more than one policy is found [#61859](https://github.com/saltstack/salt/issues/61859) +- win_lgpo: Fixed intermittent KeyError when getting policy setting using lgpo.get_policy [#61860](https://github.com/saltstack/salt/issues/61860) +- Fixed listing minions on OpenBSD [#61966](https://github.com/saltstack/salt/issues/61966) +- Make Salt to return an error on "pkg" modules and states when targeting duplicated package names [#62019](https://github.com/saltstack/salt/issues/62019) +- Fix return of REST-returned permissions when auth_list is set [#62022](https://github.com/saltstack/salt/issues/62022) +- Normalize package names once on using pkg.installed/removed with yum to make it possible to install packages with the name containing a part similar to a name of architecture. [#62029](https://github.com/saltstack/salt/issues/62029) +- Fix inconsitency regarding name and pkgs parameters between zypperpkg.upgrade() and yumpkg.upgrade() [#62030](https://github.com/saltstack/salt/issues/62030) +- Fix attr=all handling in pkg.list_pkgs() (yum/zypper). [#62032](https://github.com/saltstack/salt/issues/62032) +- Fixed the humanname being ignored in pkgrepo.managed on openSUSE Leap [#62053](https://github.com/saltstack/salt/issues/62053) +- Fixed issue with some LGPO policies having whitespace at the beginning or end of the element alias [#62058](https://github.com/saltstack/salt/issues/62058) +- Fix ordering of args to libcloud_storage.download_object module [#62074](https://github.com/saltstack/salt/issues/62074) +- Ignore extend declarations in sls files that are excluded. [#62082](https://github.com/saltstack/salt/issues/62082) +- Remove leftover usage of impacket [#62101](https://github.com/saltstack/salt/issues/62101) +- Pass executable path from _get_path_exec() is used when calling the program. + The $HOME env is no longer modified globally. 
+ Only trailing newlines are stripped from the fetched secret. + Pass process arguments are handled in a secure way. [#62120](https://github.com/saltstack/salt/issues/62120) +- Ignore some command return codes in openbsdrcctl_service to prevent spurious errors [#62131](https://github.com/saltstack/salt/issues/62131) +- Fixed extra period in filename output in tls module. Instead of "server.crt." it will now be "server.crt". [#62139](https://github.com/saltstack/salt/issues/62139) +- Make sure lingering PAexec-*.exe files in the Windows directory are cleaned up [#62152](https://github.com/saltstack/salt/issues/62152) +- Restored Salt's DeprecationWarnings [#62185](https://github.com/saltstack/salt/issues/62185) +- Fixed issue with forward slashes on Windows with file.recurse and clean=True [#62197](https://github.com/saltstack/salt/issues/62197) +- Recognize OSMC as Debian-based [#62198](https://github.com/saltstack/salt/issues/62198) +- Fixed Zypper module failing on RPM lock file being temporarily unavailable. [#62204](https://github.com/saltstack/salt/issues/62204) +- Improved error handling and diagnostics in the proxmox salt-cloud driver [#62211](https://github.com/saltstack/salt/issues/62211) +- Added EndeavourOS to the Arch os_family. [#62220](https://github.com/saltstack/salt/issues/62220) +- Fix salt-ssh not detecting `platform-python` as a valid interpreter on EL8 [#62235](https://github.com/saltstack/salt/issues/62235) +- Fix pkg.version_cmp on openEuler and a few other os flavors. [#62248](https://github.com/saltstack/salt/issues/62248) +- Fix localhost detection in glusterfs.peers [#62273](https://github.com/saltstack/salt/issues/62273) +- Fix Salt Package Manager (SPM) exception when calling spm create_repo . 
[#62281](https://github.com/saltstack/salt/issues/62281) +- Fix matcher slowness due to loader invocation [#62283](https://github.com/saltstack/salt/issues/62283) +- Fixes the Puppet module for non-aio Puppet packages for example running the Puppet module on FreeBSD. [#62323](https://github.com/saltstack/salt/issues/62323) +- Issue 62334: Displays a debug log message instead of an error log message when the publisher fails to connect [#62334](https://github.com/saltstack/salt/issues/62334) +- Fix pyobjects renderer access to opts and sls [#62336](https://github.com/saltstack/salt/issues/62336) +- Fix use of random shuffle and sample functions as Jinja filters [#62372](https://github.com/saltstack/salt/issues/62372) +- Fix groups with duplicate GIDs are not returned by get_group_list [#62377](https://github.com/saltstack/salt/issues/62377) +- Fix the "zpool.present" state when enabling zpool features that are already active. [#62390](https://github.com/saltstack/salt/issues/62390) +- Fix ability to execute remote file client methods in saltcheck [#62398](https://github.com/saltstack/salt/issues/62398) +- Update all platforms to use pycparser 2.21 or greater for Py 3.9 or higher, fixes fips fault with openssl v3.x [#62400](https://github.com/saltstack/salt/issues/62400) +- Due to changes in the Netmiko library for the exception paths, need to check the version of Netmiko python library and then import the exceptions from different locations depending on the result. [#62405](https://github.com/saltstack/salt/issues/62405) +- When using preq on a state, then prereq state will first be run with test=True to determine if there are changes. When there are changes, the state with the prereq option will be run prior to the prereq state. If this state fails then the prereq state will not run and the state output uses the test=True run. However, the proposed changes are included for the prereq state are included from the test=True run. 
We should pull those out as there weren't actually changes since the prereq state did not run. [#62408](https://github.com/saltstack/salt/issues/62408) +- Added directory mode for file.copy with makedirs [#62426](https://github.com/saltstack/salt/issues/62426) +- Provide better error handling in the various napalm proxy minion functions when the device is not accessible. [#62435](https://github.com/saltstack/salt/issues/62435) +- When handling aggregation, change the order to ensure that the requisites are aggregated first and then the state functions are aggregated. Caching whether aggregate functions are available for particular states so we don't need to attempt to load them everytime. [#62439](https://github.com/saltstack/salt/issues/62439) +- The patch allows to boostrap kubernetes clusters in the version above 1.13 via salt module [#62451](https://github.com/saltstack/salt/issues/62451) +- sysctl.persist now updates the in-memory value on FreeBSD even if the on-disk value was already correct. [#62461](https://github.com/saltstack/salt/issues/62461) +- Fixed parsing CDROM apt sources [#62474](https://github.com/saltstack/salt/issues/62474) +- Update sanitizing masking for Salt SSH to include additional password like strings. [#62483](https://github.com/saltstack/salt/issues/62483) +- Fix user/group checking on file state functions in the test mode. [#62499](https://github.com/saltstack/salt/issues/62499) +- Fix user.present to allow removing groups using optional_groups parameter and enforcing idempotent group membership. [#62502](https://github.com/saltstack/salt/issues/62502) +- Fix possible tracebacks if there is a package with '------' or '======' in the description is installed on the Debian based minion. 
[#62519](https://github.com/saltstack/salt/issues/62519) +- Fixed the omitted "pool" parameter when cloning a VM with the proxmox salt-cloud driver [#62521](https://github.com/saltstack/salt/issues/62521) +- Fix rendering of pyobjects states in saltcheck [#62523](https://github.com/saltstack/salt/issues/62523) +- Fixes pillar where a corrupted CacheDisk file forces the pillar to be rebuilt [#62527](https://github.com/saltstack/salt/issues/62527) +- Use str() method instead of repo_line for when python3-apt is installed or not in aptpkg.py. [#62546](https://github.com/saltstack/salt/issues/62546) +- Remove the connection_timeout from netmiko_connection_args before netmiko_connection_args is added to __context__["netmiko_device"]["args"] which is passed along to the Netmiko library. [#62547](https://github.com/saltstack/salt/issues/62547) +- Fix order specific mount.mounted options for persist [#62556](https://github.com/saltstack/salt/issues/62556) +- Fixed salt-cloud cloning a proxmox VM with a specified new vmid. [#62558](https://github.com/saltstack/salt/issues/62558) +- Fix runas with cmd module when using the onedir bundled packages [#62565](https://github.com/saltstack/salt/issues/62565) +- Update setproctitle version for all platforms [#62576](https://github.com/saltstack/salt/issues/62576) +- Fixed missing parameters when cloning a VM with the proxmox salt-cloud driver [#62580](https://github.com/saltstack/salt/issues/62580) +- Handle PermissionError when importing crypt when FIPS is enabled. [#62587](https://github.com/saltstack/salt/issues/62587) +- Correctly reraise exceptions in states.http [#62595](https://github.com/saltstack/salt/issues/62595) +- Fixed syndic eauth. Now jobs will be published when a valid eauth user is targeting allowed minions/functions. [#62618](https://github.com/saltstack/salt/issues/62618) +- updated rest_cherry/app to properly detect arg sent as a string as curl will do when only one arg is supplied. 
[#62624](https://github.com/saltstack/salt/issues/62624) +- Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline [#62633](https://github.com/saltstack/salt/issues/62633) +- Fixed vault ext pillar return data for KV v2 [#62651](https://github.com/saltstack/salt/issues/62651) +- Fix saltcheck _get_top_states doesn't pass saltenv to state.show_top [#62654](https://github.com/saltstack/salt/issues/62654) +- Fix groupadd.* functions hard code relative command name [#62657](https://github.com/saltstack/salt/issues/62657) +- Fixed pdbedit.create trying to use a bytes-like hash as string. [#62670](https://github.com/saltstack/salt/issues/62670) +- Fix depenency on legacy boto module in boto3 modules [#62672](https://github.com/saltstack/salt/issues/62672) +- Modified "_get_flags" function so that it returns regex flags instead of integers [#62676](https://github.com/saltstack/salt/issues/62676) +- Change startup ReqServer log messages from error to info level. [#62728](https://github.com/saltstack/salt/issues/62728) +- Fix kmod.* functions hard code relative command name [#62772](https://github.com/saltstack/salt/issues/62772) +- Fix mac_brew_pkg to work with null taps [#62793](https://github.com/saltstack/salt/issues/62793) +- Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" items is being treated as a schedule item. [#62795](https://github.com/saltstack/salt/issues/62795) +- Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode [#62817](https://github.com/saltstack/salt/issues/62817) +- Include UID and GID checks in modules.file.check_perms as well as comparing + ownership by username and group name. 
[#62818](https://github.com/saltstack/salt/issues/62818) +- Fix presence events on TCP transport by removing a client's presence when minion disconnects from publish channel correctly [#62826](https://github.com/saltstack/salt/issues/62826) +- Remove Azure deprecation messages from functions that always run w/ salt-cloud [#62845](https://github.com/saltstack/salt/issues/62845) +- Use select instead of iterating over entrypoints as a dictionary for importlib_metadata>=5.0.0 [#62854](https://github.com/saltstack/salt/issues/62854) +- Fixed master job scheduler using when [#62858](https://github.com/saltstack/salt/issues/62858) +- LGPO: Added support for missing domain controller policies: VulnerableChannelAllowList and LdapEnforceChannelBinding [#62873](https://github.com/saltstack/salt/issues/62873) +- Fix unnecessarily complex gce metadata grains code to use googles metadata service more effectively. [#62878](https://github.com/saltstack/salt/issues/62878) +- Fixed dockermod version_info function for docker-py 6.0.0+ [#62882](https://github.com/saltstack/salt/issues/62882) +- Moving setting the LOAD_BALANCING_POLICY_MAP dictionary into the try except block that determines if the cassandra_cql module should be made available. [#62886](https://github.com/saltstack/salt/issues/62886) +- Updating various MongoDB module functions to work with latest version of pymongo. [#62900](https://github.com/saltstack/salt/issues/62900) +- Restored channel for Syndic minions to send job returns to the Salt master. [#62933](https://github.com/saltstack/salt/issues/62933) +- removed _resolve_deps as it required a library that is not generally avalible. and switched to apt-get for everything as that can auto resolve dependencies. 
[#62934](https://github.com/saltstack/salt/issues/62934) +- Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang [#62937](https://github.com/saltstack/salt/issues/62937) +- Allow root user to modify crontab lines for non-root users (except AIX and Solaris). Align crontab line changes with the file ones and also with listing crontab. [#62940](https://github.com/saltstack/salt/issues/62940) +- Fix systemd_service.* functions hard code relative command name [#62942](https://github.com/saltstack/salt/issues/62942) +- Fix file.symlink backupname operation can copy remote contents to local disk [#62953](https://github.com/saltstack/salt/issues/62953) +- Issue #62968: Fix issue where cloud deployments were putting the keys in the wrong location on Windows hosts [#62968](https://github.com/saltstack/salt/issues/62968) +- Fixed gpg_passphrase issue with gpg decrypt/encrypt functions [#62977](https://github.com/saltstack/salt/issues/62977) +- Fix file.tidied FileNotFoundError [#62986](https://github.com/saltstack/salt/issues/62986) +- Fixed bug where module.wait states were detected as running legacy module.run syntax [#62988](https://github.com/saltstack/salt/issues/62988) +- Fixed issue with win_wua module where it wouldn't load if the CryptSvc was set to Manual start [#62993](https://github.com/saltstack/salt/issues/62993) +- The `__opts__` dunder dictionary is now added to the loader's `pack` if not + already present, which makes it accessible via the + `salt.loader.context.NamedLoaderContext` class. 
[#63013](https://github.com/saltstack/salt/issues/63013) +- Issue #63024: Fix issue where grains and config data were being place in the wrong location on Windows hosts [#63024](https://github.com/saltstack/salt/issues/63024) +- Fix btrfs.subvolume_snapshot command failing [#63025](https://github.com/saltstack/salt/issues/63025) +- Fix file.retention_schedule always reports changes [#63033](https://github.com/saltstack/salt/issues/63033) +- Fix mongo authentication for mongo ext_pillar and mongo returner + + This fix also include the ability to use the mongo connection string for mongo ext_pillar [#63058](https://github.com/saltstack/salt/issues/63058) +- Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. [#63103](https://github.com/saltstack/salt/issues/63103) +- TCP transport documentation now contains proper master/minion-side filtering information [#63120](https://github.com/saltstack/salt/issues/63120) +- Fixed gpg.verify does not respect gnupghome [#63145](https://github.com/saltstack/salt/issues/63145) +- Made pillar cache pass extra minion data as well [#63208](https://github.com/saltstack/salt/issues/63208) +- Fix serious performance issues with the file.tidied module [#63231](https://github.com/saltstack/salt/issues/63231) +- Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. [#63317](https://github.com/saltstack/salt/issues/63317) +- Import StrictVersion and LooseVersion from setuptools.distutils.verison or setuptools._distutils.version, if first not available [#63350](https://github.com/saltstack/salt/issues/63350) +- When the shell is passed as powershell or pwsh, only wrapper the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended. 
[#63590](https://github.com/saltstack/salt/issues/63590) +- LGPO: Added support for "Relax minimum password length limits" [#63596](https://github.com/saltstack/salt/issues/63596) +- When a job is disabled only increase it's _next_fire_time value if the job would have run at the current time, eg. the current _next_fire_time == now. [#63699](https://github.com/saltstack/salt/issues/63699) +- Check file is not empty before attempting to read pillar disk cache file [#63729](https://github.com/saltstack/salt/issues/63729) +- fix cherrypy 400 error output to be less generic. [#63835](https://github.com/saltstack/salt/issues/63835) +- remove eval and update logging to be more informative on bad config [#63879](https://github.com/saltstack/salt/issues/63879) + + +### Added + +- Introduce a `LIB_STATE_DIR` syspaths variable which defaults to `CONFIG_DIR`, + but can be individually customized during installation by specifying + `--salt-lib-state-dir` during installation. Change the default `pki_dir` to + `/pki/master` (for the master) and `/pki/minion` + (for the minion). [#3396](https://github.com/saltstack/salt/issues/3396) +- Allow users to enable 'queue=True' for all state runs via config file [#31468](https://github.com/saltstack/salt/issues/31468) +- Added pillar templating to vault policies [#43287](https://github.com/saltstack/salt/issues/43287) +- Add support for NVMeF as a transport protocol for hosts in a Pure Storage FlashArray [#51088](https://github.com/saltstack/salt/issues/51088) +- A new salt-ssh roster that generates a roster by parses a known_hosts file. 
[#54679](https://github.com/saltstack/salt/issues/54679) +- Added Windows Event Viewer support [#54713](https://github.com/saltstack/salt/issues/54713) +- Added the win_lgpo_reg state and execution modules which will allow registry based group policy to be set directly in the Registry.pol file [#56013](https://github.com/saltstack/salt/issues/56013) +- Added resource tagging functions to boto_dynamodb execution module [#57500](https://github.com/saltstack/salt/issues/57500) +- Added `openvswitch_db` state module and functions `bridge_to_parent`, + `bridge_to_vlan`, `db_get`, and `db_set` to the `openvswitch` execution module. + Also added optional `parent` and `vlan` parameters to the + `openvswitch_bridge.present` state module function and the + `openvswitch.bridge_create` execution module function. [#58986](https://github.com/saltstack/salt/issues/58986) +- State module to manage SysFS attributes [#60154](https://github.com/saltstack/salt/issues/60154) +- Added ability for `salt.wait_for_event` to handle `event_id`s that have a list value. [#60430](https://github.com/saltstack/salt/issues/60430) +- Added suport for Linux ppc64le core grains (cpu_model, virtual, productname, manufacturer, serialnumber) and arm core grains (serialnumber, productname) [#60518](https://github.com/saltstack/salt/issues/60518) +- Added autostart option to virt.defined and virt.running states, along with virt.update execution modules. [#60700](https://github.com/saltstack/salt/issues/60700) +- Added .0 back to our versioning scheme for future versions (e.g. 3006.0) [#60722](https://github.com/saltstack/salt/issues/60722) +- Initial work to allow parallel startup of proxy minions when used as sub proxies with Deltaproxy. [#61153](https://github.com/saltstack/salt/issues/61153) +- Added node label support for GCE [#61245](https://github.com/saltstack/salt/issues/61245) +- Support the --priority flag when adding sources to Chocolatey. 
[#61319](https://github.com/saltstack/salt/issues/61319) +- Add namespace option to ext_pillar.http_json [#61335](https://github.com/saltstack/salt/issues/61335) +- Added a filter function to ps module to get a list of processes on a minion according to their state. [#61420](https://github.com/saltstack/salt/issues/61420) +- Add postgres.timeout option to postgres module for limiting postgres query times [#61433](https://github.com/saltstack/salt/issues/61433) +- Added new optional vault option, ``config_location``. This can be either ``master`` or ``local`` and defines where vault will look for connection details, either requesting them from the master or using the local config. [#61857](https://github.com/saltstack/salt/issues/61857) +- Add ipwrap() jinja filter to wrap IPv6 addresses with brackets. [#61931](https://github.com/saltstack/salt/issues/61931) +- 'tcp' transport is now available in ipv6-only network [#62009](https://github.com/saltstack/salt/issues/62009) +- Add `diff_attr` parameter to pkg.upgrade() (zypper/yum). [#62031](https://github.com/saltstack/salt/issues/62031) +- Config option pass_variable_prefix allows to distinguish variables that contain paths to pass secrets. + Config option pass_strict_fetch allows to error out when a secret cannot be fetched from pass. + Config option pass_dir allows setting the PASSWORD_STORE_DIR env for pass. + Config option pass_gnupghome allows setting the $GNUPGHOME env for pass. 
[#62120](https://github.com/saltstack/salt/issues/62120) +- Add file.pruned state and expanded file.rmdir exec module functionality [#62178](https://github.com/saltstack/salt/issues/62178) +- Added "dig.PTR" function to resolve PTR records for IPs, as well as tests and documentation [#62275](https://github.com/saltstack/salt/issues/62275) +- Added the ability to remove a KB using the DISM state/execution modules [#62366](https://github.com/saltstack/salt/issues/62366) +- Add " python" subcommand to allow execution or arbitrary scripts via bundled Python runtime [#62381](https://github.com/saltstack/salt/issues/62381) +- Add ability to provide conditions which convert normal state actions to no-op when true [#62446](https://github.com/saltstack/salt/issues/62446) +- Added debug log messages displaying the command being run when installing packages on Windows [#62480](https://github.com/saltstack/salt/issues/62480) +- Add biosvendor grain [#62496](https://github.com/saltstack/salt/issues/62496) +- Add ifelse Jinja function as found in CFEngine [#62508](https://github.com/saltstack/salt/issues/62508) +- Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. [#62539](https://github.com/saltstack/salt/issues/62539) +- Adds __env__substitution to ext_pillar.stack; followup of #61531, improved exception handling for stacked template (jinja) template rendering and yaml parsing in ext_pillar.stack [#62578](https://github.com/saltstack/salt/issues/62578) +- Increase file.tidied flexibility with regard to age and size [#62678](https://github.com/saltstack/salt/issues/62678) +- Added "connected_devices" feature to netbox pillar module. 
It contains extra information about devices connected to the minion [#62761](https://github.com/saltstack/salt/issues/62761) +- Add atomic file operation for symlink changes [#62768](https://github.com/saltstack/salt/issues/62768) +- Add password/account locking/unlocking in user.present state on supported operating systems [#62856](https://github.com/saltstack/salt/issues/62856) +- Added onchange configuration for script engine [#62867](https://github.com/saltstack/salt/issues/62867) +- Added output and bare functionality to export_key gpg module function [#62978](https://github.com/saltstack/salt/issues/62978) +- Add keyvalue serializer for environment files [#62983](https://github.com/saltstack/salt/issues/62983) +- Add ability to ignore symlinks in file.tidied [#63042](https://github.com/saltstack/salt/issues/63042) +- salt-cloud support IMDSv2 tokens when using 'use-instance-role-credentials' [#63067](https://github.com/saltstack/salt/issues/63067) +- Add ability for file.symlink to not set ownership on existing links [#63093](https://github.com/saltstack/salt/issues/63093) +- Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation [#63095](https://github.com/saltstack/salt/issues/63095) +- Add functions that will return the underlying block device, mount point, and filesystem type for a given path [#63098](https://github.com/saltstack/salt/issues/63098) +- Add ethtool execution and state module functions for pause [#63128](https://github.com/saltstack/salt/issues/63128) +- Add boardname grain [#63131](https://github.com/saltstack/salt/issues/63131) +- Added management of ECDSA/EdDSA private keys with x509 modules in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63248](https://github.com/saltstack/salt/issues/63248) +- Added x509 modules support for different output formats in the new cryptography x509 module. 
Please migrate to the new cryptography x509 module for this improvement. [#63249](https://github.com/saltstack/salt/issues/63249) +- Added deprecation_warning test state for ensuring that deprecation warnings are correctly emitted. [#63315](https://github.com/saltstack/salt/issues/63315) +- Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id. + This allows users to enable state_events on a per use basis rather than having to + enable them globally for all state runs. [#63316](https://github.com/saltstack/salt/issues/63316) +- Allow max queue size setting for state runs to prevent performance problems from queue growth [#63356](https://github.com/saltstack/salt/issues/63356) +- Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) +- Include the version of `relenv` in the versions report. [#63827](https://github.com/saltstack/salt/issues/63827) +- Added debug log messages displaying the command being run when removing packages on Windows [#63866](https://github.com/saltstack/salt/issues/63866) + + ## 3006.0rc1 (2023-03-01) @@ -332,6 +1752,20 @@ Versions are `MAJOR.PATCH`. - Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) +## Salt v3005.1-2 (2022-11-04) + +Note: This release is only impacting the packages not the Salt code base. + +### Fixed + +- Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang (#62937) +- Onedir Package Fix: Fix "No such file or directory" error on Rhel installs. 
(#62948) + +### Security + +- Update the onedir packages Python version to 3.8.15 for Windows and 3.9.15 for Linux and Mac + + ## Salt 3005.1 (2022-09-26) ### Fixed diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index 91b09c73752b..ee4ab88e193e 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -8,7 +8,7 @@ in a number of ways: - Use Salt and open well-written bug reports. - Join a `working group `__. - Answer questions on `irc `__, - the `community Slack `__, + the `community Slack `__, the `salt-users mailing list `__, `Server Fault `__, @@ -513,7 +513,7 @@ Now you can run your tests: :: - python -m nox -e "pytest-3.7(coverage=False)" -- tests/unit/cli/test_batch.py + python -m nox -e "test-3(coverage=False)" -- tests/unit/cli/test_batch.py It's a good idea to install `espeak `__ or use ``say`` on @@ -522,11 +522,31 @@ this: :: - python -m nox -e "pytest-3.7(coverage=False)" -- tests/unit/cli/test_batch.py; espeak "Tests done, woohoo!" + python -m nox -e "test-3(coverage=False)" -- tests/unit/cli/test_batch.py; espeak "Tests done, woohoo!" That way you don't have to keep monitoring the actual test run. +:: + + python -m nox -e "test-3(coverage=False)" -- --core-tests + +You can enable or disable test groups locally by passing their respected flag: + +* --no-fast-tests - Tests that are ~10s or faster. Fast tests make up ~75% of tests and can run in 10 to 20 minutes. +* --slow-tests - Tests that are ~10s or slower. +* --core-tests - Tests of any speed that test the root parts of salt. +* --flaky-jail - Test that need to be temporarily skipped. + +In your PR, you can enable or disable test groups by setting a label. +All fast, slow, and core tests specified in the change file will always run. + +* test:no-fast +* test:core +* test:slow +* test:flaky-jail + + Changelog and commit! --------------------- When you write your commit message you should use imperative style. Do @@ -543,7 +563,7 @@ But that advice is backwards for the changelog. 
We follow the our changelog, and use towncrier to generate it for each release. As a contributor, all that means is that you need to add a file to the ``salt/changelog`` directory, using the ``.`` format. For -instanch, if you fixed issue 123, you would do: +instance, if you fixed issue 123, you would do: :: @@ -558,6 +578,9 @@ commit message, it's usually a good idea to add other information, such as This will also help you out, because when you go to create the PR it will automatically insert the body of your commit messages. +See the `changelog `__ +docs for more information. + Pull request time! ------------------ diff --git a/README.rst b/README.rst index f5121f1a74d0..dd32e11cc5fb 100644 --- a/README.rst +++ b/README.rst @@ -10,9 +10,9 @@ :alt: PyPi Package Downloads :target: https://lgtm.com/projects/g/saltstack/salt/context:python -.. image:: https://img.shields.io/badge/slack-@saltstackcommunity-blue.svg?logo=slack +.. image:: https://img.shields.io/badge/slack-SaltProject-blue.svg?logo=slack :alt: Salt Project Slack Community - :target: https://join.slack.com/t/saltstackcommunity/shared_invite/zt-3av8jjyf-oBQ2M0vhXOhJpNpRkPWBvg + :target: https://via.vmw.com/salt-slack .. image:: https://img.shields.io/twitch/status/saltprojectoss :alt: Salt Project Twitch Channel @@ -71,7 +71,8 @@ In addition to configuration management Salt can also: About our sponsors ================== -Salt powers VMware's `vRealize Automation SaltStack Config`_, and can be found +Salt powers VMware's `VMware Aria Automation Config`_ +(previously vRealize Automation SaltStack Config / SaltStack Enterprise), and can be found under the hood of products from Juniper, Cisco, Cloudflare, Nutanix, SUSE, and Tieto, to name a few. @@ -120,7 +121,7 @@ Security advisories Keep an eye on the Salt Project `Security Announcements `_ landing page. 
Salt Project recommends subscribing to the -`Salt Project Security RSS feed `_ +`Salt Project Security RSS feed `_ to receive notification when new information is available regarding security announcements. @@ -179,8 +180,8 @@ used by external modules. A complete list of attributions and dependencies can be found here: `salt/DEPENDENCIES.md `_ -.. _Salt Project Community Slack: https://join.slack.com/t/saltstackcommunity/shared_invite/zt-3av8jjyf-oBQ2M0vhXOhJpNpRkPWBvg -.. _vRealize Automation SaltStack Config: https://www.vmware.com/products/vrealize-automation/saltstack-config.html +.. _Salt Project Community Slack: https://via.vmw.com/salt-slack +.. _VMware Aria Automation Config: https://www.vmware.com/products/vrealize-automation/saltstack-config.html .. _Latest Salt Documentation: https://docs.saltproject.io/en/latest/ .. _Open an issue: https://github.com/saltstack/salt/issues/new/choose .. _SECURITY.md: https://github.com/saltstack/salt/blob/master/SECURITY.md diff --git a/SECURITY.md b/SECURITY.md index b161605e9f02..97afd202de6d 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -1,7 +1,7 @@ # SaltStack's Security Disclosure Policy **Email** -- security@saltstack.com +- saltproject-security.pdl@broadcom.com **GPG key ID:** - 4EA0793D @@ -78,7 +78,7 @@ vtBIWO4LPeGEvb2Gs65PL2eouOqU6yvBr5Y= -----END PGP PUBLIC KEY BLOCK----- ``` -The SaltStack Security Team is available at security@saltstack.com for +The SaltStack Security Team is available at saltproject-security.pdl@broadcom.com for security-related bug reports or questions. We request the disclosure of any security-related bugs or issues be reported @@ -92,7 +92,7 @@ SaltStack takes security and the trust of our customers and users very seriously. Our disclosure policy is intended to resolve security issues as quickly and safely as is possible. -1. A security report sent to security@saltstack.com is assigned to a team +1. 
A security report sent to saltproject-security.pdl@broadcom.com is assigned to a team member. This person is the primary contact for questions and will coordinate the fix, release, and announcement. diff --git a/SUPPORT.rst b/SUPPORT.rst index bfe9d1cbeae7..f98c4d3f2eee 100644 --- a/SUPPORT.rst +++ b/SUPPORT.rst @@ -11,7 +11,7 @@ it may take a few moments for someone to reply. **SaltStack Slack** - Alongside IRC is our SaltStack Community Slack for the SaltStack Working groups. Use the following link to request an invitation. -``_ +``_ **Mailing List** - The SaltStack community users mailing list is hosted by Google groups. Anyone can post to ask questions about SaltStack products and diff --git a/changelog/31468.added.md b/changelog/31468.added.md deleted file mode 100644 index f1b2b802ba99..000000000000 --- a/changelog/31468.added.md +++ /dev/null @@ -1 +0,0 @@ -Allow users to enable 'queue=True' for all state runs via config file diff --git a/changelog/3396.added.md b/changelog/3396.added.md deleted file mode 100644 index 8a6987bb4a5e..000000000000 --- a/changelog/3396.added.md +++ /dev/null @@ -1,5 +0,0 @@ -Introduce a `LIB_STATE_DIR` syspaths variable which defaults to `CONFIG_DIR`, -but can be individually customized during installation by specifying -`--salt-lib-state-dir` during installation. Change the default `pki_dir` to -`/pki/master` (for the master) and `/pki/minion` -(for the minion). 
diff --git a/changelog/36138.fixed.md b/changelog/36138.fixed.md deleted file mode 100644 index 78b2e97cc44f..000000000000 --- a/changelog/36138.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Add kwargs to handle extra parameters for http.query diff --git a/changelog/39292.fixed.md b/changelog/39292.fixed.md deleted file mode 100644 index cbb472423aa8..000000000000 --- a/changelog/39292.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix mounted bind mounts getting active mount options added diff --git a/changelog/40054.fixed.md b/changelog/40054.fixed.md deleted file mode 100644 index 57641e5c186b..000000000000 --- a/changelog/40054.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix `sysctl.present` converts spaces to tabs. diff --git a/changelog/42306.fixed.md b/changelog/42306.fixed.md deleted file mode 100644 index d63927491711..000000000000 --- a/changelog/42306.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixes state pkg.purged to purge removed packages on Debian family systems diff --git a/changelog/43287.added.md b/changelog/43287.added.md deleted file mode 100644 index 90b47e65d443..000000000000 --- a/changelog/43287.added.md +++ /dev/null @@ -1 +0,0 @@ -Added pillar templating to vault policies diff --git a/changelog/45823.fixed.md b/changelog/45823.fixed.md deleted file mode 100644 index e419ad4f7e91..000000000000 --- a/changelog/45823.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix fun_args missing from syndic returns diff --git a/changelog/47201.fixed.md b/changelog/47201.fixed.md deleted file mode 100644 index 0dbb18186a9e..000000000000 --- a/changelog/47201.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix mount.mounted with 'mount: False' reports unmounted file system as unchanged when running with test=True diff --git a/changelog/48609.changed.md b/changelog/48609.changed.md deleted file mode 100644 index 92f1dc00f8e5..000000000000 --- a/changelog/48609.changed.md +++ /dev/null @@ -1 +0,0 @@ -More intelligent diffing in changes of file.serialize state. 
diff --git a/changelog/49310.fixed.md b/changelog/49310.fixed.md deleted file mode 100644 index ce6c5536b30b..000000000000 --- a/changelog/49310.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Issue #49310: Allow users to touch a file with Unix date of birth diff --git a/changelog/49430.changed.md b/changelog/49430.changed.md deleted file mode 100644 index 4e94ec3f596f..000000000000 --- a/changelog/49430.changed.md +++ /dev/null @@ -1 +0,0 @@ -Move deprecation of the neutron module to Argon. Please migrate to the neutronng module instead. diff --git a/changelog/51088.added.md b/changelog/51088.added.md deleted file mode 100644 index a87c8890668b..000000000000 --- a/changelog/51088.added.md +++ /dev/null @@ -1 +0,0 @@ -Add support for NVMeF as a transport protocol for hosts in a Pure Storage FlashArray diff --git a/changelog/51620.fixed.md b/changelog/51620.fixed.md deleted file mode 100644 index 79deac1397fe..000000000000 --- a/changelog/51620.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Do not raise an exception in pkg.info_installed on nonzero return code diff --git a/changelog/51739.fixed.md b/changelog/51739.fixed.md deleted file mode 100644 index 97ee15c00197..000000000000 --- a/changelog/51739.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Passes the value of the force parameter from file.copy to its call to file.remove so that files with the read-only attribute are handled. diff --git a/changelog/52167.fixed.md b/changelog/52167.fixed.md deleted file mode 100644 index 4a13253aaaa6..000000000000 --- a/changelog/52167.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed x509.certificate_managed creates new certificate every run in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. 
diff --git a/changelog/52354.fixed.md b/changelog/52354.fixed.md deleted file mode 100644 index af885d77fa4f..000000000000 --- a/changelog/52354.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Don't check for cached pillar errors on state.apply diff --git a/changelog/52400.fixed.md b/changelog/52400.fixed.md deleted file mode 100644 index 1d806f80cbcf..000000000000 --- a/changelog/52400.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Swapping out args and kwargs for arg and kwarg respectively in the Slack engine when the command passed is a runner. diff --git a/changelog/53353.fixed.md b/changelog/53353.fixed.md deleted file mode 100644 index cadda5448ccf..000000000000 --- a/changelog/53353.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Ensure when we're adding chunks to the rules when running aggregation with the iptables state module we use a copy of the chunk otherwise we end up with a recursive mess. diff --git a/changelog/53377.fixed.md b/changelog/53377.fixed.md deleted file mode 100644 index d1eac3cfff3e..000000000000 --- a/changelog/53377.fixed.md +++ /dev/null @@ -1 +0,0 @@ -When user_create or user_remove fail, return False instead of returning the error. diff --git a/changelog/53914.fixed.md b/changelog/53914.fixed.md deleted file mode 100644 index 5b08af666471..000000000000 --- a/changelog/53914.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Include sync_roster when sync_all is called. 
diff --git a/changelog/53988.fixed.md b/changelog/53988.fixed.md deleted file mode 100644 index 8220d4acddd6..000000000000 --- a/changelog/53988.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Avoid warning noise in lograte.get diff --git a/changelog/54347.fixed.md b/changelog/54347.fixed.md deleted file mode 100644 index 307e630eccf6..000000000000 --- a/changelog/54347.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed listing revoked keys with gpg.list_keys diff --git a/changelog/54508.fixed.md b/changelog/54508.fixed.md deleted file mode 100644 index 0292039705a1..000000000000 --- a/changelog/54508.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix mount.mounted does not handle blanks properly diff --git a/changelog/54679.added.md b/changelog/54679.added.md deleted file mode 100644 index c0d663f8926b..000000000000 --- a/changelog/54679.added.md +++ /dev/null @@ -1 +0,0 @@ -A new salt-ssh roster that generates a roster by parses a known_hosts file. diff --git a/changelog/54682.fixed.md b/changelog/54682.fixed.md deleted file mode 100644 index 87a958c2a79e..000000000000 --- a/changelog/54682.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed grain num_cpus get wrong CPUs count in case of inconsistent CPU numbering. 
diff --git a/changelog/54713.added.md b/changelog/54713.added.md deleted file mode 100644 index 174564b4162e..000000000000 --- a/changelog/54713.added.md +++ /dev/null @@ -1 +0,0 @@ -Added Windows Event Viewer support diff --git a/changelog/54907.fixed.md b/changelog/54907.fixed.md deleted file mode 100644 index 1f29c53b1adf..000000000000 --- a/changelog/54907.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix spelling error for python_shell argument in dpkg_lower module diff --git a/changelog/55226.fixed.md b/changelog/55226.fixed.md deleted file mode 100644 index 6947dc684aa7..000000000000 --- a/changelog/55226.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Cleaned up bytes response data before sending to non-bytes compatible returners (postgres, mysql) diff --git a/changelog/55269.fixed.md b/changelog/55269.fixed.md deleted file mode 100644 index 42543738580f..000000000000 --- a/changelog/55269.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed malformed state return when testing file.managed with unavailable source file diff --git a/changelog/55295.deprecated.md b/changelog/55295.deprecated.md deleted file mode 100644 index 71c9da60baae..000000000000 --- a/changelog/55295.deprecated.md +++ /dev/null @@ -1,2 +0,0 @@ -renamed `keep_jobs`, specifying job cache TTL in hours, to `keep_jobs_seconds`, specifying TTL in seconds. -`keep_jobs` will be removed in the Argon release diff --git a/changelog/56013.added.md b/changelog/56013.added.md deleted file mode 100644 index a249f407a456..000000000000 --- a/changelog/56013.added.md +++ /dev/null @@ -1 +0,0 @@ -Added the win_lgpo_reg state and execution modules which will allow registry based group policy to be set directly in the Registry.pol file diff --git a/changelog/56016.fixed.md b/changelog/56016.fixed.md deleted file mode 100644 index de186daa9d09..000000000000 --- a/changelog/56016.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Included stdout in error message for Zypper calls in zypperpkg module. 
diff --git a/changelog/56093.fixed.md b/changelog/56093.fixed.md deleted file mode 100644 index 2b898dd72f37..000000000000 --- a/changelog/56093.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed pillar.filter_by with salt-ssh diff --git a/changelog/57139.fixed.md b/changelog/57139.fixed.md deleted file mode 100644 index af460e9b2a71..000000000000 --- a/changelog/57139.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix boto_route53 issue with (multiple) VPCs. diff --git a/changelog/57180.fixed.md b/changelog/57180.fixed.md deleted file mode 100644 index af885d77fa4f..000000000000 --- a/changelog/57180.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Don't check for cached pillar errors on state.apply diff --git a/changelog/57463.fixed.md b/changelog/57463.fixed.md deleted file mode 100644 index d0678bf0e286..000000000000 --- a/changelog/57463.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Remove log from mine runner which was not used. diff --git a/changelog/57500.added.md b/changelog/57500.added.md deleted file mode 100644 index 91af2f8bae25..000000000000 --- a/changelog/57500.added.md +++ /dev/null @@ -1 +0,0 @@ -Added resource tagging functions to boto_dynamodb execution module diff --git a/changelog/57535.fixed.md b/changelog/57535.fixed.md deleted file mode 100644 index 23e8ceaff939..000000000000 --- a/changelog/57535.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed x509.read_certificate error when reading a Microsoft CA issued certificate in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. diff --git a/changelog/57803.changed.md b/changelog/57803.changed.md deleted file mode 100644 index f278bd479a76..000000000000 --- a/changelog/57803.changed.md +++ /dev/null @@ -1,2 +0,0 @@ -``umask`` is now a global state argument, instead of only applying to ``cmd`` -states. 
diff --git a/changelog/57842.fixed.md b/changelog/57842.fixed.md deleted file mode 100644 index c708020bd1ae..000000000000 --- a/changelog/57842.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Updating Slack engine to use slack_bolt library. diff --git a/changelog/58165.fixed.md b/changelog/58165.fixed.md deleted file mode 100644 index 31a76928f7a5..000000000000 --- a/changelog/58165.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed warning about replace=True with x509.certificate_managed in the new cryptography x509 module. diff --git a/changelog/58202.fixed.md b/changelog/58202.fixed.md deleted file mode 100644 index b4cf0c0add78..000000000000 --- a/changelog/58202.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix salt.modules.pip:is_installed doesn't handle locally installed packages diff --git a/changelog/58297.fixed.md b/changelog/58297.fixed.md deleted file mode 100644 index 4aaa88034131..000000000000 --- a/changelog/58297.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Add missing MariaDB Grants to mysql module. MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. diff --git a/changelog/58423.fixed.md b/changelog/58423.fixed.md deleted file mode 100644 index ea678fb69856..000000000000 --- a/changelog/58423.fixed.md +++ /dev/null @@ -1,2 +0,0 @@ -linux_shadow: Fix cases where malformed shadow entries cause `user.present` -states to fail. 
diff --git a/changelog/58729.fixed.md b/changelog/58729.fixed.md deleted file mode 100644 index 6e806784a369..000000000000 --- a/changelog/58729.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed salt.utils.compat.cmp to work with dictionaries diff --git a/changelog/58953.fixed.md b/changelog/58953.fixed.md deleted file mode 100644 index fd8b01baec65..000000000000 --- a/changelog/58953.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed formatting for terse output mode diff --git a/changelog/58971.changed.md b/changelog/58971.changed.md deleted file mode 100644 index d0d54ec84a15..000000000000 --- a/changelog/58971.changed.md +++ /dev/null @@ -1 +0,0 @@ -Update pillar.obfuscate to accept kwargs in addition to args. This is useful when passing in keyword arguments like saltenv that are then passed along to pillar.items. diff --git a/changelog/58986.added.md b/changelog/58986.added.md deleted file mode 100644 index ba03c9324b77..000000000000 --- a/changelog/58986.added.md +++ /dev/null @@ -1,5 +0,0 @@ -Added `openvswitch_db` state module and functions `bridge_to_parent`, -`bridge_to_vlan`, `db_get`, and `db_set` to the `openvswitch` execution module. -Also added optional `parent` and `vlan` parameters to the -`openvswitch_bridge.present` state module function and the -`openvswitch.bridge_create` execution module function. diff --git a/changelog/59017.fixed.md b/changelog/59017.fixed.md deleted file mode 100644 index a34acd3429de..000000000000 --- a/changelog/59017.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed RecursiveDictDiffer with added nested dicts diff --git a/changelog/59169.fixed.md b/changelog/59169.fixed.md deleted file mode 100644 index 718be2a22e3b..000000000000 --- a/changelog/59169.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed x509.certificate_managed has DoS effect on master in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. 
diff --git a/changelog/59183.fixed.md b/changelog/59183.fixed.md deleted file mode 100644 index 953c94b2b5da..000000000000 --- a/changelog/59183.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed saltnado websockets disconnecting immediately diff --git a/changelog/59315.fixed.md b/changelog/59315.fixed.md deleted file mode 100644 index 67fa822f6785..000000000000 --- a/changelog/59315.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed x509.certificate_managed rolls certificates every now and then in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. diff --git a/changelog/59339.fixed.md b/changelog/59339.fixed.md deleted file mode 100644 index af885d77fa4f..000000000000 --- a/changelog/59339.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Don't check for cached pillar errors on state.apply diff --git a/changelog/59439.changed.md b/changelog/59439.changed.md deleted file mode 100644 index 282c55a658fe..000000000000 --- a/changelog/59439.changed.md +++ /dev/null @@ -1 +0,0 @@ -Improve support for listing macOS brew casks diff --git a/changelog/59585.fixed.md b/changelog/59585.fixed.md deleted file mode 100644 index c73593f7dec7..000000000000 --- a/changelog/59585.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix postgres_privileges.present not idempotent for functions diff --git a/changelog/59766.fixed.md b/changelog/59766.fixed.md deleted file mode 100644 index 24feb19ddf50..000000000000 --- a/changelog/59766.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed influxdb_continuous_query.present state to provide the client args to the underlying module on create. diff --git a/changelog/59786.fixed.md b/changelog/59786.fixed.md deleted file mode 100644 index 00612d70fbf1..000000000000 --- a/changelog/59786.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Warn when using insecure (http:// based) key_urls for apt-based systems in pkgrepo.managed, and add a kwarg that determines the validity of such a url. 
diff --git a/changelog/59909.fixed.md b/changelog/59909.fixed.md deleted file mode 100644 index b16c524c7a92..000000000000 --- a/changelog/59909.fixed.md +++ /dev/null @@ -1 +0,0 @@ -add load balancing policy default option and ensure the module can be executed with arguments from CLI diff --git a/changelog/59917.removed.md b/changelog/59917.removed.md deleted file mode 100644 index a91e76d6acdf..000000000000 --- a/changelog/59917.removed.md +++ /dev/null @@ -1 +0,0 @@ -Remove and deprecate the __orchestration__ key from salt.runner and salt.wheel return data. To get it back, set features.enable_deprecated_orchestration_flag master configuration option to True. The flag will be completely removed in Salt 3008 Argon. diff --git a/changelog/60003.fixed.md b/changelog/60003.fixed.md deleted file mode 100644 index 7b0ccd4010c5..000000000000 --- a/changelog/60003.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix salt-ssh when using imports with extra-filerefs. diff --git a/changelog/60154.added.md b/changelog/60154.added.md deleted file mode 100644 index 3957b19ce93b..000000000000 --- a/changelog/60154.added.md +++ /dev/null @@ -1 +0,0 @@ -State module to manage SysFS attributes diff --git a/changelog/60170.fixed.md b/changelog/60170.fixed.md deleted file mode 100644 index b19324a76cd0..000000000000 --- a/changelog/60170.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed cache directory corruption startup error diff --git a/changelog/60227.fixed.md b/changelog/60227.fixed.md deleted file mode 100644 index 60d46b466364..000000000000 --- a/changelog/60227.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Update docs remove dry_run in docstring of file.blockreplace state. diff --git a/changelog/60249.fixed.md b/changelog/60249.fixed.md deleted file mode 100644 index f44049f3f2bd..000000000000 --- a/changelog/60249.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Adds Parrot to OS_Family_Map in grains. 
diff --git a/changelog/60365.fixed.md b/changelog/60365.fixed.md deleted file mode 100644 index 778334ee4b39..000000000000 --- a/changelog/60365.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions diff --git a/changelog/60430.added.md b/changelog/60430.added.md deleted file mode 100644 index cee2d2758543..000000000000 --- a/changelog/60430.added.md +++ /dev/null @@ -1 +0,0 @@ -Added ability for `salt.wait_for_event` to handle `event_id`s that have a list value. diff --git a/changelog/60467.fixed.md b/changelog/60467.fixed.md deleted file mode 100644 index 6adc3e29cbfc..000000000000 --- a/changelog/60467.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Use return code in iptables --check to verify rule exists. diff --git a/changelog/60476.removed.md b/changelog/60476.removed.md deleted file mode 100644 index e72141263e1c..000000000000 --- a/changelog/60476.removed.md +++ /dev/null @@ -1 +0,0 @@ -Removed distutils and replaced with setuptools, given distutils is deprecated and removed in Python 3.12 diff --git a/changelog/60518.added.md b/changelog/60518.added.md deleted file mode 100644 index 322ee04da17f..000000000000 --- a/changelog/60518.added.md +++ /dev/null @@ -1 +0,0 @@ -Added suport for Linux ppc64le core grains (cpu_model, virtual, productname, manufacturer, serialnumber) and arm core grains (serialnumber, productname) diff --git a/changelog/60557.fixed.md b/changelog/60557.fixed.md deleted file mode 100644 index 0634bc839528..000000000000 --- a/changelog/60557.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix regression pip.installed does not pass env_vars when calling pip.list diff --git a/changelog/60700.added.md b/changelog/60700.added.md deleted file mode 100644 index 91eee4260d5e..000000000000 --- a/changelog/60700.added.md +++ /dev/null @@ -1 +0,0 @@ -Added autostart option to virt.defined and virt.running states, along with virt.update execution modules. 
diff --git a/changelog/60722.added.md b/changelog/60722.added.md deleted file mode 100644 index 183fbc8b8e95..000000000000 --- a/changelog/60722.added.md +++ /dev/null @@ -1 +0,0 @@ -Added .0 back to our versioning scheme for future versions (e.g. 3006.0) diff --git a/changelog/60853.fixed.md b/changelog/60853.fixed.md deleted file mode 100644 index 3264c66ad7cd..000000000000 --- a/changelog/60853.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix xfs module when additional output included in mkfs.xfs command. diff --git a/changelog/60915.fixed.md b/changelog/60915.fixed.md deleted file mode 100644 index f3c2644c723e..000000000000 --- a/changelog/60915.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed parsing new format of terraform states in roster.terraform diff --git a/changelog/60994.fixed.md b/changelog/60994.fixed.md deleted file mode 100644 index 3c4be7014888..000000000000 --- a/changelog/60994.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed recognizing installed ARMv7 rpm packages in compatible architectures. diff --git a/changelog/60995.fixed.md b/changelog/60995.fixed.md deleted file mode 100644 index 9a81e1b254a5..000000000000 --- a/changelog/60995.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixing changes dict in pkg state to be consistent when installing and test=True. diff --git a/changelog/60997.fixed.md b/changelog/60997.fixed.md deleted file mode 100644 index 779005f5014f..000000000000 --- a/changelog/60997.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix cron.present duplicating entries when changing timespec to special. 
diff --git a/changelog/61083.fixed.md b/changelog/61083.fixed.md deleted file mode 100644 index 2d78d777bd7a..000000000000 --- a/changelog/61083.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Made salt-ssh respect --wipe again diff --git a/changelog/61092.fixed.md b/changelog/61092.fixed.md deleted file mode 100644 index 6ca66839c962..000000000000 --- a/changelog/61092.fixed.md +++ /dev/null @@ -1,3 +0,0 @@ -state.orchestrate_single only passes a pillar if it is set to the state -function. This allows it to be used with state functions that don't accept a -pillar keyword argument. diff --git a/changelog/61122.fixed.md b/changelog/61122.fixed.md deleted file mode 100644 index 85fc69674aee..000000000000 --- a/changelog/61122.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix ipset state when the comment kwarg is set. diff --git a/changelog/61153.added.md b/changelog/61153.added.md deleted file mode 100644 index ad7b978a7f2f..000000000000 --- a/changelog/61153.added.md +++ /dev/null @@ -1 +0,0 @@ -Initial work to allow parallel startup of proxy minions when used as sub proxies with Deltaproxy. diff --git a/changelog/61245.added.md b/changelog/61245.added.md deleted file mode 100644 index 83b0b1ae60d1..000000000000 --- a/changelog/61245.added.md +++ /dev/null @@ -1 +0,0 @@ -Added node label support for GCE diff --git a/changelog/61319.added.md b/changelog/61319.added.md deleted file mode 100644 index 990bd79c9747..000000000000 --- a/changelog/61319.added.md +++ /dev/null @@ -1 +0,0 @@ -Support the --priority flag when adding sources to Chocolatey. 
diff --git a/changelog/61335.added.md b/changelog/61335.added.md deleted file mode 100644 index 25d64d763f78..000000000000 --- a/changelog/61335.added.md +++ /dev/null @@ -1 +0,0 @@ -Add namespace option to ext_pillar.http_json diff --git a/changelog/61409.changed.md b/changelog/61409.changed.md deleted file mode 100644 index 1c210d80b343..000000000000 --- a/changelog/61409.changed.md +++ /dev/null @@ -1,3 +0,0 @@ -Add missing MariaDB Grants to mysql module. -MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. -Also improved exception handling in `grant_add` which did not log the original error message and replaced it with a generic error. diff --git a/changelog/61420.added.md b/changelog/61420.added.md deleted file mode 100644 index 5ca782ff9169..000000000000 --- a/changelog/61420.added.md +++ /dev/null @@ -1 +0,0 @@ -Added a filter function to ps module to get a list of processes on a minion according to their state. diff --git a/changelog/61422.fixed.md b/changelog/61422.fixed.md deleted file mode 100644 index ee9b1579e25a..000000000000 --- a/changelog/61422.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix issue with archive.unzip where the password was not being encoded for the extract function diff --git a/changelog/61433.added.md b/changelog/61433.added.md deleted file mode 100644 index 9cc27679ecee..000000000000 --- a/changelog/61433.added.md +++ /dev/null @@ -1 +0,0 @@ -Add postgres.timeout option to postgres module for limiting postgres query times diff --git a/changelog/61542.deprecated.md b/changelog/61542.deprecated.md deleted file mode 100644 index 121d646dd4bd..000000000000 --- a/changelog/61542.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -Removing all references to napalm-base which is no longer supported. 
diff --git a/changelog/61618.fixed.md b/changelog/61618.fixed.md deleted file mode 100644 index 0f38289e2cbe..000000000000 --- a/changelog/61618.fixed.md +++ /dev/null @@ -1,4 +0,0 @@ -Some Linux distributions (like AlmaLinux, Astra Linux, Debian, Mendel, Linux -Mint, Pop!_OS, Rocky Linux) report different `oscodename`, `osfullname`, -`osfinger` grains if lsb-release is installed or not. They have been changed to -only derive these OS grains from `/etc/os-release`. diff --git a/changelog/61619.fixed.md b/changelog/61619.fixed.md deleted file mode 100644 index 4f9159792c90..000000000000 --- a/changelog/61619.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Pop!_OS uses the full version (YY.MM) in the osfinger grain now, not just the year. This allows differentiating for example between 20.04 and 20.10. diff --git a/changelog/61650.fixed.md b/changelog/61650.fixed.md deleted file mode 100644 index c60f78baa95a..000000000000 --- a/changelog/61650.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix ssh config roster to correctly parse the ssh config files that contain spaces. 
diff --git a/changelog/61727.fixed.md b/changelog/61727.fixed.md deleted file mode 100644 index 296c6528e0ac..000000000000 --- a/changelog/61727.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix SoftLayer configuration not raising an exception when a domain is missing diff --git a/changelog/61789.fixed.md b/changelog/61789.fixed.md deleted file mode 100644 index e7cae60262d4..000000000000 --- a/changelog/61789.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry diff --git a/changelog/61805.fixed.md b/changelog/61805.fixed.md deleted file mode 100644 index 1a0339fb6a63..000000000000 --- a/changelog/61805.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function. diff --git a/changelog/61814.fixed.md b/changelog/61814.fixed.md deleted file mode 100644 index 6d06855ebb80..000000000000 --- a/changelog/61814.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed malformed state return when merge-serializing to an improperly formatted file diff --git a/changelog/61816.fixed.md b/changelog/61816.fixed.md deleted file mode 100644 index f45856f2b5e1..000000000000 --- a/changelog/61816.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Made cmdmod._run[_all]_quiet work during minion startup on MacOS with runas specified (which fixed mac_service) diff --git a/changelog/61821.fixed.md b/changelog/61821.fixed.md deleted file mode 100644 index 6ff1221f5436..000000000000 --- a/changelog/61821.fixed.md +++ /dev/null @@ -1 +0,0 @@ -When deleting the vault cache, also delete from the session cache diff --git a/changelog/61827.fixed.md b/changelog/61827.fixed.md deleted file mode 100644 index 57747ce1ee5d..000000000000 --- a/changelog/61827.fixed.md +++ /dev/null @@ -1 +0,0 @@ 
-Ignore errors on reading license info with dpkg_lowpkg to prevent tracebacks on getting package information. diff --git a/changelog/61857.added.md b/changelog/61857.added.md deleted file mode 100644 index d9400702845f..000000000000 --- a/changelog/61857.added.md +++ /dev/null @@ -1 +0,0 @@ -Added new optional vault option, ``config_location``. This can be either ``master`` or ``local`` and defines where vault will look for connection details, either requesting them from the master or using the local config. diff --git a/changelog/61859.fixed.md b/changelog/61859.fixed.md deleted file mode 100644 index 1bfcbec99b07..000000000000 --- a/changelog/61859.fixed.md +++ /dev/null @@ -1 +0,0 @@ -win_lgpo: Display conflicting policy names when more than one policy is found diff --git a/changelog/61860.fixed.md b/changelog/61860.fixed.md deleted file mode 100644 index 3b64fade1953..000000000000 --- a/changelog/61860.fixed.md +++ /dev/null @@ -1 +0,0 @@ -win_lgpo: Fixed intermittent KeyError when getting policy setting using lgpo.get_policy diff --git a/changelog/61931.added.md b/changelog/61931.added.md deleted file mode 100644 index c6b1d318e78a..000000000000 --- a/changelog/61931.added.md +++ /dev/null @@ -1 +0,0 @@ -Add ipwrap() jinja filter to wrap IPv6 addresses with brackets. 
diff --git a/changelog/61966.fixed.md b/changelog/61966.fixed.md deleted file mode 100644 index e772fcf33af4..000000000000 --- a/changelog/61966.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed listing minions on OpenBSD diff --git a/changelog/62009.added.md b/changelog/62009.added.md deleted file mode 100644 index 7a8303d4fecb..000000000000 --- a/changelog/62009.added.md +++ /dev/null @@ -1 +0,0 @@ -'tcp' transport is now available in ipv6-only network diff --git a/changelog/62009.deprecated.md b/changelog/62009.deprecated.md deleted file mode 100644 index e9455f07a7b5..000000000000 --- a/changelog/62009.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -The 'ip_bracket' function has been moved from salt/utils/zeromq.py in salt/utils/network.py diff --git a/changelog/62019.fixed.md b/changelog/62019.fixed.md deleted file mode 100644 index 8a772963e47d..000000000000 --- a/changelog/62019.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Make Salt to return an error on "pkg" modules and states when targeting duplicated package names diff --git a/changelog/62022.fixed.md b/changelog/62022.fixed.md deleted file mode 100644 index 49084228f9c6..000000000000 --- a/changelog/62022.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix return of REST-returned permissions when auth_list is set diff --git a/changelog/62029.fixed.md b/changelog/62029.fixed.md deleted file mode 100644 index b468d5a14989..000000000000 --- a/changelog/62029.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Normalize package names once on using pkg.installed/removed with yum to make it possible to install packages with the name containing a part similar to a name of architecture. 
diff --git a/changelog/62030.fixed.md b/changelog/62030.fixed.md deleted file mode 100644 index bd6046360695..000000000000 --- a/changelog/62030.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix inconsitency regarding name and pkgs parameters between zypperpkg.upgrade() and yumpkg.upgrade() diff --git a/changelog/62031.added.md b/changelog/62031.added.md deleted file mode 100644 index f0b66ff96f55..000000000000 --- a/changelog/62031.added.md +++ /dev/null @@ -1 +0,0 @@ -Add `diff_attr` parameter to pkg.upgrade() (zypper/yum). diff --git a/changelog/62032.fixed.md b/changelog/62032.fixed.md deleted file mode 100644 index ceb3cc89b95d..000000000000 --- a/changelog/62032.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix attr=all handling in pkg.list_pkgs() (yum/zypper). diff --git a/changelog/62053.fixed.md b/changelog/62053.fixed.md deleted file mode 100644 index 24d281b5f477..000000000000 --- a/changelog/62053.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed the humanname being ignored in pkgrepo.managed on openSUSE Leap diff --git a/changelog/62058.fixed.md b/changelog/62058.fixed.md deleted file mode 100644 index 9329631635cb..000000000000 --- a/changelog/62058.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed issue with some LGPO policies having whitespace at the beginning or end of the element alias diff --git a/changelog/62074.fixed.md b/changelog/62074.fixed.md deleted file mode 100644 index 9910d51c1b73..000000000000 --- a/changelog/62074.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix ordering of args to libcloud_storage.download_object module diff --git a/changelog/62082.fixed.md b/changelog/62082.fixed.md deleted file mode 100644 index 02e5f5ff407e..000000000000 --- a/changelog/62082.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Ignore extend declarations in sls files that are excluded. 
diff --git a/changelog/62089.changed.md b/changelog/62089.changed.md deleted file mode 100644 index 09feb2e92251..000000000000 --- a/changelog/62089.changed.md +++ /dev/null @@ -1 +0,0 @@ -Use VENV_PIP_TARGET environment variable as a default target for pip if present. diff --git a/changelog/62101.fixed.md b/changelog/62101.fixed.md deleted file mode 100644 index 99de5bbf0a3e..000000000000 --- a/changelog/62101.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Remove leftover usage of impacket diff --git a/changelog/62120.added.md b/changelog/62120.added.md deleted file mode 100644 index 4303d124f0b7..000000000000 --- a/changelog/62120.added.md +++ /dev/null @@ -1,4 +0,0 @@ -Config option pass_variable_prefix allows to distinguish variables that contain paths to pass secrets. -Config option pass_strict_fetch allows to error out when a secret cannot be fetched from pass. -Config option pass_dir allows setting the PASSWORD_STORE_DIR env for pass. -Config option pass_gnupghome allows setting the $GNUPGHOME env for pass. diff --git a/changelog/62120.fixed.md b/changelog/62120.fixed.md deleted file mode 100644 index 22a97113833c..000000000000 --- a/changelog/62120.fixed.md +++ /dev/null @@ -1,4 +0,0 @@ -Pass executable path from _get_path_exec() is used when calling the program. -The $HOME env is no longer modified globally. -Only trailing newlines are stripped from the fetched secret. -Pass process arguments are handled in a secure way. diff --git a/changelog/62131.fixed.md b/changelog/62131.fixed.md deleted file mode 100644 index 67c772568f37..000000000000 --- a/changelog/62131.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Ignore some command return codes in openbsdrcctl_service to prevent spurious errors diff --git a/changelog/62139.fixed.md b/changelog/62139.fixed.md deleted file mode 100644 index d9dfd2428a4b..000000000000 --- a/changelog/62139.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed extra period in filename output in tls module. Instead of "server.crt." 
it will now be "server.crt". diff --git a/changelog/62152.fixed.md b/changelog/62152.fixed.md deleted file mode 100644 index 752188afe75b..000000000000 --- a/changelog/62152.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Make sure lingering PAexec-*.exe files in the Windows directory are cleaned up diff --git a/changelog/62168.changed.md b/changelog/62168.changed.md deleted file mode 100644 index 564579b4ba7b..000000000000 --- a/changelog/62168.changed.md +++ /dev/null @@ -1 +0,0 @@ -Disabled FQDNs grains on macOS by default diff --git a/changelog/62178.added.md b/changelog/62178.added.md deleted file mode 100644 index 6ada29465845..000000000000 --- a/changelog/62178.added.md +++ /dev/null @@ -1 +0,0 @@ -Add file.pruned state and expanded file.rmdir exec module functionality diff --git a/changelog/62185.fixed.md b/changelog/62185.fixed.md deleted file mode 100644 index e0329c8bf33b..000000000000 --- a/changelog/62185.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Restored Salt's DeprecationWarnings diff --git a/changelog/62197.fixed.md b/changelog/62197.fixed.md deleted file mode 100644 index 081a576eb121..000000000000 --- a/changelog/62197.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed issue with forward slashes on Windows with file.recurse and clean=True diff --git a/changelog/62198.fixed.md b/changelog/62198.fixed.md deleted file mode 100644 index 9a33558a11d2..000000000000 --- a/changelog/62198.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Recognize OSMC as Debian-based diff --git a/changelog/62204.fixed.md b/changelog/62204.fixed.md deleted file mode 100644 index 59f1914593f4..000000000000 --- a/changelog/62204.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed Zypper module failing on RPM lock file being temporarily unavailable. 
diff --git a/changelog/62211.fixed.md b/changelog/62211.fixed.md deleted file mode 100644 index 92a021023a1c..000000000000 --- a/changelog/62211.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Improved error handling and diagnostics in the proxmox salt-cloud driver diff --git a/changelog/62218.changed.md b/changelog/62218.changed.md deleted file mode 100644 index 52fca5c39276..000000000000 --- a/changelog/62218.changed.md +++ /dev/null @@ -1 +0,0 @@ -Replaced pyroute2.IPDB with pyroute2.NDB, as the former is deprecated diff --git a/changelog/62220.fixed.md b/changelog/62220.fixed.md deleted file mode 100644 index ab3eec5d1519..000000000000 --- a/changelog/62220.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Added EndeavourOS to the Arch os_family. diff --git a/changelog/62235.fixed.md b/changelog/62235.fixed.md deleted file mode 100644 index a38c9e127d47..000000000000 --- a/changelog/62235.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix salt-ssh not detecting `platform-python` as a valid interpreter on EL8 diff --git a/changelog/62239.removed.md b/changelog/62239.removed.md deleted file mode 100644 index c065051cc9ae..000000000000 --- a/changelog/62239.removed.md +++ /dev/null @@ -1 +0,0 @@ -Removed ``runtests`` targets from ``noxfile.py`` diff --git a/changelog/62248.fixed.md b/changelog/62248.fixed.md deleted file mode 100644 index 50cb42bf406f..000000000000 --- a/changelog/62248.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix pkg.version_cmp on openEuler and a few other os flavors. 
diff --git a/changelog/62273.fixed.md b/changelog/62273.fixed.md deleted file mode 100644 index 1f4caad60855..000000000000 --- a/changelog/62273.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix localhost detection in glusterfs.peers diff --git a/changelog/62275.added.md b/changelog/62275.added.md deleted file mode 100644 index 7e9d90525453..000000000000 --- a/changelog/62275.added.md +++ /dev/null @@ -1 +0,0 @@ -Added "dig.PTR" function to resolve PTR records for IPs, as well as tests and documentation diff --git a/changelog/62281.fixed.md b/changelog/62281.fixed.md deleted file mode 100644 index f8bf23a7ec7c..000000000000 --- a/changelog/62281.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix Salt Package Manager (SPM) exception when calling spm create_repo . diff --git a/changelog/62283.fixed.md b/changelog/62283.fixed.md deleted file mode 100644 index 2a5d60d658bf..000000000000 --- a/changelog/62283.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix matcher slowness due to loader invocation diff --git a/changelog/62323.fixed.md b/changelog/62323.fixed.md deleted file mode 100644 index 02fc14e84799..000000000000 --- a/changelog/62323.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixes the Puppet module for non-aio Puppet packages for example running the Puppet module on FreeBSD. 
diff --git a/changelog/62334.fixed.md b/changelog/62334.fixed.md deleted file mode 100644 index 37e12d2a86fb..000000000000 --- a/changelog/62334.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Issue 62334: Displays a debug log message instead of an error log message when the publisher fails to connect diff --git a/changelog/62336.fixed.md b/changelog/62336.fixed.md deleted file mode 100644 index 300de31a0176..000000000000 --- a/changelog/62336.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix pyobjects renderer access to opts and sls diff --git a/changelog/62346.changed.md b/changelog/62346.changed.md deleted file mode 100644 index 1e131d3f7153..000000000000 --- a/changelog/62346.changed.md +++ /dev/null @@ -1 +0,0 @@ -Enhance capture of error messages for Zypper calls in zypperpkg module. diff --git a/changelog/62366.added.md b/changelog/62366.added.md deleted file mode 100644 index 00871a8ca85b..000000000000 --- a/changelog/62366.added.md +++ /dev/null @@ -1 +0,0 @@ -Added the ability to remove a KB using the DISM state/execution modules diff --git a/changelog/62372.fixed.md b/changelog/62372.fixed.md deleted file mode 100644 index 1d460b9d0e67..000000000000 --- a/changelog/62372.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix use of random shuffle and sample functions as Jinja filters diff --git a/changelog/62377.fixed.md b/changelog/62377.fixed.md deleted file mode 100644 index 1a4bef889cb6..000000000000 --- a/changelog/62377.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix groups with duplicate GIDs are not returned by get_group_list diff --git a/changelog/62381.added.md b/changelog/62381.added.md deleted file mode 100644 index 8bea04f2ea49..000000000000 --- a/changelog/62381.added.md +++ /dev/null @@ -1 +0,0 @@ -Add " python" subcommand to allow execution or arbitrary scripts via bundled Python runtime diff --git a/changelog/62390.fixed.md b/changelog/62390.fixed.md deleted file mode 100644 index e5fee65205f8..000000000000 --- a/changelog/62390.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix the 
"zpool.present" state when enabling zpool features that are already active. diff --git a/changelog/62398.fixed.md b/changelog/62398.fixed.md deleted file mode 100644 index c5e10154aeb2..000000000000 --- a/changelog/62398.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix ability to execute remote file client methods in saltcheck diff --git a/changelog/62400.fixed.md b/changelog/62400.fixed.md deleted file mode 100644 index 290866b1f533..000000000000 --- a/changelog/62400.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Update all platforms to use pycparser 2.21 or greater for Py 3.9 or higher, fixes fips fault with openssl v3.x diff --git a/changelog/62405.fixed.md b/changelog/62405.fixed.md deleted file mode 100644 index a8077992fe1e..000000000000 --- a/changelog/62405.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Due to changes in the Netmiko library for the exception paths, need to check the version of Netmiko python library and then import the exceptions from different locations depending on the result. diff --git a/changelog/62408.fixed.md b/changelog/62408.fixed.md deleted file mode 100644 index 741af586a9c8..000000000000 --- a/changelog/62408.fixed.md +++ /dev/null @@ -1 +0,0 @@ -When using preq on a state, then prereq state will first be run with test=True to determine if there are changes. When there are changes, the state with the prereq option will be run prior to the prereq state. If this state fails then the prereq state will not run and the state output uses the test=True run. However, the proposed changes are included for the prereq state are included from the test=True run. We should pull those out as there weren't actually changes since the prereq state did not run. 
diff --git a/changelog/62426.fixed.md b/changelog/62426.fixed.md deleted file mode 100644 index 03242a22d01f..000000000000 --- a/changelog/62426.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Added directory mode for file.copy with makedirs diff --git a/changelog/62432.removed.md b/changelog/62432.removed.md deleted file mode 100644 index b696136a2081..000000000000 --- a/changelog/62432.removed.md +++ /dev/null @@ -1,5 +0,0 @@ -Removed the PyObjC dependency. - -This addresses problems with building a one dir build for macOS. -It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir(because it would be tied to the macOS version where the onedir was built). -Since it's currently not being used, it's removed. diff --git a/changelog/62435.fixed.md b/changelog/62435.fixed.md deleted file mode 100644 index 54286a588502..000000000000 --- a/changelog/62435.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Provide better error handling in the various napalm proxy minion functions when the device is not accessible. diff --git a/changelog/62439.fixed.md b/changelog/62439.fixed.md deleted file mode 100644 index 5863b52c2d2f..000000000000 --- a/changelog/62439.fixed.md +++ /dev/null @@ -1 +0,0 @@ -When handling aggregation, change the order to ensure that the requisites are aggregated first and then the state functions are aggregated. Caching whether aggregate functions are available for particular states so we don't need to attempt to load them everytime. 
diff --git a/changelog/62446.added.md b/changelog/62446.added.md deleted file mode 100644 index 86ad064ee97f..000000000000 --- a/changelog/62446.added.md +++ /dev/null @@ -1 +0,0 @@ -Add ability to provide conditions which convert normal state actions to no-op when true diff --git a/changelog/62451.fixed.md b/changelog/62451.fixed.md deleted file mode 100644 index 13164f86955e..000000000000 --- a/changelog/62451.fixed.md +++ /dev/null @@ -1 +0,0 @@ -The patch allows to boostrap kubernetes clusters in the version above 1.13 via salt module diff --git a/changelog/62461.fixed.md b/changelog/62461.fixed.md deleted file mode 100644 index 4f8f76ad9433..000000000000 --- a/changelog/62461.fixed.md +++ /dev/null @@ -1 +0,0 @@ -sysctl.persist now updates the in-memory value on FreeBSD even if the on-disk value was already correct. diff --git a/changelog/62474.fixed.md b/changelog/62474.fixed.md deleted file mode 100644 index bf45b04872a0..000000000000 --- a/changelog/62474.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed parsing CDROM apt sources diff --git a/changelog/62480.added.md b/changelog/62480.added.md deleted file mode 100644 index 50044e2d1d95..000000000000 --- a/changelog/62480.added.md +++ /dev/null @@ -1 +0,0 @@ -Added debug log messages displaying the command being run when installing packages on Windows diff --git a/changelog/62483.fixed.md b/changelog/62483.fixed.md deleted file mode 100644 index 54201adde5d4..000000000000 --- a/changelog/62483.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Update sanitizing masking for Salt SSH to include additional password like strings. diff --git a/changelog/62485.deprecated.md b/changelog/62485.deprecated.md deleted file mode 100644 index 64fdf5869416..000000000000 --- a/changelog/62485.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -The `expand_repo_def` function in `salt.modules.aptpkg` is now deprecated. It's only used in `salt.states.pkgrepo` and it has no use of being exposed to the CLI. 
diff --git a/changelog/62496.added.md b/changelog/62496.added.md deleted file mode 100644 index 873138fde967..000000000000 --- a/changelog/62496.added.md +++ /dev/null @@ -1 +0,0 @@ -Add biosvendor grain diff --git a/changelog/62499.fixed.md b/changelog/62499.fixed.md deleted file mode 100644 index 7b307a19d1da..000000000000 --- a/changelog/62499.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix user/group checking on file state functions in the test mode. diff --git a/changelog/62502.fixed.md b/changelog/62502.fixed.md deleted file mode 100644 index 5de34ad71be4..000000000000 --- a/changelog/62502.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix user.present to allow removing groups using optional_groups parameter and enforcing idempotent group membership. diff --git a/changelog/62508.added.md b/changelog/62508.added.md deleted file mode 100644 index 7bf68b441d8f..000000000000 --- a/changelog/62508.added.md +++ /dev/null @@ -1 +0,0 @@ -Add ifelse Jinja function as found in CFEngine diff --git a/changelog/62519.fixed.md b/changelog/62519.fixed.md deleted file mode 100644 index 4d4ac2c746a7..000000000000 --- a/changelog/62519.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix possible tracebacks if there is a package with '------' or '======' in the description is installed on the Debian based minion. 
diff --git a/changelog/62521.fixed.md b/changelog/62521.fixed.md deleted file mode 100644 index 35e58e902980..000000000000 --- a/changelog/62521.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed the omitted "pool" parameter when cloning a VM with the proxmox salt-cloud driver diff --git a/changelog/62523.fixed.md b/changelog/62523.fixed.md deleted file mode 100644 index c77cc5d84c85..000000000000 --- a/changelog/62523.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix rendering of pyobjects states in saltcheck diff --git a/changelog/62527.fixed.md b/changelog/62527.fixed.md deleted file mode 100644 index 3c9b76ebc337..000000000000 --- a/changelog/62527.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixes pillar where a corrupted CacheDisk file forces the pillar to be rebuilt diff --git a/changelog/62539.added.md b/changelog/62539.added.md deleted file mode 100644 index 5f402d61c2d2..000000000000 --- a/changelog/62539.added.md +++ /dev/null @@ -1 +0,0 @@ -Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. diff --git a/changelog/62546.fixed.md b/changelog/62546.fixed.md deleted file mode 100644 index 68ada1d2bc54..000000000000 --- a/changelog/62546.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Use str() method instead of repo_line for when python3-apt is installed or not in aptpkg.py. diff --git a/changelog/62547.fixed.md b/changelog/62547.fixed.md deleted file mode 100644 index 44b4ff4113d6..000000000000 --- a/changelog/62547.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Remove the connection_timeout from netmiko_connection_args before netmiko_connection_args is added to __context__["netmiko_device"]["args"] which is passed along to the Netmiko library. 
diff --git a/changelog/62556.fixed.md b/changelog/62556.fixed.md deleted file mode 100644 index 0dc3a7933ec2..000000000000 --- a/changelog/62556.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix order specific mount.mounted options for persist diff --git a/changelog/62558.fixed.md b/changelog/62558.fixed.md deleted file mode 100644 index cdc2a8489db3..000000000000 --- a/changelog/62558.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed salt-cloud cloning a proxmox VM with a specified new vmid. diff --git a/changelog/62565.fixed.md b/changelog/62565.fixed.md deleted file mode 100644 index 398b82e6d850..000000000000 --- a/changelog/62565.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix runas with cmd module when using the onedir bundled packages diff --git a/changelog/62576.fixed.md b/changelog/62576.fixed.md deleted file mode 100644 index e7f26a80d5ee..000000000000 --- a/changelog/62576.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Update setproctitle version for all platforms diff --git a/changelog/62578.added.md b/changelog/62578.added.md deleted file mode 100644 index 1439da3da170..000000000000 --- a/changelog/62578.added.md +++ /dev/null @@ -1 +0,0 @@ -Adds __env__substitution to ext_pillar.stack; followup of #61531, improved exception handling for stacked template (jinja) template rendering and yaml parsing in ext_pillar.stack diff --git a/changelog/62580.fixed.md b/changelog/62580.fixed.md deleted file mode 100644 index f0c807c30cc1..000000000000 --- a/changelog/62580.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed missing parameters when cloning a VM with the proxmox salt-cloud driver diff --git a/changelog/62587.fixed.md b/changelog/62587.fixed.md deleted file mode 100644 index fc5e2871d100..000000000000 --- a/changelog/62587.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Handle PermissionError when importing crypt when FIPS is enabled. 
diff --git a/changelog/62595.fixed.md b/changelog/62595.fixed.md deleted file mode 100644 index 5462ad7ddaca..000000000000 --- a/changelog/62595.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Correctly reraise exceptions in states.http diff --git a/changelog/62618.fixed.md b/changelog/62618.fixed.md deleted file mode 100644 index aeb1ecff6aab..000000000000 --- a/changelog/62618.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed syndic eauth. Now jobs will be published when a valid eauth user is targeting allowed minions/functions. diff --git a/changelog/62624.fixed.md b/changelog/62624.fixed.md deleted file mode 100644 index 661f97e3b463..000000000000 --- a/changelog/62624.fixed.md +++ /dev/null @@ -1 +0,0 @@ -updated rest_cherry/app to properly detect arg sent as a string as curl will do when only one arg is supplied. diff --git a/changelog/62633.fixed.md b/changelog/62633.fixed.md deleted file mode 100644 index 1ab74f9122be..000000000000 --- a/changelog/62633.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline diff --git a/changelog/62644.deprecated.md b/changelog/62644.deprecated.md deleted file mode 100644 index 10a5e5fbd58a..000000000000 --- a/changelog/62644.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -Deprecated defunct Django returner diff --git a/changelog/62651.fixed.md b/changelog/62651.fixed.md deleted file mode 100644 index 402fd42c1709..000000000000 --- a/changelog/62651.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed vault ext pillar return data for KV v2 diff --git a/changelog/62654.fixed.md b/changelog/62654.fixed.md deleted file mode 100644 index 67a265040adc..000000000000 --- a/changelog/62654.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix saltcheck _get_top_states doesn't pass saltenv to state.show_top diff --git a/changelog/62657.fixed.md b/changelog/62657.fixed.md deleted file mode 100644 index d6e83556c0e5..000000000000 --- a/changelog/62657.fixed.md 
+++ /dev/null @@ -1 +0,0 @@ -Fix groupadd.* functions hard code relative command name diff --git a/changelog/62670.fixed.md b/changelog/62670.fixed.md deleted file mode 100644 index 2fdcb6b8b848..000000000000 --- a/changelog/62670.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed pdbedit.create trying to use a bytes-like hash as string. diff --git a/changelog/62672.fixed.md b/changelog/62672.fixed.md deleted file mode 100644 index 579110cce919..000000000000 --- a/changelog/62672.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix depenency on legacy boto module in boto3 modules diff --git a/changelog/62676.fixed.md b/changelog/62676.fixed.md deleted file mode 100644 index 81e96f54669a..000000000000 --- a/changelog/62676.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Modified "_get_flags" function so that it returns regex flags instead of integers diff --git a/changelog/62678.added.md b/changelog/62678.added.md deleted file mode 100644 index c43db8f964ce..000000000000 --- a/changelog/62678.added.md +++ /dev/null @@ -1 +0,0 @@ -Increase file.tidied flexibility with regard to age and size diff --git a/changelog/62728.fixed.md b/changelog/62728.fixed.md deleted file mode 100644 index 770237cfd9c5..000000000000 --- a/changelog/62728.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Change startup ReqServer log messages from error to info level. diff --git a/changelog/62754.deprecated.md b/changelog/62754.deprecated.md deleted file mode 100644 index 04b44764ca00..000000000000 --- a/changelog/62754.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -Deprecate core ESXi and associated states and modules, vcenter and vsphere support in favor of Salt VMware Extensions diff --git a/changelog/62761.added.md b/changelog/62761.added.md deleted file mode 100644 index b0cd0d4a3cba..000000000000 --- a/changelog/62761.added.md +++ /dev/null @@ -1 +0,0 @@ -Added "connected_devices" feature to netbox pillar module. 
It contains extra information about devices connected to the minion diff --git a/changelog/62768.added.md b/changelog/62768.added.md deleted file mode 100644 index 85cf2b6b345d..000000000000 --- a/changelog/62768.added.md +++ /dev/null @@ -1 +0,0 @@ -Add atomic file operation for symlink changes diff --git a/changelog/62772.fixed.md b/changelog/62772.fixed.md deleted file mode 100644 index c81a3a5908ea..000000000000 --- a/changelog/62772.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix kmod.* functions hard code relative command name diff --git a/changelog/62793.fixed.md b/changelog/62793.fixed.md deleted file mode 100644 index 9e224483e3cb..000000000000 --- a/changelog/62793.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix mac_brew_pkg to work with null taps diff --git a/changelog/62795.fixed.md b/changelog/62795.fixed.md deleted file mode 100644 index bbd035493fc9..000000000000 --- a/changelog/62795.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" items is being treated as a schedule item. diff --git a/changelog/62817.fixed.md b/changelog/62817.fixed.md deleted file mode 100644 index ff335f2916b8..000000000000 --- a/changelog/62817.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode diff --git a/changelog/62818.fixed.md b/changelog/62818.fixed.md deleted file mode 100644 index 944d2227c7c2..000000000000 --- a/changelog/62818.fixed.md +++ /dev/null @@ -1,2 +0,0 @@ -Include UID and GID checks in modules.file.check_perms as well as comparing -ownership by username and group name. 
diff --git a/changelog/62826.fixed.md b/changelog/62826.fixed.md deleted file mode 100644 index 2f923e70e699..000000000000 --- a/changelog/62826.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix presence events on TCP transport by removing a client's presence when minion disconnects from publish channel correctly diff --git a/changelog/62845.fixed.md b/changelog/62845.fixed.md deleted file mode 100644 index 1bfa3f24293b..000000000000 --- a/changelog/62845.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Remove Azure deprecation messages from functions that always run w/ salt-cloud diff --git a/changelog/62854.fixed.md b/changelog/62854.fixed.md deleted file mode 100644 index 13e6df4fe390..000000000000 --- a/changelog/62854.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Use select instead of iterating over entrypoints as a dictionary for importlib_metadata>=5.0.0 diff --git a/changelog/62856.added.md b/changelog/62856.added.md deleted file mode 100644 index 94783785aa57..000000000000 --- a/changelog/62856.added.md +++ /dev/null @@ -1 +0,0 @@ -Add password/account locking/unlocking in user.present state on supported operating systems diff --git a/changelog/62858.fixed.md b/changelog/62858.fixed.md deleted file mode 100644 index bae029fc8959..000000000000 --- a/changelog/62858.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed master job scheduler using when diff --git a/changelog/62867.added.md b/changelog/62867.added.md deleted file mode 100644 index fd9ad1d278a0..000000000000 --- a/changelog/62867.added.md +++ /dev/null @@ -1 +0,0 @@ -Added onchange configuration for script engine diff --git a/changelog/62873.fixed.md b/changelog/62873.fixed.md deleted file mode 100644 index 10d1a8c573c3..000000000000 --- a/changelog/62873.fixed.md +++ /dev/null @@ -1 +0,0 @@ -LGPO: Added support for missing domain controller policies: VulnerableChannelAllowList and LdapEnforceChannelBinding diff --git a/changelog/62878.fixed.md b/changelog/62878.fixed.md deleted file mode 100644 index edcc25ec1f06..000000000000 --- 
a/changelog/62878.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix unnecessarily complex gce metadata grains code to use googles metadata service more effectively. diff --git a/changelog/62882.fixed.md b/changelog/62882.fixed.md deleted file mode 100644 index 015b55fdfafc..000000000000 --- a/changelog/62882.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed dockermod version_info function for docker-py 6.0.0+ diff --git a/changelog/62886.fixed.md b/changelog/62886.fixed.md deleted file mode 100644 index 0c20e2bfa4e9..000000000000 --- a/changelog/62886.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Moving setting the LOAD_BALANCING_POLICY_MAP dictionary into the try except block that determines if the cassandra_cql module should be made available. diff --git a/changelog/62895.changed.md b/changelog/62895.changed.md deleted file mode 100644 index d5bff3866b9d..000000000000 --- a/changelog/62895.changed.md +++ /dev/null @@ -1 +0,0 @@ -Removed GPG_1_3_1 check diff --git a/changelog/62900.fixed.md b/changelog/62900.fixed.md deleted file mode 100644 index 437c1edcef23..000000000000 --- a/changelog/62900.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Updating various MongoDB module functions to work with latest version of pymongo. diff --git a/changelog/62914.deprecated.md b/changelog/62914.deprecated.md deleted file mode 100644 index 555240fc1638..000000000000 --- a/changelog/62914.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -Removing manufacture grain which has been deprecated. diff --git a/changelog/62915.deprecated.md b/changelog/62915.deprecated.md deleted file mode 100644 index b17d168b3b2f..000000000000 --- a/changelog/62915.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -Removing deprecated utils/boto3_elasticsearch.py diff --git a/changelog/62917.deprecated.md b/changelog/62917.deprecated.md deleted file mode 100644 index 0b0f522a84b3..000000000000 --- a/changelog/62917.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -Removing support for the now deprecated _ext_nodes from salt/master.py. 
diff --git a/changelog/62933.fixed.md b/changelog/62933.fixed.md deleted file mode 100644 index 1b34722a729b..000000000000 --- a/changelog/62933.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Restored channel for Syndic minions to send job returns to the Salt master. diff --git a/changelog/62934.fixed.md b/changelog/62934.fixed.md deleted file mode 100644 index 7a12ce3dd5c6..000000000000 --- a/changelog/62934.fixed.md +++ /dev/null @@ -1 +0,0 @@ -removed _resolve_deps as it required a library that is not generally avalible. and switched to apt-get for everything as that can auto resolve dependencies. diff --git a/changelog/62937.fixed.md b/changelog/62937.fixed.md deleted file mode 100644 index 09af559e1ec4..000000000000 --- a/changelog/62937.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang diff --git a/changelog/62940.fixed.md b/changelog/62940.fixed.md deleted file mode 100644 index 1bb38d7a71f7..000000000000 --- a/changelog/62940.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Allow root user to modify crontab lines for non-root users (except AIX and Solaris). Align crontab line changes with the file ones and also with listing crontab. 
diff --git a/changelog/62942.fixed.md b/changelog/62942.fixed.md deleted file mode 100644 index f11a010d5889..000000000000 --- a/changelog/62942.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix systemd_service.* functions hard code relative command name diff --git a/changelog/62953.fixed.md b/changelog/62953.fixed.md deleted file mode 100644 index df38b5c4ab01..000000000000 --- a/changelog/62953.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix file.symlink backupname operation can copy remote contents to local disk diff --git a/changelog/62968.fixed.md b/changelog/62968.fixed.md deleted file mode 100644 index 891c319c4230..000000000000 --- a/changelog/62968.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Issue #62968: Fix issue where cloud deployments were putting the keys in the wrong location on Windows hosts diff --git a/changelog/62977.fixed.md b/changelog/62977.fixed.md deleted file mode 100644 index da2b640a6400..000000000000 --- a/changelog/62977.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed gpg_passphrase issue with gpg decrypt/encrypt functions diff --git a/changelog/62978.added.md b/changelog/62978.added.md deleted file mode 100644 index 3262ce53a0b5..000000000000 --- a/changelog/62978.added.md +++ /dev/null @@ -1 +0,0 @@ -Added output and bare functionality to export_key gpg module function diff --git a/changelog/62983.added.md b/changelog/62983.added.md deleted file mode 100644 index 673c9f2239ad..000000000000 --- a/changelog/62983.added.md +++ /dev/null @@ -1 +0,0 @@ -Add keyvalue serializer for environment files diff --git a/changelog/62986.fixed.md b/changelog/62986.fixed.md deleted file mode 100644 index e2e5c1b029aa..000000000000 --- a/changelog/62986.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix file.tidied FileNotFoundError diff --git a/changelog/62988.fixed.md b/changelog/62988.fixed.md deleted file mode 100644 index b525e2f38ac8..000000000000 --- a/changelog/62988.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed bug where module.wait states were detected as running legacy 
module.run syntax diff --git a/changelog/62993.fixed.md b/changelog/62993.fixed.md deleted file mode 100644 index de6e8dca6e74..000000000000 --- a/changelog/62993.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed issue with win_wua module where it wouldn't load if the CryptSvc was set to Manual start diff --git a/changelog/63012.changed.md b/changelog/63012.changed.md deleted file mode 100644 index ab5dfae2f380..000000000000 --- a/changelog/63012.changed.md +++ /dev/null @@ -1 +0,0 @@ -Requisite state chunks now all consistently contain `__id__`, `__sls__` and `name`. diff --git a/changelog/63013.fixed.md b/changelog/63013.fixed.md deleted file mode 100644 index 28cd29c7f372..000000000000 --- a/changelog/63013.fixed.md +++ /dev/null @@ -1,3 +0,0 @@ -The `__opts__` dunder dictionary is now added to the loader's `pack` if not -already present, which makes it accessible via the -`salt.loader.context.NamedLoaderContext` class. diff --git a/changelog/63024.fixed.md b/changelog/63024.fixed.md deleted file mode 100644 index e3544b9bc146..000000000000 --- a/changelog/63024.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Issue #63024: Fix issue where grains and config data were being place in the wrong location on Windows hosts diff --git a/changelog/63025.fixed.md b/changelog/63025.fixed.md deleted file mode 100644 index 11546d63bb17..000000000000 --- a/changelog/63025.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix btrfs.subvolume_snapshot command failing diff --git a/changelog/63033.fixed.md b/changelog/63033.fixed.md deleted file mode 100644 index 11687bd183c3..000000000000 --- a/changelog/63033.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix file.retention_schedule always reports changes diff --git a/changelog/63042.added.md b/changelog/63042.added.md deleted file mode 100644 index 2999b8fcb91c..000000000000 --- a/changelog/63042.added.md +++ /dev/null @@ -1 +0,0 @@ -Add ability to ignore symlinks in file.tidied diff --git a/changelog/63050.changed.md b/changelog/63050.changed.md deleted file 
mode 100644 index c8323b39ab8a..000000000000 --- a/changelog/63050.changed.md +++ /dev/null @@ -1,5 +0,0 @@ -netapi_enable_clients option to allow enabling/disabling of clients in salt-api. -By default all clients will now be disabled. Users of salt-api will need -to update their master config to enable the clients that they use. Not adding -the netapi_enable_clients option with required clients to the master config will -disable salt-api. diff --git a/changelog/63058.fixed.md b/changelog/63058.fixed.md deleted file mode 100644 index aab3c539bc06..000000000000 --- a/changelog/63058.fixed.md +++ /dev/null @@ -1,3 +0,0 @@ -Fix mongo authentication for mongo ext_pillar and mongo returner - -This fix also include the ability to use the mongo connection string for mongo ext_pillar diff --git a/changelog/63067.added.md b/changelog/63067.added.md deleted file mode 100644 index bd99632107bd..000000000000 --- a/changelog/63067.added.md +++ /dev/null @@ -1 +0,0 @@ -salt-cloud support IMDSv2 tokens when using 'use-instance-role-credentials' diff --git a/changelog/63093.added.md b/changelog/63093.added.md deleted file mode 100644 index 1a401125088c..000000000000 --- a/changelog/63093.added.md +++ /dev/null @@ -1 +0,0 @@ -Add ability for file.symlink to not set ownership on existing links diff --git a/changelog/63095.added.md b/changelog/63095.added.md deleted file mode 100644 index 1802d356fd49..000000000000 --- a/changelog/63095.added.md +++ /dev/null @@ -1 +0,0 @@ -Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation diff --git a/changelog/63095.deprecated.md b/changelog/63095.deprecated.md deleted file mode 100644 index d652a377be00..000000000000 --- a/changelog/63095.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -Deprecating the Salt Slack engine in favor of the Salt Slack Bolt Engine. 
diff --git a/changelog/63098.added.md b/changelog/63098.added.md deleted file mode 100644 index db6ef8d4f580..000000000000 --- a/changelog/63098.added.md +++ /dev/null @@ -1 +0,0 @@ -Add functions that will return the underlying block device, mount point, and filesystem type for a given path diff --git a/changelog/63103.fixed.md b/changelog/63103.fixed.md deleted file mode 100644 index 65595f61ff8a..000000000000 --- a/changelog/63103.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. diff --git a/changelog/63120.fixed.md b/changelog/63120.fixed.md deleted file mode 100644 index 80ed0d754e37..000000000000 --- a/changelog/63120.fixed.md +++ /dev/null @@ -1 +0,0 @@ -TCP transport documentation now contains proper master/minion-side filtering information diff --git a/changelog/63128.added.md b/changelog/63128.added.md deleted file mode 100644 index edbc81fb4817..000000000000 --- a/changelog/63128.added.md +++ /dev/null @@ -1 +0,0 @@ -Add ethtool execution and state module functions for pause diff --git a/changelog/63131.added.md b/changelog/63131.added.md deleted file mode 100644 index 3376c7e97cd7..000000000000 --- a/changelog/63131.added.md +++ /dev/null @@ -1 +0,0 @@ -Add boardname grain diff --git a/changelog/63145.fixed.md b/changelog/63145.fixed.md deleted file mode 100644 index 2e38588344c1..000000000000 --- a/changelog/63145.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fixed gpg.verify does not respect gnupghome diff --git a/changelog/63208.fixed.md b/changelog/63208.fixed.md deleted file mode 100644 index e8ee88a7b1a1..000000000000 --- a/changelog/63208.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Made pillar cache pass extra minion data as well diff --git a/changelog/63231.fixed.md b/changelog/63231.fixed.md deleted file mode 100644 index cda743d7bc4a..000000000000 --- a/changelog/63231.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix serious performance issues with the file.tidied module diff --git 
a/changelog/63248.added.md b/changelog/63248.added.md deleted file mode 100644 index 5c72116f0558..000000000000 --- a/changelog/63248.added.md +++ /dev/null @@ -1 +0,0 @@ -Added management of ECDSA/EdDSA private keys with x509 modules in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. diff --git a/changelog/63249.added.md b/changelog/63249.added.md deleted file mode 100644 index 0b56b2146f23..000000000000 --- a/changelog/63249.added.md +++ /dev/null @@ -1 +0,0 @@ -Added x509 modules support for different output formats in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. diff --git a/changelog/63315.added.md b/changelog/63315.added.md deleted file mode 100644 index 950abc93277b..000000000000 --- a/changelog/63315.added.md +++ /dev/null @@ -1 +0,0 @@ -Added deprecation_warning test state for ensuring that deprecation warnings are correctly emitted. diff --git a/changelog/63316.added.md b/changelog/63316.added.md deleted file mode 100644 index 253308c8ec1b..000000000000 --- a/changelog/63316.added.md +++ /dev/null @@ -1,3 +0,0 @@ -Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id. -This allows users to enable state_events on a per use basis rather than having to -enable them globally for all state runs. diff --git a/changelog/63317.fixed.md b/changelog/63317.fixed.md deleted file mode 100644 index 8059d90b12d0..000000000000 --- a/changelog/63317.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. 
diff --git a/changelog/63350.fixed.md b/changelog/63350.fixed.md deleted file mode 100644 index a544d15d1e43..000000000000 --- a/changelog/63350.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Import StrictVersion and LooseVersion from setuptools.distutils.verison or setuptools._distutils.version, if first not available diff --git a/changelog/63356.added.md b/changelog/63356.added.md deleted file mode 100644 index 994e7a70d573..000000000000 --- a/changelog/63356.added.md +++ /dev/null @@ -1 +0,0 @@ -Allow max queue size setting for state runs to prevent performance problems from queue growth diff --git a/changelog/63383.changed.md b/changelog/63383.changed.md deleted file mode 100644 index d0553dc06241..000000000000 --- a/changelog/63383.changed.md +++ /dev/null @@ -1 +0,0 @@ -Stop relying on `salt/_version.py` to write Salt's version. Instead use `salt/_version.txt` which only contains the version string. diff --git a/changelog/63383.deprecated.md b/changelog/63383.deprecated.md deleted file mode 100644 index 5131a151314d..000000000000 --- a/changelog/63383.deprecated.md +++ /dev/null @@ -1 +0,0 @@ -`salt.utils.version.StrictVersion` is now deprecated and it's use should be replaced with `salt.utils.version.Version`. diff --git a/changelog/63590.fixed.md b/changelog/63590.fixed.md deleted file mode 100644 index 0b7ea3dae4a8..000000000000 --- a/changelog/63590.fixed.md +++ /dev/null @@ -1 +0,0 @@ -When the shell is passed as powershell or pwsh, only wrapper the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended. diff --git a/changelog/63595.changed.md b/changelog/63595.changed.md deleted file mode 100644 index ab38a24a4a5e..000000000000 --- a/changelog/63595.changed.md +++ /dev/null @@ -1 +0,0 @@ -Set enable_fqdns_grains to be False by default. 
diff --git a/changelog/63596.fixed.md b/changelog/63596.fixed.md deleted file mode 100644 index 38c412786a01..000000000000 --- a/changelog/63596.fixed.md +++ /dev/null @@ -1 +0,0 @@ -LGPO: Added support for "Relax minimum password length limits" diff --git a/changelog/63606.added.md b/changelog/63606.added.md deleted file mode 100644 index 563eed173f85..000000000000 --- a/changelog/63606.added.md +++ /dev/null @@ -1 +0,0 @@ -Add support of exposing meta_server_grains for Azure VMs diff --git a/changelog/63699.fixed.md b/changelog/63699.fixed.md deleted file mode 100644 index 9a125c959c49..000000000000 --- a/changelog/63699.fixed.md +++ /dev/null @@ -1 +0,0 @@ -When a job is disabled only increase it's _next_fire_time value if the job would have run at the current time, eg. the current _next_fire_time == now. diff --git a/changelog/63710.changed.md b/changelog/63710.changed.md deleted file mode 100644 index 799e0a7aafa3..000000000000 --- a/changelog/63710.changed.md +++ /dev/null @@ -1 +0,0 @@ -Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file. diff --git a/changelog/63729.fixed.md b/changelog/63729.fixed.md deleted file mode 100644 index 0c6aff3b508c..000000000000 --- a/changelog/63729.fixed.md +++ /dev/null @@ -1 +0,0 @@ -Check file is not empty before attempting to read pillar disk cache file diff --git a/changelog/63827.added.md b/changelog/63827.added.md deleted file mode 100644 index 08c3acdb0501..000000000000 --- a/changelog/63827.added.md +++ /dev/null @@ -1 +0,0 @@ -Include the version of `relenv` in the versions report. diff --git a/changelog/63835.fixed.md b/changelog/63835.fixed.md deleted file mode 100644 index 78709b4e74dc..000000000000 --- a/changelog/63835.fixed.md +++ /dev/null @@ -1 +0,0 @@ -fix cherrypy 400 error output to be less generic. 
diff --git a/changelog/63874.removed.md b/changelog/63874.removed.md deleted file mode 100644 index 2760e312667f..000000000000 --- a/changelog/63874.removed.md +++ /dev/null @@ -1 +0,0 @@ -Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. diff --git a/changelog/63879.fixed.md b/changelog/63879.fixed.md deleted file mode 100644 index 149567a41fa3..000000000000 --- a/changelog/63879.fixed.md +++ /dev/null @@ -1 +0,0 @@ -remove eval and update logging to be more informative on bad config diff --git a/changelog/65193.fixed.md b/changelog/65193.fixed.md new file mode 100644 index 000000000000..48a7e76e461d --- /dev/null +++ b/changelog/65193.fixed.md @@ -0,0 +1,2 @@ +Fix issue with openscap when the error was outside the expected scope. It now +returns failed with the error code and the error diff --git a/changelog/65670.fixed.md b/changelog/65670.fixed.md new file mode 100644 index 000000000000..54728d69d43a --- /dev/null +++ b/changelog/65670.fixed.md @@ -0,0 +1 @@ +Fixed Salt-SSH pillar rendering and state rendering with nested SSH calls when called via saltutil.cmd or in an orchestration diff --git a/cicd/amis.yml b/cicd/amis.yml index f1a3b0f5a9fc..bebd8141fbb4 100644 --- a/cicd/amis.yml +++ b/cicd/amis.yml @@ -1,15 +1 @@ -alma-8-x86_64: ami-0e38e779c7e9a58cb -amazon-2-x86_64: ami-0808ff1e4bd65c26e -arch-lts-x86_64: ami-05cb9be3cdb8f9ded -centos-7-x86_64: ami-062de6e6b1bea1987 -centosstream-9-x86_64: ami-0a8429c3f5f933d85 -debian-10-amd64: ami-01c864e5f37943e82 -debian-11-arm64: ami-0f0687d3bed6d0f9e -debian-11-amd64: ami-097a7bcb88c641a4f -opensuse-15-x86_64: ami-053085867e8e2a437 -photon-3-x86_64: ami-0c60abf0038288d5b -ubuntu-2004-arm64: ami-090c51d35099a31fc -ubuntu-2004-amd64: ami-0d6b6f0ee825d6cc0 -ubuntu-2204-amd64: ami-0eba94938aad408da -windows-2016-x64: ami-09e918f01701cbaca -windows-2019-x64: ami-072893e306bcedd6e +centosstream-9-x86_64: ami-0793e1741f291eaf9 diff --git a/cicd/golden-images.json b/cicd/golden-images.json 
index ed3b9f8deac4..e886e13ed824 100644 --- a/cicd/golden-images.json +++ b/cicd/golden-images.json @@ -1,28 +1,78 @@ { + "almalinux-8-arm64": { + "ami": "ami-0a2b327b74836f618", + "ami_description": "CI Image of AlmaLinux 8 arm64", + "ami_name": "salt-project/ci/almalinux/8/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "ec2-user" + }, "almalinux-8": { - "ami": "ami-0e38e779c7e9a58cb", + "ami": "ami-03d4319831692a030", "ami_description": "CI Image of AlmaLinux 8 x86_64", - "ami_name": "salt-project/ci/almalinux/8/x86_64/20230306.1703", + "ami_name": "salt-project/ci/almalinux/8/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "ec2-user" }, + "almalinux-9-arm64": { + "ami": "ami-01e0f60c59c6fe8f3", + "ami_description": "CI Image of AlmaLinux 9 arm64", + "ami_name": "salt-project/ci/almalinux/9/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "ec2-user" + }, "almalinux-9": { - "ami": "ami-0e478b3a983a8aa3a", + "ami": "ami-0dbbac81b50ebb8b4", "ami_description": "CI Image of AlmaLinux 9 x86_64", - "ami_name": "salt-project/ci/almalinux/9/x86_64/20230306.1703", + "ami_name": "salt-project/ci/almalinux/9/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "ec2-user" }, + "amazonlinux-2-arm64": { + "ami": "ami-05cc59dcbf59085f1", + "ami_description": "CI Image of AmazonLinux 2 arm64", + "ami_name": "salt-project/ci/amazonlinux/2/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "ec2-user" + }, "amazonlinux-2": { - "ami": "ami-0808ff1e4bd65c26e", + "ami": 
"ami-07f715092c8ed2451", "ami_description": "CI Image of AmazonLinux 2 x86_64", - "ami_name": "salt-project/ci/amazonlinux/2/x86_64/20230306.1703", + "ami_name": "salt-project/ci/amazonlinux/2/x86_64/20231126.1417", + "arch": "x86_64", + "cloudwatch-agent-available": "true", + "instance_type": "t3a.large", + "is_windows": "false", + "ssh_username": "ec2-user" + }, + "amazonlinux-2023-arm64": { + "ami": "ami-074502af4314eb812", + "ami_description": "CI Image of AmazonLinux 2023 arm64", + "ami_name": "salt-project/ci/amazonlinux/2023/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "ec2-user" + }, + "amazonlinux-2023": { + "ami": "ami-0a1059334d3373321", + "ami_description": "CI Image of AmazonLinux 2023 x86_64", + "ami_name": "salt-project/ci/amazonlinux/2023/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -30,49 +80,89 @@ "ssh_username": "ec2-user" }, "archlinux-lts": { - "ami": "ami-05cb9be3cdb8f9ded", + "ami": "ami-0430452d2dfbb8f4b", "ami_description": "CI Image of ArchLinux lts x86_64", - "ami_name": "salt-project/ci/archlinux/lts/x86_64/20230306.1703", + "ami_name": "salt-project/ci/archlinux/lts/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "false", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "arch" }, + "centos-7-arm64": { + "ami": "ami-0fc26a930a59d1417", + "ami_description": "CI Image of CentOS 7 arm64", + "ami_name": "salt-project/ci/centos/7/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "centos" + }, "centos-7": { - "ami": "ami-062de6e6b1bea1987", + "ami": "ami-0532c2c5f18771fa8", "ami_description": "CI Image of CentOS 7 x86_64", - "ami_name": "salt-project/ci/centos/7/x86_64/20230306.1703", + "ami_name": 
"salt-project/ci/centos/7/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "centos" }, + "centosstream-8-arm64": { + "ami": "ami-0916df690c02e0af0", + "ami_description": "CI Image of CentOSStream 8 arm64", + "ami_name": "salt-project/ci/centosstream/8/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "centos" + }, "centosstream-8": { - "ami": "ami-00b1e2790dced55ba", + "ami": "ami-06cf36f0232c681e2", "ami_description": "CI Image of CentOSStream 8 x86_64", - "ami_name": "salt-project/ci/centosstream/8/x86_64/20230306.1703", + "ami_name": "salt-project/ci/centosstream/8/x86_64/20231126.1416", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", - "ssh_username": "cloud-user" + "ssh_username": "centos" + }, + "centosstream-9-arm64": { + "ami": "ami-094e17e254aa77811", + "ami_description": "CI Image of CentOSStream 9 arm64", + "ami_name": "salt-project/ci/centosstream/9/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "ec2-user" }, "centosstream-9": { - "ami": "ami-0a8429c3f5f933d85", + "ami": "ami-0793e1741f291eaf9", "ami_description": "CI Image of CentOSStream 9 x86_64", - "ami_name": "salt-project/ci/centosstream/9/x86_64/20230306.1703", + "ami_name": "salt-project/ci/centosstream/9/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "ec2-user" }, + "debian-10-arm64": { + "ami": "ami-0c0b1bdab1b3c9733", + "ami_description": "CI Image of Debian 10 arm64", + "ami_name": "salt-project/ci/debian/10/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "false", + "instance_type": "m6g.large", + 
"is_windows": "false", + "ssh_username": "admin" + }, "debian-10": { - "ami": "ami-01c864e5f37943e82", + "ami": "ami-082605fda5afd9131", "ami_description": "CI Image of Debian 10 x86_64", - "ami_name": "salt-project/ci/debian/10/x86_64/20230306.1703", + "ami_name": "salt-project/ci/debian/10/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -80,9 +170,9 @@ "ssh_username": "admin" }, "debian-11-arm64": { - "ami": "ami-0f0687d3bed6d0f9e", + "ami": "ami-0df6946d840d24ced", "ami_description": "CI Image of Debian 11 arm64", - "ami_name": "salt-project/ci/debian/11/arm64/20230306.1703", + "ami_name": "salt-project/ci/debian/11/arm64/20231126.1417", "arch": "arm64", "cloudwatch-agent-available": "false", "instance_type": "m6g.large", @@ -90,39 +180,69 @@ "ssh_username": "admin" }, "debian-11": { - "ami": "ami-097a7bcb88c641a4f", + "ami": "ami-0c2198080c953861d", "ami_description": "CI Image of Debian 11 x86_64", - "ami_name": "salt-project/ci/debian/11/x86_64/20230306.1703", + "ami_name": "salt-project/ci/debian/11/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "admin" }, - "fedora-36": { - "ami": "ami-04d3d4001c7eb6084", - "ami_description": "CI Image of Fedora 36 x86_64", - "ami_name": "salt-project/ci/fedora/36/x86_64/20230306.1703", + "debian-12-arm64": { + "ami": "ami-050b69eb0e0a66373", + "ami_description": "CI Image of Debian 12 arm64", + "ami_name": "salt-project/ci/debian/12/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "false", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "admin" + }, + "debian-12": { + "ami": "ami-032e397b97865f83e", + "ami_description": "CI Image of Debian 12 x86_64", + "ami_name": "salt-project/ci/debian/12/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", 
"is_windows": "false", + "ssh_username": "admin" + }, + "fedora-37-arm64": { + "ami": "ami-0000739b5d4971ba1", + "ami_description": "CI Image of Fedora 37 arm64", + "ami_name": "salt-project/ci/fedora/37/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", "ssh_username": "fedora" }, "fedora-37": { - "ami": "ami-047f01d4d1b8eade2", + "ami": "ami-086af8fe37696acd6", "ami_description": "CI Image of Fedora 37 x86_64", - "ami_name": "salt-project/ci/fedora/37/x86_64/20230306.1703", + "ami_name": "salt-project/ci/fedora/37/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "fedora" }, + "fedora-38-arm64": { + "ami": "ami-0a078cdd3a57ef342", + "ami_description": "CI Image of Fedora 38 arm64", + "ami_name": "salt-project/ci/fedora/38/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "fedora" + }, "fedora-38": { - "ami": "ami-04600a36ca6b94c1c", + "ami": "ami-0566e7f93c5cf6afc", "ami_description": "CI Image of Fedora 38 x86_64", - "ami_name": "salt-project/ci/fedora/38/x86_64/20230306.1703", + "ami_name": "salt-project/ci/fedora/38/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -130,29 +250,69 @@ "ssh_username": "fedora" }, "opensuse-15": { - "ami": "ami-053085867e8e2a437", + "ami": "ami-0c64c574d488d33f6", "ami_description": "CI Image of Opensuse 15 x86_64", - "ami_name": "salt-project/ci/opensuse/15/x86_64/20230306.1703", + "ami_name": "salt-project/ci/opensuse/15/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "ec2-user" }, + "photonos-3-arm64": { + "ami": "ami-03ac724168ce02eed", + "ami_description": "CI Image of PhotonOS 3 
arm64", + "ami_name": "salt-project/ci/photonos/3/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "root" + }, "photonos-3": { - "ami": "ami-0c60abf0038288d5b", + "ami": "ami-0072dfd1f7bc5f586", "ami_description": "CI Image of PhotonOS 3 x86_64", - "ami_name": "salt-project/ci/photonos/3/x86_64/20230306.1703", + "ami_name": "salt-project/ci/photonos/3/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", "is_windows": "false", "ssh_username": "root" }, + "photonos-4-arm64": { + "ami": "ami-05a215fe4cf29227b", + "ami_description": "CI Image of PhotonOS 4 arm64", + "ami_name": "salt-project/ci/photonos/4/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "root" + }, "photonos-4": { - "ami": "ami-0f05edd4afe1b852b", + "ami": "ami-06addda42fc8c5db3", "ami_description": "CI Image of PhotonOS 4 x86_64", - "ami_name": "salt-project/ci/photonos/4/x86_64/20230306.1703", + "ami_name": "salt-project/ci/photonos/4/x86_64/20231126.1417", + "arch": "x86_64", + "cloudwatch-agent-available": "true", + "instance_type": "t3a.large", + "is_windows": "false", + "ssh_username": "root" + }, + "photonos-5-arm64": { + "ami": "ami-0e78012df225dbe96", + "ami_description": "CI Image of PhotonOS 5 arm64", + "ami_name": "salt-project/ci/photonos/5/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "root" + }, + "photonos-5": { + "ami": "ami-0fc61f964bc262714", + "ami_description": "CI Image of PhotonOS 5 x86_64", + "ami_name": "salt-project/ci/photonos/5/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -160,9 +320,9 @@ "ssh_username": "root" }, "ubuntu-20.04-arm64": 
{ - "ami": "ami-090c51d35099a31fc", + "ami": "ami-0cc504307b587cd77", "ami_description": "CI Image of Ubuntu 20.04 arm64", - "ami_name": "salt-project/ci/ubuntu/20.04/arm64/20230306.1703", + "ami_name": "salt-project/ci/ubuntu/20.04/arm64/20231126.1417", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -170,9 +330,9 @@ "ssh_username": "ubuntu" }, "ubuntu-20.04": { - "ami": "ami-0d6b6f0ee825d6cc0", + "ami": "ami-03376fca39f6d9186", "ami_description": "CI Image of Ubuntu 20.04 x86_64", - "ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20230306.1703", + "ami_name": "salt-project/ci/ubuntu/20.04/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -180,9 +340,9 @@ "ssh_username": "ubuntu" }, "ubuntu-22.04-arm64": { - "ami": "ami-01b52de1f19e79343", + "ami": "ami-0be361d529bb46410", "ami_description": "CI Image of Ubuntu 22.04 arm64", - "ami_name": "salt-project/ci/ubuntu/22.04/arm64/20230306.1703", + "ami_name": "salt-project/ci/ubuntu/22.04/arm64/20231126.1417", "arch": "arm64", "cloudwatch-agent-available": "true", "instance_type": "m6g.large", @@ -190,9 +350,29 @@ "ssh_username": "ubuntu" }, "ubuntu-22.04": { - "ami": "ami-0eba94938aad408da", + "ami": "ami-0c9d29f29868da4ce", "ami_description": "CI Image of Ubuntu 22.04 x86_64", - "ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20230306.1704", + "ami_name": "salt-project/ci/ubuntu/22.04/x86_64/20231126.1417", + "arch": "x86_64", + "cloudwatch-agent-available": "true", + "instance_type": "t3a.large", + "is_windows": "false", + "ssh_username": "ubuntu" + }, + "ubuntu-23.04-arm64": { + "ami": "ami-0b80ab7ead3c7d289", + "ami_description": "CI Image of Ubuntu 23.04 arm64", + "ami_name": "salt-project/ci/ubuntu/23.04/arm64/20231126.1417", + "arch": "arm64", + "cloudwatch-agent-available": "true", + "instance_type": "m6g.large", + "is_windows": "false", + "ssh_username": "ubuntu" + }, + "ubuntu-23.04": { + "ami": 
"ami-0d17dce1842e37811", + "ami_description": "CI Image of Ubuntu 23.04 x86_64", + "ami_name": "salt-project/ci/ubuntu/23.04/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.large", @@ -200,9 +380,9 @@ "ssh_username": "ubuntu" }, "windows-2016": { - "ami": "ami-09e918f01701cbaca", + "ami": "ami-043db64b3b46a804c", "ami_description": "CI Image of Windows 2016 x86_64", - "ami_name": "salt-project/ci/windows/2016/x86_64/20230306.1703", + "ami_name": "salt-project/ci/windows/2016/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", @@ -210,9 +390,9 @@ "ssh_username": "Administrator" }, "windows-2019": { - "ami": "ami-072893e306bcedd6e", + "ami": "ami-0f7a8dc8862bff13f", "ami_description": "CI Image of Windows 2019 x86_64", - "ami_name": "salt-project/ci/windows/2019/x86_64/20230306.1703", + "ami_name": "salt-project/ci/windows/2019/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", @@ -220,9 +400,9 @@ "ssh_username": "Administrator" }, "windows-2022": { - "ami": "ami-0cfbb2e681636b902", + "ami": "ami-05a829f3649aa33d6", "ami_description": "CI Image of Windows 2022 x86_64", - "ami_name": "salt-project/ci/windows/2022/x86_64/20230306.1703", + "ami_name": "salt-project/ci/windows/2022/x86_64/20231126.1417", "arch": "x86_64", "cloudwatch-agent-available": "true", "instance_type": "t3a.xlarge", diff --git a/cicd/shared-gh-workflows-context.yml b/cicd/shared-gh-workflows-context.yml new file mode 100644 index 000000000000..c2691494b371 --- /dev/null +++ b/cicd/shared-gh-workflows-context.yml @@ -0,0 +1,3 @@ +nox_version: "2022.8.7" +python_version: "3.10.13" +relenv_version: "0.14.2" diff --git a/conf/cloud b/conf/cloud index 5a379e674562..105fc0785924 100644 --- a/conf/cloud +++ b/conf/cloud @@ -29,7 +29,7 @@ # One of 'garbage', 'trace', 'debug', 'info', 'warning', 'error', 'critical'. 
# # The following log levels are considered INSECURE and may log sensitive data: -# ['garbage', 'trace', 'debug'] +# ['profile', 'garbage', 'trace', 'debug', 'all'] # # Default: 'info' # diff --git a/conf/master b/conf/master index d655e93d45fb..1e951bacfded 100644 --- a/conf/master +++ b/conf/master @@ -60,15 +60,19 @@ # #cachedir: /var/cache/salt/master -# Directory for custom modules. This directory can contain subdirectories for -# each of Salt's module types such as "runners", "output", "wheel", "modules", -# "states", "returners", "engines", "utils", etc. +# Directory where custom modules sync to. This directory can contain +# subdirectories for each of Salt's module types such as "runners", +# "output", "wheel", "modules", "states", "returners", "engines", +# "utils", etc. +# +# Note, any directories or files not found in the `module_dirs` +# location will be removed from the extension_modules path. + #extension_modules: /var/cache/salt/master/extmods # Directory for custom modules. This directory can contain subdirectories for # each of Salt's module types such as "runners", "output", "wheel", "modules", # "states", "returners", "engines", "utils", etc. -# Like 'extension_modules' but can take an array of paths #module_dirs: [] # Verify and set permissions on configuration directories at startup: @@ -1021,6 +1025,7 @@ # If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. #pillar_cache_ttl: 3600 # If and only if a master has set `pillar_cache: True`, one of several storage providers @@ -1193,7 +1198,7 @@ # One of 'garbage', 'trace', 'debug', info', 'warning', 'error', 'critical'. 
# # The following log levels are considered INSECURE and may log sensitive data: -# ['garbage', 'trace', 'debug'] +# ['profile', 'garbage', 'trace', 'debug', 'all'] # #log_level: warning diff --git a/conf/minion b/conf/minion index eeef626fa833..a54f1659a792 100644 --- a/conf/minion +++ b/conf/minion @@ -809,7 +809,7 @@ # One of 'garbage', 'trace', 'debug', 'info', 'warning', 'error', 'critical'. # # The following log levels are considered INSECURE and may log sensitive data: -# ['garbage', 'trace', 'debug'] +# ['profile', 'garbage', 'trace', 'debug', 'all'] # # Default: 'warning' #log_level: warning diff --git a/conf/proxy b/conf/proxy index 47d6ac668dfb..0cfe7b142250 100644 --- a/conf/proxy +++ b/conf/proxy @@ -545,7 +545,7 @@ # One of 'garbage', 'trace', 'debug', 'info', 'warning', 'error', 'critical'. # # The following log levels are considered INSECURE and may log sensitive data: -# ['garbage', 'trace', 'debug'] +# ['profile', 'garbage', 'trace', 'debug', 'all'] # # Default: 'warning' #log_level: warning diff --git a/conf/suse/master b/conf/suse/master index 7168441dc41d..863d87902402 100644 --- a/conf/suse/master +++ b/conf/suse/master @@ -950,6 +950,7 @@ syndic_user: salt # If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. 
#pillar_cache_ttl: 3600 # If and only if a master has set `pillar_cache: True`, one of several storage providers diff --git a/doc/_themes/saltstack2/layout.html b/doc/_themes/saltstack2/layout.html index 04bff89e1fb7..32fff3e60952 100644 --- a/doc/_themes/saltstack2/layout.html +++ b/doc/_themes/saltstack2/layout.html @@ -2,10 +2,16 @@ {%- endblock %} -{% set xxx = rellinks.extend([ +{%- set link_text = [] %} +{%- for link_tuple in rellinks %} +{%- set _ = link_text.append(link_tuple[3]) %} +{%- endfor %} +{%- for rellink_add in [ ('glossary', 'Glossary', 'g', 'Glossary'), ('contents', 'Table of Contents', 't', 'Table of Contents'), -]) %} +] %} +{%- if rellink_add[3] not in link_text %}{% set _ = rellinks.append(rellink_add) %}{% endif %} +{%- endfor %} {%- set reldelim1 = reldelim1 is not defined and ' »' or reldelim1 %} {%- set reldelim2 = reldelim2 is not defined and ' |' or reldelim2 %} @@ -152,7 +158,7 @@ - +
- +
{% endif %} @@ -295,7 +301,7 @@ {% else %} {% endif %} #}--> - + {% if build_type=="next" %} diff --git a/doc/conf.py b/doc/conf.py index 653d912c20d3..2b60d5b0a4e4 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -3,12 +3,18 @@ Sphinx documentation for Salt """ import os +import pathlib import re +import shutil import sys +import textwrap import time import types from sphinx.directives.other import TocTree +from sphinx.util import logging + +log = logging.getLogger(__name__) # -- Add paths to PYTHONPATH --------------------------------------------------- try: @@ -174,7 +180,7 @@ .. _`salt-users`: https://groups.google.com/forum/#!forum/salt-users .. _`salt-announce`: https://groups.google.com/forum/#!forum/salt-announce .. _`salt-packagers`: https://groups.google.com/forum/#!forum/salt-packagers -.. _`salt-slack`: https://join.slack.com/t/saltstackcommunity/shared_invite/zt-3av8jjyf-oBQ2M0vhXOhJpNpRkPWBvg +.. _`salt-slack`: https://via.vmw.com/salt-slack .. |windownload| raw:: html

Python3 x86: '%s' just for this build ...", + fpath.relative_to(docs_path), + dest.relative_to(docs_path), + ) + app._copied_release_files.append(dest) + shutil.copyfile(fpath, dest) + + +def copy_release_templates_post(app, exception): + docs_path = pathlib.Path(docs_basepath) + for fpath in app._copied_release_files: + log.info( + "The release file '%s' was copied for the build, but its not in " + "version control system. Deleting.", + fpath.relative_to(docs_path), + ) + fpath.unlink() + + +def extract_module_deprecations(app, what, name, obj, options, lines): + """ + Add a warning to the modules being deprecated into extensions. + """ + # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#event-autodoc-process-docstring + if what != "module": + # We're only interested in module deprecations + return + + try: + deprecated_info = obj.__deprecated__ + except AttributeError: + # The module is not deprecated + return + + _version, _extension, _url = deprecated_info + msg = textwrap.dedent( + f""" + .. warning:: + + This module will be removed from Salt in version {_version} in favor of + the `{_extension} Salt Extension <{_url}>`_. + + """ + ) + # Modify the docstring lines in-place + lines[:] = msg.splitlines() + lines + + def setup(app): app.add_directive("releasestree", ReleasesTree) app.connect("autodoc-skip-member", skip_mod_init_member) + app.connect("builder-inited", copy_release_templates_pre) + app.connect("build-finished", copy_release_templates_post) + app.connect("autodoc-process-docstring", extract_module_deprecations) diff --git a/doc/man/salt-api.1 b/doc/man/salt-api.1 index ed4d4d314a3b..214422b243a4 100644 --- a/doc/man/salt-api.1 +++ b/doc/man/salt-api.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-API" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." 
"3006.0" "Salt" +.TH "SALT-API" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME salt-api \- salt-api Command .sp diff --git a/doc/man/salt-call.1 b/doc/man/salt-call.1 index 1f2227dbfa05..2e6698b8d4ac 100644 --- a/doc/man/salt-call.1 +++ b/doc/man/salt-call.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CALL" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT-CALL" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME salt-call \- salt-call Documentation .SH SYNOPSIS diff --git a/doc/man/salt-cloud.1 b/doc/man/salt-cloud.1 index b8d8d8019c54..7dc0450cc5cd 100644 --- a/doc/man/salt-cloud.1 +++ b/doc/man/salt-cloud.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CLOUD" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT-CLOUD" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME salt-cloud \- Salt Cloud Command .sp diff --git a/doc/man/salt-cp.1 b/doc/man/salt-cp.1 index 22c0620dcddc..015e1b3461ce 100644 --- a/doc/man/salt-cp.1 +++ b/doc/man/salt-cp.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-CP" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT-CP" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." 
"3006.5" "Salt" .SH NAME salt-cp \- salt-cp Documentation .sp diff --git a/doc/man/salt-key.1 b/doc/man/salt-key.1 index c45d1da04675..f346507ed58d 100644 --- a/doc/man/salt-key.1 +++ b/doc/man/salt-key.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-KEY" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT-KEY" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME salt-key \- salt-key Documentation .SH SYNOPSIS diff --git a/doc/man/salt-master.1 b/doc/man/salt-master.1 index 90cb52201b5f..1b54882a9b2e 100644 --- a/doc/man/salt-master.1 +++ b/doc/man/salt-master.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-MASTER" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT-MASTER" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME salt-master \- salt-master Documentation .sp diff --git a/doc/man/salt-minion.1 b/doc/man/salt-minion.1 index b993e9a3883f..d02a0bfa2e54 100644 --- a/doc/man/salt-minion.1 +++ b/doc/man/salt-minion.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-MINION" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT-MINION" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." 
"3006.5" "Salt" .SH NAME salt-minion \- salt-minion Documentation .sp diff --git a/doc/man/salt-proxy.1 b/doc/man/salt-proxy.1 index d6c2437dd525..0f5b3b8e40e3 100644 --- a/doc/man/salt-proxy.1 +++ b/doc/man/salt-proxy.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-PROXY" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT-PROXY" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME salt-proxy \- salt-proxy Documentation .sp diff --git a/doc/man/salt-run.1 b/doc/man/salt-run.1 index 13bb1b407fce..1535cbde86c5 100644 --- a/doc/man/salt-run.1 +++ b/doc/man/salt-run.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-RUN" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT-RUN" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME salt-run \- salt-run Documentation .sp diff --git a/doc/man/salt-ssh.1 b/doc/man/salt-ssh.1 index d047f827d410..5771453ad143 100644 --- a/doc/man/salt-ssh.1 +++ b/doc/man/salt-ssh.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-SSH" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT-SSH" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." 
"3006.5" "Salt" .SH NAME salt-ssh \- salt-ssh Documentation .SH SYNOPSIS diff --git a/doc/man/salt-syndic.1 b/doc/man/salt-syndic.1 index 028bbcce8b22..7a42e21c7676 100644 --- a/doc/man/salt-syndic.1 +++ b/doc/man/salt-syndic.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT-SYNDIC" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT-SYNDIC" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME salt-syndic \- salt-syndic Documentation .sp diff --git a/doc/man/salt.1 b/doc/man/salt.1 index 884ba1cdee6d..127fce42145c 100644 --- a/doc/man/salt.1 +++ b/doc/man/salt.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME salt \- salt .SH SYNOPSIS diff --git a/doc/man/salt.7 b/doc/man/salt.7 index d84a96debc45..a2232d27d5ae 100644 --- a/doc/man/salt.7 +++ b/doc/man/salt.7 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SALT" "7" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SALT" "7" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME salt \- Salt Documentation .SH SALT PROJECT @@ -84,7 +84,8 @@ outages, common administration problems, or other important events. 
.UNINDENT .SS About our sponsors .sp -Salt powers VMware\(aqs \fI\%vRealize Automation SaltStack Config\fP, and can be found +Salt powers VMware\(aqs \fI\%VMware Aria Automation Config\fP +(previously vRealize Automation SaltStack Config / SaltStack Enterprise), and can be found under the hood of products from Juniper, Cisco, Cloudflare, Nutanix, SUSE, and Tieto, to name a few. .sp @@ -1301,7 +1302,7 @@ Now you can run your tests: .sp .nf .ft C -python \-m nox \-e \(dqpytest\-3.7(coverage=False)\(dq \-\- tests/unit/cli/test_batch.py +python \-m nox \-e \(dqtest\-3(coverage=False)\(dq \-\- tests/unit/cli/test_batch.py .ft P .fi .UNINDENT @@ -1316,13 +1317,48 @@ this: .sp .nf .ft C -python \-m nox \-e \(dqpytest\-3.7(coverage=False)\(dq \-\- tests/unit/cli/test_batch.py; espeak \(dqTests done, woohoo!\(dq +python \-m nox \-e \(dqtest\-3(coverage=False)\(dq \-\- tests/unit/cli/test_batch.py; espeak \(dqTests done, woohoo!\(dq .ft P .fi .UNINDENT .UNINDENT .sp That way you don\(aqt have to keep monitoring the actual test run. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +python \-m nox \-e \(dqtest\-3(coverage=False)\(dq \-\- \-\-core\-tests +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +You can enable or disable test groups locally by passing their respected flag: +.INDENT 0.0 +.IP \(bu 2 +\-\-no\-fast\-tests \- Tests that are ~10s or faster. Fast tests make up ~75% of tests and can run in 10 to 20 minutes. +.IP \(bu 2 +\-\-slow\-tests \- Tests that are ~10s or slower. +.IP \(bu 2 +\-\-core\-tests \- Tests of any speed that test the root parts of salt. +.IP \(bu 2 +\-\-flaky\-jail \- Test that need to be temporarily skipped. +.UNINDENT +.sp +In your PR, you can enable or disable test groups by setting a label. +All fast, slow, and core tests specified in the change file will always run. +.INDENT 0.0 +.IP \(bu 2 +test:no\-fast +.IP \(bu 2 +test:core +.IP \(bu 2 +test:slow +.IP \(bu 2 +test:flaky\-jail +.UNINDENT .SS Changelog and commit! 
.sp When you write your commit message you should use imperative style. Do @@ -1345,7 +1381,7 @@ But that advice is backwards for the changelog. We follow the our changelog, and use towncrier to generate it for each release. As a contributor, all that means is that you need to add a file to the \fBsalt/changelog\fP directory, using the \fB.\fP format. For -instanch, if you fixed issue 123, you would do: +instance, if you fixed issue 123, you would do: .INDENT 0.0 .INDENT 3.5 .sp @@ -1368,6 +1404,9 @@ If someone isn\(aqt an expert in this area, what will they need to know? .sp This will also help you out, because when you go to create the PR it will automatically insert the body of your commit messages. +.sp +See the \fI\%changelog\fP +docs for more information. .SS Pull request time! .sp Once you\(aqve done all your dev work and tested locally, you should check @@ -1756,10 +1795,13 @@ moved into the master cachedir (on most platforms, \fB/var/cache/salt/master/extmods\fP). .sp -Directory for custom modules. This directory can contain subdirectories for -each of Salt\(aqs module types such as \fBrunners\fP, \fBoutput\fP, \fBwheel\fP, -\fBmodules\fP, \fBstates\fP, \fBreturners\fP, \fBengines\fP, \fButils\fP, etc. -This path is appended to \fI\%root_dir\fP\&. +Directory where custom modules are synced to. This directory can contain +subdirectories for each of Salt\(aqs module types such as \fBrunners\fP, +\fBoutput\fP, \fBwheel\fP, \fBmodules\fP, \fBstates\fP, \fBreturners\fP, \fBengines\fP, +\fButils\fP, etc. This path is appended to \fI\%root_dir\fP\&. +.sp +Note, any directories or files not found in the \fImodule_dirs\fP location +will be removed from the extension_modules path. .INDENT 0.0 .INDENT 3.5 .sp @@ -1954,9 +1996,27 @@ Set the default timeout for the salt command and api. 
.sp Default: \fB60\fP .sp -The loop_interval option controls the seconds for the master\(aqs maintenance +The loop_interval option controls the seconds for the master\(aqs Maintenance process check cycle. This process updates file server backends, cleans the job cache and executes the scheduler. +.SS \fBmaintenance_interval\fP +.sp +New in version 3006.0. + +.sp +Default: \fB3600\fP +.sp +Defines how often to restart the master\(aqs Maintenance process. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +maintenance_interval: 9600 +.ft P +.fi +.UNINDENT +.UNINDENT .SS \fBoutput\fP .sp Default: \fBnested\fP @@ -6214,6 +6274,24 @@ s3fs_update_interval: 120 .fi .UNINDENT .UNINDENT +.SS \fBfileserver_interval\fP +.sp +New in version 3006.0. + +.sp +Default: \fB3600\fP +.sp +Defines how often to restart the master\(aqs FilesServerUpdate process. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +fileserver_interval: 9600 +.ft P +.fi +.UNINDENT +.UNINDENT .SS Pillar Configuration .SS \fBpillar_roots\fP .sp @@ -7303,6 +7381,7 @@ Default: \fB3600\fP If and only if a master has set \fBpillar_cache: True\fP, the cache TTL controls the amount of time, in seconds, before the cache is considered invalid by a master and a fresh pillar is recompiled and stored. +The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. .SS \fBpillar_cache_backend\fP .sp New in version 2015.8.8. @@ -7804,6 +7883,13 @@ log_level: warning .fi .UNINDENT .UNINDENT +.sp +Any log level below the \fIinfo\fP level is INSECURE and may log sensitive data. This currently includes: +#. profile +#. debug +#. trace +#. garbage +#. all .SS \fBlog_level_logfile\fP .sp Default: \fBwarning\fP @@ -7821,6 +7907,13 @@ log_level_logfile: warning .fi .UNINDENT .UNINDENT +.sp +Any log level below the \fIinfo\fP level is INSECURE and may log sensitive data. This currently includes: +#. profile +#. debug +#. trace +#. garbage +#. 
all .SS \fBlog_datefmt\fP .sp Default: \fB%H:%M:%S\fP @@ -10108,8 +10201,8 @@ acceptance_wait_time_max: 0 .sp Default: \fBFalse\fP .sp -If the master rejects the minion\(aqs public key, retry instead of exiting. -Rejected keys will be handled the same as waiting on acceptance. +If the master denies or rejects the minion\(aqs public key, retry instead of +exiting. These keys will be handled the same as waiting on acceptance. .INDENT 0.0 .INDENT 3.5 .sp @@ -10239,6 +10332,44 @@ auth_safemode: False .fi .UNINDENT .UNINDENT +.SS \fBrequest_channel_timeout\fP +.sp +New in version 3006.2. + +.sp +Default: \fB30\fP +.sp +The default timeout timeout for request channel requests. This setting can be used to tune minions to better handle long running pillar and file client requests. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +request_channel_timeout: 30 +.ft P +.fi +.UNINDENT +.UNINDENT +.SS \fBrequest_channel_tries\fP +.sp +New in version 3006.2. + +.sp +Default: \fB3\fP +.sp +The default number of times the minion will try request channel requests. This +setting can be used to tune minions to better handle long running pillar and +file client requests by retrying them after a timeout happens. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +request_channel_tries: 3 +.ft P +.fi +.UNINDENT +.UNINDENT .SS \fBping_interval\fP .sp Default: \fB0\fP @@ -11561,10 +11692,7 @@ Changed in version 2018.3.0: Renamed from \fBenvironment\fP to \fBsaltenv\fP\&. ignored and \fBsaltenv\fP will be used. .sp -Normally the minion is not isolated to any single environment on the master -when running states, but the environment can be isolated on the minion side -by statically setting it. Remember that the recommended way to manage -environments is to isolate via the top file. +The default fileserver environment to use when copying files and applying states. 
.INDENT 0.0 .INDENT 3.5 .sp @@ -12533,6 +12661,13 @@ log_level: warning .fi .UNINDENT .UNINDENT +.sp +Any log level below the \fIinfo\fP level is INSECURE and may log sensitive data. This currently includes: +#. profile +#. debug +#. trace +#. garbage +#. all .SS \fBlog_level_logfile\fP .sp Default: \fBwarning\fP @@ -12550,6 +12685,13 @@ log_level_logfile: warning .fi .UNINDENT .UNINDENT +.sp +Any log level below the \fIinfo\fP level is INSECURE and may log sensitive data. This currently includes: +#. profile +#. debug +#. trace +#. garbage +#. all .SS \fBlog_datefmt\fP .sp Default: \fB%H:%M:%S\fP @@ -13895,15 +14037,19 @@ For reference, see: # #cachedir: /var/cache/salt/master -# Directory for custom modules. This directory can contain subdirectories for -# each of Salt\(aqs module types such as \(dqrunners\(dq, \(dqoutput\(dq, \(dqwheel\(dq, \(dqmodules\(dq, -# \(dqstates\(dq, \(dqreturners\(dq, \(dqengines\(dq, \(dqutils\(dq, etc. +# Directory where custom modules sync to. This directory can contain +# subdirectories for each of Salt\(aqs module types such as \(dqrunners\(dq, +# \(dqoutput\(dq, \(dqwheel\(dq, \(dqmodules\(dq, \(dqstates\(dq, \(dqreturners\(dq, \(dqengines\(dq, +# \(dqutils\(dq, etc. +# +# Note, any directories or files not found in the \(gamodule_dirs\(ga +# location will be removed from the extension_modules path. + #extension_modules: /var/cache/salt/master/extmods # Directory for custom modules. This directory can contain subdirectories for # each of Salt\(aqs module types such as \(dqrunners\(dq, \(dqoutput\(dq, \(dqwheel\(dq, \(dqmodules\(dq, # \(dqstates\(dq, \(dqreturners\(dq, \(dqengines\(dq, \(dqutils\(dq, etc. 
-# Like \(aqextension_modules\(aq but can take an array of paths #module_dirs: [] # Verify and set permissions on configuration directories at startup: @@ -14856,6 +15002,7 @@ For reference, see: # If and only if a master has set \(ga\(gapillar_cache: True\(ga\(ga, the cache TTL controls the amount # of time, in seconds, before the cache is considered invalid by a master and a fresh # pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. #pillar_cache_ttl: 3600 # If and only if a master has set \(gapillar_cache: True\(ga, one of several storage providers @@ -15028,7 +15175,7 @@ For reference, see: # One of \(aqgarbage\(aq, \(aqtrace\(aq, \(aqdebug\(aq, info\(aq, \(aqwarning\(aq, \(aqerror\(aq, \(aqcritical\(aq. # # The following log levels are considered INSECURE and may log sensitive data: -# [\(aqgarbage\(aq, \(aqtrace\(aq, \(aqdebug\(aq] +# [\(aqprofile\(aq, \(aqgarbage\(aq, \(aqtrace\(aq, \(aqdebug\(aq, \(aqall\(aq] # #log_level: warning @@ -16009,7 +16156,7 @@ For reference, see: # One of \(aqgarbage\(aq, \(aqtrace\(aq, \(aqdebug\(aq, \(aqinfo\(aq, \(aqwarning\(aq, \(aqerror\(aq, \(aqcritical\(aq. # # The following log levels are considered INSECURE and may log sensitive data: -# [\(aqgarbage\(aq, \(aqtrace\(aq, \(aqdebug\(aq] +# [\(aqprofile\(aq, \(aqgarbage\(aq, \(aqtrace\(aq, \(aqdebug\(aq, \(aqall\(aq] # # Default: \(aqwarning\(aq #log_level: warning @@ -16720,7 +16867,7 @@ For reference, see: # One of \(aqgarbage\(aq, \(aqtrace\(aq, \(aqdebug\(aq, \(aqinfo\(aq, \(aqwarning\(aq, \(aqerror\(aq, \(aqcritical\(aq. 
# # The following log levels are considered INSECURE and may log sensitive data: -# [\(aqgarbage\(aq, \(aqtrace\(aq, \(aqdebug\(aq] +# [\(aqprofile\(aq, \(aqgarbage\(aq, \(aqtrace\(aq, \(aqdebug\(aq, \(aqall\(aq] # # Default: \(aqwarning\(aq #log_level: warning @@ -18887,6 +19034,13 @@ Everything T} _ .TE +.sp +Any log level below the \fIinfo\fP level is INSECURE and may log sensitive data. This currently includes: +#. profile +#. debug +#. trace +#. garbage +#. all .SS Available Configuration Settings .SS \fBlog_file\fP .sp @@ -19823,6 +19977,13 @@ specifying both the file path and the environment to search. The individual environments can span across multiple directory roots to create overlays and to allow for files to be organized in many flexible ways. +.SS Periodic Restarts +.sp +The file server will restart periodically. The reason for this is to prevent any +files erver backends which may not properly handle resources from endlessly +consuming memory. A notable example of this is using a git backend with the +pygit2 library. How often the file server restarts can be controlled with the +\fBfileserver_interval\fP in your master\(aqs config file. .SS Environments .sp The Salt file server defaults to the mandatory \fBbase\fP environment. This @@ -23587,7 +23748,7 @@ most secure setup, only connect syndics directly to master of masters. .INDENT 0.0 .TP .B email -\fI\%security@saltstack.com\fP +\fI\%saltproject\-security.pdl@broadcom.com\fP .TP .B gpg key ID 4EA0793D @@ -23708,7 +23869,7 @@ fwPKmQ2cKnCBs5ASj1DkgUcz2c8DTUPVqg== .UNINDENT .UNINDENT .sp -The SaltStack Security Team is available at \fI\%security@saltstack.com\fP for +The SaltStack Security Team is available at \fI\%saltproject\-security.pdl@broadcom.com\fP for security\-related bug reports or questions. .sp We request the disclosure of any security\-related bugs or issues be reported @@ -23722,7 +23883,7 @@ seriously. 
Our disclosure policy is intended to resolve security issues as quickly and safely as is possible. .INDENT 0.0 .IP 1. 3 -A security report sent to \fI\%security@saltstack.com\fP is assigned to a team +A security report sent to \fI\%saltproject\-security.pdl@broadcom.com\fP is assigned to a team member. This person is the primary contact for questions and will coordinate the fix, release, and announcement. .IP 2. 3 @@ -31273,6 +31434,10 @@ A Python data structure .SS salt.renderers.mako .sp Mako Renderer for Salt +.sp +This renderer requires the Mako library. +.sp +To install Mako, do the following: .INDENT 0.0 .TP .B salt.renderers.mako.render(template_file, saltenv=\(aqbase\(aq, sls=\(aq\(aq, context=None, tmplpath=None, **kws) @@ -31309,8 +31474,7 @@ The typical use\-case would be to use ciphers in your pillar data, and keep a secret key on your master. You can put the public key in source control so that developers can add new secrets quickly and easily. .sp -This renderer requires the libsodium library binary and libnacl >= 1.5.1 -python package (support for sealed boxes came in 1.5.1 version). +This renderer requires the libsodium library binary and PyNacl >= 1.0 .SS Setup .sp To set things up, first generate a keypair. On the master, run the following: @@ -31539,8 +31703,30 @@ environment is \fB/srv/salt\fP, and the SLS file is \fBfoo.bar.baz\fP\&. .UNINDENT .sp -When writing a reactor SLS file the global context \fBdata\fP (same as context \fB{{ data }}\fP -for states written with Jinja + YAML) is available. 
The following YAML + Jinja state declaration: +When used in a scenario where additional user\-provided context data is supplied +(such as with \fI\%file.managed\fP), the additional +data will typically be injected into the script as one or more global +variables: +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +/etc/http/conf/http.conf: + file.managed: + \- source: salt://apache/generate_http_conf.py + \- template: py + \- context: + # Will be injected as the global variable \(dqsite_name\(dq. + site_name: {{ site_name }} +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +When writing a reactor SLS file the global context \fBdata\fP (same as context +\fB{{ data }}\fP for states written with Jinja + YAML) is available. The +following YAML + Jinja state declaration: .INDENT 0.0 .INDENT 3.5 .sp @@ -50507,6 +50693,98 @@ def test_issue_58763_a(tmp_path, modules, state_tree, caplog): .fi .UNINDENT .UNINDENT +.SS Test Groups +.sp +Salt has four groups +.INDENT 0.0 +.IP \(bu 2 +fast \- Tests that are ~10s or faster. Fast tests make up ~75% of tests and can run in 10 to 20 minutes. +.IP \(bu 2 +slow \- Tests that are ~10s or slower. +.IP \(bu 2 +core \- Tests of any speed that test the root parts of salt. +.IP \(bu 2 +flaky\-jail \- Test that need to be temporarily skipped. +.UNINDENT +.sp +Pytest Decorators +.INDENT 0.0 +.IP \(bu 2 +@pytest.mark.slow_test +.IP \(bu 2 +@pytest.mark.core_test +.IP \(bu 2 +@pytest.mark.flaky_jail +.UNINDENT +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +@pytest.mark.core_test +def test_ping(self): + \(dq\(dq\(dq + test.ping + \(dq\(dq\(dq + self.assertTrue(self.run_function(\(dqtest.ping\(dq)) +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +You can also mark all the tests in file. 
+.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +pytestmark = [ + pytest.mark.core_test, +] + + +def test_ping(self): + \(dq\(dq\(dq + test.ping + \(dq\(dq\(dq + self.assertTrue(self.run_function(\(dqtest.ping\(dq)) + + +def test_ping2(self): + \(dq\(dq\(dq + test.ping + \(dq\(dq\(dq + for _ in range(10): + self.assertTrue(self.run_function(\(dqtest.ping\(dq)) +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +You can enable or disable test groups locally by passing there respected flag: +.INDENT 0.0 +.IP \(bu 2 +\-\-no\-fast\-tests +.IP \(bu 2 +\-\-slow\-tests +.IP \(bu 2 +\-\-core\-tests +.IP \(bu 2 +\-\-flaky\-jail +.UNINDENT +.sp +In your PR you can enable or disable test groups by setting a label. +All thought the fast, slow and core tests specified in the change file will always run. +.INDENT 0.0 +.IP \(bu 2 +test:no\-fast +.IP \(bu 2 +test:slow +.IP \(bu 2 +test:core +.IP \(bu 2 +test:flaky\-jail +.UNINDENT .SS Automated Test Runs .sp SaltStack maintains a Jenkins server which can be viewed at @@ -53789,6 +54067,8 @@ providers: .sp The above example will force the minion to use the \fBsystemd\fP module to provide service management, and the \fI\%aptpkg\fP module to provide package management. +.sp +For per\-state provider overrides, see documentation on \fI\%state providers\fP\&. .SS Logging Restrictions .sp As a rule, logging should not be done anywhere in a Salt module before it is @@ -54518,6 +54798,16 @@ Or it can be set for each file: .fi .UNINDENT .UNINDENT +.sp +The backup_mode can be set to any of the following options: +.INDENT 0.0 +.IP \(bu 2 +\fBminion\fP: backup to the minion. +.IP \(bu 2 +\fBmaster\fP: backup to the master, a planned mode that has not yet been implemented, so does nothing. +.IP \(bu 2 +\fBboth\fP: backup to both. a combination of both master and minion. 
+.UNINDENT .SS Backed\-up Files .sp The files will be saved in the minion cachedir under the directory named @@ -57417,6 +57707,34 @@ test: .UNINDENT .UNINDENT +.sp +Changed in version 3006.0: Since the \fBunless\fP requisite utilizes \fBcmd.retcode\fP, certain parameters +included in the state are passed along to \fBcmd.retcode\fP\&. On occasion this +can cause issues, particularly if the \fBshell\fP option in a \fBuser.present\fP +is set to /sbin/nologin and this shell is passed along to \fBcmd.retcode\fP\&. +This would cause \fBcmd.retcode\fP to run the command using that shell which +would fail regardless of the result of the command. +.sp +By including \fBshell\fP in \fBcmd_opts_exclude\fP, that parameter would not be +passed along to the call to \fBcmd.retcode\fP\&. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +jim_nologin: + user.present: + \- name: jim + \- shell: /sbin/nologin + \- unless: + \- echo hello world + \- cmd_opts_exclude: + \- shell +.ft P +.fi +.UNINDENT +.UNINDENT + .SS onlyif .sp New in version 2014.7.0. @@ -57526,7 +57844,35 @@ test: .UNINDENT .UNINDENT -.SS Creates +.sp +Changed in version 3006.0: Since the \fBonlyif\fP requisite utilizes \fBcmd.retcode\fP, certain parameters +included in the state are passed along to \fBcmd.retcode\fP\&. On occasion this +can cause issues, particularly if the \fBshell\fP option in a \fBuser.present\fP +is set to /sbin/nologin and this shell is passed along to \fBcmd.retcode\fP\&. +This would cause \fBcmd.retcode\fP to run the command using that shell which +would fail regardless of the result of the command. +.sp +By including \fBshell\fP in \fBcmd_opts_exclude\fP, that parameter would not be +passed along to the call to \fBcmd.retcode\fP\&. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +jim_nologin: + user.present: + \- name: jim + \- shell: /sbin/nologin + \- onlyif: + \- echo hello world + \- cmd_opts_exclude: + \- shell +.ft P +.fi +.UNINDENT +.UNINDENT + +.SS creates .sp New in version 3001. 
@@ -60904,8 +61250,12 @@ Beacons are typically enabled by placing a \fBbeacons:\fP top level block in beacons: inotify: \- files: - /etc/important_file: {} - /opt: {} + /etc/important_file: + mask: + \- modify + /opt: + mask: + \- modify .ft P .fi .UNINDENT @@ -60936,11 +61286,15 @@ Multiple copies of a particular Salt beacon can be configured by including the \ beacons: watch_important_file: \- files: - /etc/important_file: {} + /etc/important_file: + mask: + \- modify \- beacon_module: inotify watch_another_file: \- files: - /etc/another_file: {} + /etc/another_file: + mask: + \- modify \- beacon_module: inotify .ft P .fi @@ -60959,8 +61313,12 @@ and 10\-second intervals: beacons: inotify: \- files: - /etc/important_file: {} - /opt: {} + /etc/important_file: + mask: + \- modify + /opt: + mask: + \- modify \- interval: 5 \- disable_during_state_run: True load: @@ -60999,6 +61357,8 @@ beacons: inotify: \- files: /etc/important_file: {} + mask: + \- modify \- disable_during_state_run: True .ft P .fi @@ -61143,7 +61503,7 @@ Add the following to \fB/srv/reactor/revert.sls\fP: .ft C revert\-file: local.state.apply: - \- tgt: {{ data[\(aqdata\(aq][\(aqid\(aq] }} + \- tgt: {{ data[\(aqid\(aq] }} \- arg: \- maintain_important_file .ft P @@ -61160,13 +61520,6 @@ to modify the watched file, it is important to ensure the state applied is also \fI\%idempotent\fP\&. .UNINDENT .UNINDENT -.sp -\fBNOTE:\fP -.INDENT 0.0 -.INDENT 3.5 -The expression \fB{{ data[\(aqdata\(aq][\(aqid\(aq] }}\fP \fI\%is correct\fP as it matches the event structure \fI\%shown above\fP\&. -.UNINDENT -.UNINDENT .SS State SLS .sp Create the state sls file referenced by the reactor sls file. This state file @@ -61587,6 +61940,14 @@ in \fI\%local reactions\fP, but as noted above this is not very user\-friendly. Therefore, the new config schema is recommended if the master is running a supported release. 
.sp +\fBNOTE:\fP +.INDENT 0.0 +.INDENT 3.5 +State ids of reactors for runners and wheels should all be unique. They can +overwrite each other when added to the async queue causing lost reactions. +.UNINDENT +.UNINDENT +.sp The below two examples are equivalent: .TS center; @@ -61658,6 +62019,14 @@ Like \fI\%runner reactions\fP, the old config schema called for wheel reactions to have arguments passed directly under the name of the \fI\%wheel function\fP (or in \fBarg\fP or \fBkwarg\fP parameters). .sp +\fBNOTE:\fP +.INDENT 0.0 +.INDENT 3.5 +State ids of reactors for runners and wheels should all be unique. They can +overwrite each other when added to the async queue causing lost reactions. +.UNINDENT +.UNINDENT +.sp The below two examples are equivalent: .TS center; @@ -84826,37 +85195,630 @@ on the needs of the deployed vm. .sp Starting in 3006, only onedir packaging will be available. The 3006 onedir packages are built with the \fI\%relenv\fP tool. +.SS Docker Containers +.sp +The Salt Project uses docker containers to build our deb and rpm packages. If you are building your own packages you can use +the same containers we build with in the Github piplines. These containers are documented \fI\%here\fP\&. +.SS How to build onedir only +.INDENT 0.0 +.IP 1. 3 +Install relenv: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +pip install relenv +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 2. 3 +Fetch toolchain (Only required for linux OSs) +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +relenv toolchain fetch +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 3. 3 +Fetch Native Python Build: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +relenv fetch \-\-python= +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 4. 
3 +Create relenv environment: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +relenv create \-\-python= +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 5. 3 +Add Salt into onedir. +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +/bin/pip install /path/to/salt +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT .SS How to build rpm packages +.INDENT 0.0 +.IP 1. 3 +Ensure you are in the current Salt cloned git repo: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +cd +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 2. 3 +Install the dependencies: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +yum \-y install python3 python3\-pip openssl git rpmdevtools rpmlint systemd\-units libxcrypt\-compat git gnupg2 jq createrepo rpm\-sign rustc cargo epel\-release +yum \-y install patchelf +pip install awscli +.ft P +.fi +.UNINDENT +.UNINDENT +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +pip install \-r requirements/static/ci/py{python_version}/tools.txt +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 3. 3 +(Optional) To build a specific Salt version, you will need to install tools and changelog dependencies: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 .sp -You only need to run rpmbuild in the Salt repo: +.nf +.ft C +pip install \-r requirements/static/ci/py{python_version}/changelog.txt +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 4. 3 +(Optional) To build a specific Salt version, run tools and set Salt version: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +tools changelog update\-rpm +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 5. 3 +Build the RPM: +.INDENT 3.0 +.INDENT 3.5 +Only the arch argument is required, the rest are optional. 
.INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -# rpmbuild \-bb \-\-define=\(dq_salt_src $(pwd)\(dq $(pwd)/pkg/rpm/salt.spec +tools pkg build rpm \-\-relenv\-version \-\-python\-version \-\-arch .ft P .fi .UNINDENT .UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT .SS How to build deb packages +.INDENT 0.0 +.IP 1. 3 +Ensure you are in the current Salt cloned git repo.: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +cd +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 2. 3 +Install the dependencies: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +apt install \-y apt\-utils gnupg jq awscli python3 python3\-venv python3\-pip build\-essential devscripts debhelper bash\-completion git patchelf rustc +.ft P +.fi +.UNINDENT +.UNINDENT +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +pip install \-r requirements/static/ci/py{python_version}/tools.txt +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 3. 3 +(Optional) To build a specific Salt version, you will need to install changelog dependencies: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +pip install \-r requirements/static/ci/py{python_version}/changelog.txt +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 4. 3 +(Optional) To build a specific Salt version, run tools and set Salt version: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 .sp -You only need to add a symlink and run debuild in the Salt repo: +.nf +.ft C +tools changelog update\-deb +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 5. 3 +Build the deb package: +.INDENT 3.0 +.INDENT 3.5 +Only the arch argument is required, the rest are optional. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +tools pkg build deb \-\-relenv\-version \-\-python\-version \-\-arch +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.SS How to build MacOS packages +.INDENT 0.0 +.IP 1. 
3 +Ensure you are in the current Salt cloned git repo.: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +cd +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 2. 3 +Install the dependencies: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +pip install \-r requirements/static/ci/py{python_version}/tools.txt +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 3. 3 +Build the MacOS package: +.INDENT 3.0 +.INDENT 3.5 +Only the salt\-version argument is required, the rest are optional. +Do note that you will not be able to sign the packages when building them. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +tools pkg build macos \-\-salt\-version +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.SS How to build Windows packages +.INDENT 0.0 +.IP 1. 3 +Ensure you are in the current Salt cloned git repo.: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +cd +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 2. 3 +Install the dependencies: +.INDENT 3.0 +.INDENT 3.5 +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +pip install \-r requirements/static/ci/py{python_version}/tools.txt +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT +.IP 3. 3 +Build the MacOS package: +.INDENT 3.0 +.INDENT 3.5 +Only the arch and salt\-version arguments are required, the rest are optional. +Do note that you will not be able to sign the packages when building them. .INDENT 0.0 .INDENT 3.5 .sp .nf .ft C -# ln \-s pkg/deb/debian debian -# debuild \-uc \-us +tools pkg build windows \-\-salt\-version \-\-arch .ft P .fi .UNINDENT .UNINDENT +.UNINDENT +.UNINDENT +.UNINDENT .SS How to access python binary .sp The python library is available in the install directory of the onedir package. For example on linux the default location would be \fB/opt/saltstack/salt/bin/python3\fP\&. 
+.SS Testing the packages +.sp +If you want to test your built packages, or any other collection of salt packages post 3006.0, follow \fI\%this guide\fP +.SS Testing packages +.SS The package test suite +.sp +The salt repo provides a test suite for testing basic functionality of our +packages at \fB/pkg/tests/\fP\&. You can run the install, upgrade, and +downgrade tests. These tests run automatically on most PRs that are submitted +against Salt. +.sp +\fBWARNING:\fP +.INDENT 0.0 +.INDENT 3.5 +These tests make destructive changes to your system because they install the +built packages onto the system. They may also install older versions in the +case of upgrades or downgrades. To prevent destructive changes, run the +tests in an isolated system, preferably a virtual machine. +.UNINDENT +.UNINDENT +.SS Setup +.sp +In order to run the package tests, the \fI\%relenv\fP onedir and +built packages need to be placed in the correct locations. +.INDENT 0.0 +.IP \(bu 2 +Place all salt packages for the applicable testing version in +\fB/pkg/artifacts/\fP\&. +.IP \(bu 2 +The onedir must be located under \fB/artifacts/\fP\&. +.IP \(bu 2 +Additionally, to ensure complete parity with Salt\(aqs CI/CD suite, place the +\fBnox\fP virtual environment in \fB/.nox/test\-pkgs\-onedir\fP\&. +.UNINDENT +.sp +The following are a few ways this can be accomplished easily. +.sp +You can ensure parity by installing the package test suite through a few +possible methods: +.INDENT 0.0 +.IP \(bu 2 +Using \fBtools\fP +.IP \(bu 2 +Downloading individually +.UNINDENT +.SS Using \fBtools\fP +.sp +Salt has preliminary support for setting up the package test suite in the +\fBtools\fP command suite that is located under \fB/tools/testsuite/\fP\&. +This method requires the Github CLI tool \fBgh\fP (\fI\%https://cli.github.com/\fP) to be properly configured for +interaction with the salt repo. +.INDENT 0.0 +.IP 1. 
+You can only download artifacts from finished workflow runs. This is something
+imposed by the GitHub API.
+To download artifacts from a running workflow run, you either have to wait for
+it to finish or cancel it.
+.SS Running the tests
+.sp
+You can run the test suite if all the artifacts are in the correct location.
This module executes itself in a subprocess in order to use the system python and pam libraries.
.nf .ft C engines: - \- slack: + \- slack_bolt: app_token: \(dqxapp\-x\-xxxxxxxxxxx\-xxxxxxxxxxxxx\-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\(dq bot_token: \(aqxoxb\-xxxxxxxxxx\-xxxxxxxxxxxxxxxxxxxxxxxx\(aq control: True @@ -108875,7 +109824,7 @@ must be quoted, or else PyYAML will fail to load the configuration. .nf .ft C engines: - \- slack: + \- slack_bolt: groups_pillar: slack_engine_pillar app_token: \(dqxapp\-x\-xxxxxxxxxxx\-xxxxxxxxxxxxx\-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx\(dq bot_token: \(aqxoxb\-xxxxxxxxxx\-xxxxxxxxxxxxxxxxxxxxxxxx\(aq @@ -110341,7 +111290,7 @@ _ T{ \fI\%chocolatey\fP T} T{ -A dead simple module wrapping calls to the Chocolatey package manager (\fI\%http://chocolatey.org\fP) +A module that wraps calls to the Chocolatey package manager (\fI\%http://chocolatey.org\fP) T} _ T{ @@ -113145,7 +114094,7 @@ _ .SS salt.modules.acme .SS ACME / Let\(aqs Encrypt module .sp -New in version 2016.3. +New in version 2016.3.0. .sp This module currently looks for certbot script in the $PATH as @@ -116361,7 +117310,7 @@ salt \(aq*\(aq pkg.latest_version ... .INDENT 0.0 .TP .B salt.modules.aptpkg.list_downloaded(root=None, **kwargs) -New in version 3000?. +New in version 3000. .sp List prefetched packages downloaded by apt in the local disk. @@ -118943,6 +119892,7 @@ CLI Example: salt \(aq*\(aq at.at [tag=] [runas=] salt \(aq*\(aq at.at 12:05am \(aq/sbin/reboot\(aq tag=reboot salt \(aq*\(aq at.at \(aq3:05am +3 days\(aq \(aqbin/myscript\(aq tag=nightly runas=jim +salt \(aq*\(aq at.at \(aq\(dq22:02\(dq\(aq \(aqbin/myscript\(aq tag=nightly runas=jim .ft P .fi .UNINDENT @@ -147731,7 +148681,7 @@ Enable whyrun mode when set to True .UNINDENT .SS salt.modules.chocolatey .sp -A dead simple module wrapping calls to the Chocolatey package manager +A module that wraps calls to the Chocolatey package manager (\fI\%http://chocolatey.org\fP) .sp New in version 2014.1.0. 
@@ -147742,26 +148692,25 @@ New in version 2014.1.0. Instructs Chocolatey to add a source. .INDENT 7.0 .TP -.B name -The name of the source to be added as a chocolatey repository. -.TP -.B source -Location of the source you want to work with. -.TP -.B username -Provide username for chocolatey sources that need authentication +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBname\fP (\fI\%str\fP) \-\- The name of the source to be added as a chocolatey repository. +.IP \(bu 2 +\fBsource\fP (\fI\%str\fP) \-\- Location of the source you want to work with. +.IP \(bu 2 +\fBusername\fP (\fI\%str\fP) \-\- Provide username for chocolatey sources that need authentication credentials. -.TP -.B password -Provide password for chocolatey sources that need authentication +.IP \(bu 2 +\fBpassword\fP (\fI\%str\fP) \-\- Provide password for chocolatey sources that need authentication credentials. -.TP -.B priority -The priority order of this source as compared to other sources, +.IP \(bu 2 +\fBpriority\fP (\fI\%int\fP) \-\- The priority order of this source as compared to other sources, lower is better. Defaults to 0 (no priority). All priorities above 0 will be evaluated first, then zero\-based values will be evaluated in config file order. .UNINDENT +.UNINDENT .sp CLI Example: .INDENT 7.0 @@ -147881,8 +148830,8 @@ salt \(aq*\(aq chocolatey.chocolatey_version Instructs Chocolatey to disable a source. .INDENT 7.0 .TP -.B name -Name of the source repository to disable. +.B Parameters +\fBname\fP (\fI\%str\fP) \-\- Name of the source repository to disable. .UNINDENT .sp CLI Example: @@ -147903,8 +148852,8 @@ salt \(aq*\(aq chocolatey.disable_source Instructs Chocolatey to enable a source. .INDENT 7.0 .TP -.B name -Name of the source repository to enable. +.B Parameters +\fBname\fP (\fI\%str\fP) \-\- Name of the source repository to enable. .UNINDENT .sp CLI Example: @@ -147932,13 +148881,13 @@ Instructs Chocolatey to install a package. argument. Required. 
.IP \(bu 2 \fBversion\fP (\fI\%str\fP) \-\- Install a specific version of the package. Defaults to latest -version. Default is None. +version. Default is \fBNone\fP\&. .IP \(bu 2 \fBsource\fP (\fI\%str\fP) \-\- .sp Chocolatey repository (directory, share or remote URL feed) the package comes from. Defaults to the official Chocolatey feed. -Default is None. +Default is \fBNone\fP\&. .sp Alternate Sources: .INDENT 2.0 @@ -147956,26 +148905,28 @@ windowsfeatures .IP \(bu 2 \fBforce\fP (\fI\%bool\fP) \-\- Reinstall the current version of an existing package. Do not use -with \fBallow_multiple\fP\&. Default is False. +with \fBallow_multiple\fP\&. Default is \fBFalse\fP\&. .IP \(bu 2 -\fBpre_versions\fP (\fI\%bool\fP) \-\- Include pre\-release packages. Default is False. +\fBpre_versions\fP (\fI\%bool\fP) \-\- Include pre\-release packages. Default is \fBFalse\fP\&. .IP \(bu 2 \fBinstall_args\fP (\fI\%str\fP) \-\- A list of install arguments you want to pass to the installation -process i.e product key or feature list. Default is None. +process, i.e. product key or feature list. Default is \fBNone\fP\&. .IP \(bu 2 \fBoverride_args\fP (\fI\%bool\fP) \-\- Set to true if you want to override the original install arguments (for the native installer) in the package and use your own. When -this is set to False install_args will be appended to the end of the -default arguments. Default is None. +this is set to \fBFalse\fP install_args will be appended to the end of +the default arguments. Default is \fBNone\fP\&. .IP \(bu 2 -\fBforce_x86\fP (\fI\%bool\fP) \-\- Force x86 (32bit) installation on 64 bit systems. Default is False. +\fBforce_x86\fP (\fI\%bool\fP) \-\- Force x86 (32bit) installation on 64bit systems. Default is +\fBFalse\fP\&. .IP \(bu 2 -\fBpackage_args\fP (\fI\%str\fP) \-\- Arguments you want to pass to the package. Default is None. +\fBpackage_args\fP (\fI\%str\fP) \-\- Arguments you want to pass to the package. Default is \fBNone\fP\&. 
.IP \(bu 2 \fBallow_multiple\fP (\fI\%bool\fP) \-\- .sp Allow multiple versions of the package to be installed. Do not use -with \fBforce\fP\&. Does not work with all packages. Default is False. +with \fBforce\fP\&. Does not work with all packages. Default is +\fBFalse\fP\&. .sp New in version 2017.7.0. @@ -147984,7 +148935,7 @@ New in version 2017.7.0. \fBexecution_timeout\fP (\fI\%str\fP) \-\- .sp Chocolatey execution timeout value you want to pass to the -installation process. Default is None. +installation process. Default is \fBNone\fP\&. .sp New in version 2018.3.0. @@ -148018,18 +148969,20 @@ salt \(aq*\(aq chocolatey.install install_args= override_ar Instructs Chocolatey to install a package via Cygwin. .INDENT 7.0 .TP -.B name -The name of the package to be installed. Only accepts a single argument. -.TP -.B install_args -A list of install arguments you want to pass to the installation process -i.e product key or feature list -.TP -.B override_args -Set to true if you want to override the original install arguments (for -the native installer) in the package and use your own. When this is set -to False install_args will be appended to the end of the default -arguments +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBname\fP (\fI\%str\fP) \-\- The name of the package to be installed. Only accepts a single +argument. +.IP \(bu 2 +\fBinstall_args\fP (\fI\%str\fP) \-\- A list of install arguments you want to pass to the installation +process, i.e. product key or feature list +.IP \(bu 2 +\fBoverride_args\fP (\fI\%bool\fP) \-\- Set to \fBTrue\fP if you want to override the original install +arguments (for the native installer) in the package and use your +own. When this is set to \fBFalse\fP install_args will be appended to +the end of the default arguments +.UNINDENT .UNINDENT .sp CLI Example: @@ -148051,22 +149004,23 @@ salt \(aq*\(aq chocolatey.install_cygwin install_args= over Instructs Chocolatey to install a package via Ruby\(aqs Gems. 
.INDENT 7.0 .TP -.B name -The name of the package to be installed. Only accepts a single argument. -.TP -.B version -Install a specific version of the package. Defaults to latest version -available. -.TP -.B install_args -A list of install arguments you want to pass to the installation process -i.e product key or feature list -.TP -.B override_args -Set to true if you want to override the original install arguments (for -the native installer) in the package and use your own. When this is set -to False install_args will be appended to the end of the default -arguments +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBname\fP (\fI\%str\fP) \-\- The name of the package to be installed. Only accepts a single +argument. +.IP \(bu 2 +\fBversion\fP (\fI\%str\fP) \-\- Install a specific version of the package. Defaults to the latest +version available. +.IP \(bu 2 +\fBinstall_args\fP (\fI\%str\fP) \-\- A list of install arguments you want to pass to the installation +process, i.e. product key or feature list +.IP \(bu 2 +\fBoverride_args\fP (\fI\%bool\fP) \-\- Set to \fBTrue\fP if you want to override the original install +arguments (for the native installer) in the package and use your +own. When this is set to \fBFalse\fP install_args will be appended to +the end of the default arguments +.UNINDENT .UNINDENT .sp CLI Example: @@ -148095,16 +149049,18 @@ in Chocolatey 1.0. .INDENT 7.0 .TP -.B name -The name of the package to be installed. Only accepts a single argument. -.TP -.B version -Install a specific version of the package. Defaults to latest version -available. -.TP -.B source -Chocolatey repository (directory, share or remote URL feed) the package -comes from. Defaults to the official Chocolatey feed. +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBname\fP (\fI\%str\fP) \-\- The name of the package to be installed. Only accepts a single +argument. +.IP \(bu 2 +\fBversion\fP (\fI\%str\fP) \-\- Install a specific version of the package. 
Defaults to the latest +version available. +.IP \(bu 2 +\fBsource\fP (\fI\%str\fP) \-\- Chocolatey repository (directory, share or remote URL feed) the +package comes from. Defaults to the official Chocolatey feed. +.UNINDENT .UNINDENT .sp CLI Example: @@ -148126,22 +149082,23 @@ salt \(aq*\(aq chocolatey.install_missing version= install_args= over Instructs Chocolatey to install a package via the Microsoft Web PI service. .INDENT 7.0 .TP -.B name -The name of the package to be installed. Only accepts a single argument. -.TP -.B install_args -A list of install arguments you want to pass to the installation process -i.e product key or feature list -.TP -.B override_args -Set to true if you want to override the original install arguments (for -the native installer) in the package and use your own. When this is set -to False install_args will be appended to the end of the default -arguments +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBname\fP (\fI\%str\fP) \-\- The name of the package to be installed. Only accepts a single +argument. +.IP \(bu 2 +\fBinstall_args\fP (\fI\%str\fP) \-\- A list of install arguments you want to pass to the installation +process, i.e. product key or feature list. +.IP \(bu 2 +\fBoverride_args\fP (\fI\%bool\fP) \-\- Set to \fBTrue\fP if you want to override the original install +arguments (for the native installer) in the package and use your +own. When this is set to \fBFalse\fP install_args will be appended to +the end of the default arguments. +.UNINDENT .UNINDENT .sp CLI Example: @@ -148198,8 +149157,9 @@ Instructs Chocolatey to install a Windows Feature via the Deployment Image Servicing and Management tool. .INDENT 7.0 .TP -.B name -The name of the feature to be installed. Only accepts a single argument. +.B Parameters +\fBname\fP (\fI\%str\fP) \-\- The name of the feature to be installed. Only accepts a single +argument. .UNINDENT .sp CLI Example: @@ -148234,11 +149194,11 @@ name/description/tag. Default is None. package comes from. 
Defaults to the official Chocolatey feed if None is passed. Default is None. .IP \(bu 2 -\fBlocal_only\fP (\fI\%bool\fP) \-\- Display packages only installed locally. Default is False. +\fBlocal_only\fP (\fI\%bool\fP) \-\- Only display packages that are installed locally. Default is False. .IP \(bu 2 \fBexact\fP (\fI\%bool\fP) \-\- .sp -Display only packages that match \fBnarrow\fP exactly. Default is +Only display packages that match \fBnarrow\fP exactly. Default is False. .sp New in version 2017.7.0. @@ -148339,9 +149299,6 @@ salt \(aq*\(aq chocolatey.list_windowsfeatures .TP .B salt.modules.chocolatey.unbootstrap() Uninstall chocolatey from the system by doing the following: -.sp -New in version 3001. - .INDENT 7.0 .IP \(bu 2 Delete the Chocolatey Directory @@ -148350,6 +149307,9 @@ Remove Chocolatey from the path .IP \(bu 2 Remove Chocolatey environment variables .UNINDENT +.sp +New in version 3001. + .INDENT 7.0 .TP .B Returns @@ -148373,27 +149333,27 @@ salt * chocolatey.unbootstrap .UNINDENT .INDENT 0.0 .TP -.B salt.modules.chocolatey.uninstall(name, version=None, uninstall_args=None, override_args=False) +.B salt.modules.chocolatey.uninstall(name, version=None, uninstall_args=None, override_args=False, force=False) Instructs Chocolatey to uninstall a package. .INDENT 7.0 .TP -.B name -The name of the package to be uninstalled. Only accepts a single +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBname\fP (\fI\%str\fP) \-\- The name of the package to be uninstalled. Only accepts a single argument. -.TP -.B version -Uninstalls a specific version of the package. Defaults to latest version -installed. -.TP -.B uninstall_args -A list of uninstall arguments you want to pass to the uninstallation -process i.e product key or feature list -.TP -.B override_args -Set to true if you want to override the original uninstall arguments -(for the native uninstaller) in the package and use your own. 
When this -is set to False uninstall_args will be appended to the end of the -default arguments +.IP \(bu 2 +\fBversion\fP (\fI\%str\fP) \-\- Uninstalls a specific version of the package. Defaults to the latest +version installed. +.IP \(bu 2 +\fBuninstall_args\fP (\fI\%str\fP) \-\- A list of uninstall arguments you want to pass to the uninstallation +process, i.e. product key or feature list. +.IP \(bu 2 +\fBoverride_args\fP \-\- Set to \fBTrue\fP if you want to override the original uninstall +arguments (for the native uninstaller) in the package and use your +own. When this is set to \fBFalse\fP uninstall_args will be appended +to the end of the default arguments. +.UNINDENT .UNINDENT .sp CLI Example: @@ -148416,16 +149376,17 @@ salt \(aq*\(aq chocolatey.uninstall version= uni Instructs Chocolatey to update packages on the system. .INDENT 7.0 .TP -.B name -The name of the package to update, or \(dqall\(dq to update everything +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBname\fP (\fI\%str\fP) \-\- The name of the package to update, or \(dqall\(dq to update everything installed on the system. -.TP -.B source -Chocolatey repository (directory, share or remote URL feed) the package -comes from. Defaults to the official Chocolatey feed. -.TP -.B pre_versions -Include pre\-release packages in comparison. Defaults to False. +.IP \(bu 2 +\fBsource\fP (\fI\%str\fP) \-\- Chocolatey repository (directory, share or remote URL feed) the +package comes from. Defaults to the official Chocolatey feed. +.IP \(bu 2 +\fBpre_versions\fP (\fI\%bool\fP) \-\- Include pre\-release packages in comparison. Defaults to \fBFalse\fP\&. +.UNINDENT .UNINDENT .sp CLI Example: @@ -148463,21 +149424,22 @@ version. \fBsource\fP (\fI\%str\fP) \-\- Chocolatey repository (directory, share or remote URL feed) the package comes from. Defaults to the official Chocolatey feed. 
.IP \(bu 2 -\fBforce\fP (\fI\%bool\fP) \-\- Reinstall the \fBsame\fP version already installed +\fBforce\fP (\fI\%bool\fP) \-\- Reinstall the \fBsame\fP version already installed. .IP \(bu 2 -\fBpre_versions\fP (\fI\%bool\fP) \-\- Include pre\-release packages in comparison. Defaults to False. +\fBpre_versions\fP (\fI\%bool\fP) \-\- Include pre\-release packages in comparison. Defaults to \fBFalse\fP\&. .IP \(bu 2 \fBinstall_args\fP (\fI\%str\fP) \-\- A list of install arguments you want to pass to the installation -process i.e product key or feature list +process, i.e. product key or feature list. .IP \(bu 2 -\fBoverride_args\fP (\fI\%str\fP) \-\- Set to true if you want to override the original install arguments -(for the native installer) in the package and use your own. When -this is set to False install_args will be appended to the end of the -default arguments +\fBoverride_args\fP (\fI\%bool\fP) \-\- Set to \fBTrue\fP if you want to override the original install +arguments (for the native installer) in the package and use your +own. When this is set to \fBFalse\fP install_args will be appended to +the end of the default arguments. .IP \(bu 2 -\fBforce_x86\fP \-\- Force x86 (32bit) installation on 64 bit systems. Defaults to false. +\fBforce_x86\fP (\fI\%bool\fP) \-\- Force x86 (32bit) installation on 64bit systems. Defaults to +\fBFalse\fP\&. .IP \(bu 2 -\fBpackage_args\fP \-\- A list of arguments you want to pass to the package +\fBpackage_args\fP (\fI\%str\fP) \-\- A list of arguments you want to pass to the package. .UNINDENT .TP .B Returns @@ -148513,13 +149475,13 @@ compare it to one available from a remote feed. \fBname\fP (\fI\%str\fP) \-\- The name of the package to check. Required. .IP \(bu 2 \fBcheck_remote\fP (\fI\%bool\fP) \-\- Get the version number of the latest package from the remote feed. -Default is False. +Default is \fBFalse\fP\&. 
.IP \(bu 2 \fBsource\fP (\fI\%str\fP) \-\- Chocolatey repository (directory, share or remote URL feed) the package comes from. Defaults to the official Chocolatey feed. -Default is None. +Default is \fBNone\fP\&. .IP \(bu 2 -\fBpre_versions\fP (\fI\%bool\fP) \-\- Include pre\-release packages in comparison. Default is False. +\fBpre_versions\fP (\fI\%bool\fP) \-\- Include pre\-release packages in comparison. Default is \fBFalse\fP\&. .UNINDENT .TP .B Returns @@ -167549,13 +168511,13 @@ Support for DEB packages New in version 2015.8.0. .sp -Parses RPM metadata and returns a dictionary of information about the +Parses DEB metadata and returns a dictionary of information about the package (name, version, etc.). .INDENT 7.0 .TP .B path Path to the file. Can either be an absolute path to a file on the -minion, or a salt fileserver URL (e.g. \fBsalt://path/to/file.rpm\fP). +minion, or a salt fileserver URL (e.g. \fBsalt://path/to/file.deb\fP). If a salt fileserver URL is passed, the file will be cached to the minion so that it can be examined. .TP @@ -181170,7 +182132,7 @@ Changed in version 2016.3.0. .B https_user Set HTTP Basic Auth username. Only accepted for HTTPS URLs. .sp -New in version 20515.5.0. +New in version 2015.5.0. .TP .B https_pass @@ -187826,7 +188788,7 @@ Receive key(s) from keyserver and add them to keychain .INDENT 7.0 .TP .B keyserver -Keyserver to use for searching for GPG keys, defaults to pgp.mit.edu +Keyserver to use for searching for GPG keys, defaults to keys.openpgp.org .TP .B keys The keyID(s) to retrieve from the keyserver. Can be specified as a comma @@ -187867,7 +188829,7 @@ Search keys from keyserver Text to search the keyserver for, e.g. email address, keyID or fingerprint. .TP .B keyserver -Keyserver to use for searching for GPG keys, defaults to pgp.mit.edu. +Keyserver to use for searching for GPG keys, defaults to keys.openpgp.org. .TP .B user Which user\(aqs keychain to access, defaults to user Salt is running as. 
@@ -193428,7 +194390,7 @@ Passes through all the parameters described in the \fI\%utils.http.query function\fP: .INDENT 7.0 .TP -.B salt.utils.http.query(url, method=\(aqGET\(aq, params=None, data=None, data_file=None, header_dict=None, header_list=None, header_file=None, username=None, password=None, auth=None, decode=False, decode_type=\(aqauto\(aq, status=False, headers=False, text=False, cookies=None, cookie_jar=None, cookie_format=\(aqlwp\(aq, persist_session=False, session_cookie_jar=None, data_render=False, data_renderer=None, header_render=False, header_renderer=None, template_dict=None, test=False, test_url=None, node=\(aqminion\(aq, port=80, opts=None, backend=None, ca_bundle=None, verify_ssl=None, cert=None, text_out=None, headers_out=None, decode_out=None, stream=False, streaming_callback=None, header_callback=None, handle=False, agent=\(aqSalt/3006.0rc1\(aq, hide_fields=None, raise_error=True, formdata=False, formdata_fieldname=None, formdata_filename=None, decode_body=True, **kwargs) +.B salt.utils.http.query(url, method=\(aqGET\(aq, params=None, data=None, data_file=None, header_dict=None, header_list=None, header_file=None, username=None, password=None, auth=None, decode=False, decode_type=\(aqauto\(aq, status=False, headers=False, text=False, cookies=None, cookie_jar=None, cookie_format=\(aqlwp\(aq, persist_session=False, session_cookie_jar=None, data_render=False, data_renderer=None, header_render=False, header_renderer=None, template_dict=None, test=False, test_url=None, node=\(aqminion\(aq, port=80, opts=None, backend=None, ca_bundle=None, verify_ssl=None, cert=None, text_out=None, headers_out=None, decode_out=None, stream=False, streaming_callback=None, header_callback=None, handle=False, agent=\(aqSalt/3006.5\(aq, hide_fields=None, raise_error=True, formdata=False, formdata_fieldname=None, formdata_filename=None, decode_body=True, **kwargs) Query a resource, and decode the return data .UNINDENT .INDENT 7.0 @@ -226045,7 +227007,7 @@ New in 
version 2014.7.0. New in version 2014.7.0. .sp -Changed in version 3004.0: Changed the default character set used to include symbols and implemented arguments to control the used character set. +Changed in version 3004: Changed the default character set used to include symbols and implemented arguments to control the used character set. .sp Returns a random string of the specified length. @@ -226057,7 +227019,7 @@ Any valid number of bytes. .TP .B chars None -New in version 3004.0. +New in version 3004. .sp String with any character that should be used to generate random string. @@ -226066,7 +227028,7 @@ This argument supersedes all other character controlling arguments. .TP .B lowercase True -New in version 3004.0. +New in version 3004. .sp Use lowercase letters in generated random string. @@ -226076,7 +227038,7 @@ This argument is superseded by chars. .TP .B uppercase True -New in version 3004.0. +New in version 3004. .sp Use uppercase letters in generated random string. @@ -226086,7 +227048,7 @@ This argument is superseded by chars. .TP .B digits True -New in version 3004.0. +New in version 3004. .sp Use digits in generated random string. @@ -226096,7 +227058,7 @@ This argument is superseded by chars. .TP .B printable False -New in version 3004.0. +New in version 3004. .sp Use printable characters in generated random string and includes lowercase, uppercase, @@ -226111,7 +227073,7 @@ This argument is superseded by chars. .TP .B punctuation True -New in version 3004.0. +New in version 3004. .sp Use punctuation characters in generated random string. @@ -226121,7 +227083,7 @@ This argument is superseded by chars. .TP .B whitespace False -New in version 3004.0. +New in version 3004. .sp Use whitespace characters in generated random string. 
@@ -229953,7 +230915,7 @@ This module helps include encrypted passwords in pillars, grains and salt state .INDENT 0.0 .TP .B depends -libnacl, \fI\%https://github.com/saltstack/libnacl\fP +PyNaCl, \fI\%https://github.com/pyca/pynacl\fP .UNINDENT .sp This is often useful if you wish to store your pillars in source control or @@ -230169,9 +231131,9 @@ Optional small program to encrypt data without needing salt modules. .nf .ft C #!/bin/python3 -import sys, base64, libnacl.sealed +import sys, base64, nacl.public pk = base64.b64decode(\(aqYOURPUBKEY\(aq) -b = libnacl.sealed.SealedBox(pk) +b = nacl.public.SealedBox(pk) data = sys.stdin.buffer.read() print(base64.b64encode(b.encrypt(data)).decode()) .ft P @@ -230251,7 +231213,7 @@ salt\-run nacl.enc_file name=/tmp/id_rsa box_type=secretbox sk_file= .INDENT 0.0 .TP .B salt.modules.nacl.keygen(sk_file=None, pk_file=None, **kwargs) -Use libnacl to generate a keypair. +Use PyNaCl to generate a keypair. .sp If no \fIsk_file\fP is defined return a keypair. .sp @@ -251758,13 +252720,13 @@ if False \- attempting to create a bridge that exists returns False. name of the parent bridge (if the bridge shall be created as a fake bridge). If specified, vlan must also be specified. .IP \(bu 2 -\fBversionadded:\fP (\fI\&..\fP) \-\- 3006: +\fBversionadded:\fP (\fI\&..\fP) \-\- 3006.0: .IP \(bu 2 \fBvlan\fP \-\- int VLAN ID of the bridge (if the bridge shall be created as a fake bridge). If specified, parent must also be specified. .IP \(bu 2 -\fBversionadded:\fP \-\- 3006: +\fBversionadded:\fP \-\- 3006.0: .UNINDENT .TP .B Returns @@ -251872,7 +252834,7 @@ salt \(aq*\(aq openvswitch.bridge_list .INDENT 0.0 .TP .B salt.modules.openvswitch.bridge_to_parent(br) -New in version 3006. +New in version 3006.0. .sp Returns the parent bridge of a bridge. @@ -251903,7 +252865,7 @@ salt \(aq*\(aq openvswitch.bridge_to_parent br0 .INDENT 0.0 .TP .B salt.modules.openvswitch.bridge_to_vlan(br) -New in version 3006. +New in version 3006.0. 
.sp Returns the VLAN ID of a bridge. @@ -251933,7 +252895,7 @@ salt \(aq*\(aq openvswitch.bridge_to_parent br0 .INDENT 0.0 .TP .B salt.modules.openvswitch.db_get(table, record, column, if_exists=False) -New in version 3006. +New in version 3006.0. .sp Gets a column\(aqs value for a specific record. @@ -251974,7 +252936,7 @@ salt \(aq*\(aq openvswitch.db_get Port br0 vlan_mode .INDENT 0.0 .TP .B salt.modules.openvswitch.db_set(table, record, column, value, if_exists=False) -New in version 3006. +New in version 3006.0. .sp Sets a column\(aqs value for a specific record. @@ -260275,7 +261237,7 @@ salt \(aq*\(aq pillar.ext \(dq{\(aqgit\(aq: [{\(aqmybranch https://github.com/my .INDENT 0.0 .TP .B salt.modules.pillar.fetch(key, default=, merge=False, merge_nested_lists=None, delimiter=\(aq:\(aq, pillarenv=None, saltenv=None) -New in version 0.14. +New in version 0.14.0. .sp Attempt to retrieve the named value from \fI\%in\-memory pillar data\fP\&. If the pillar key is not present in the in\-memory @@ -260515,7 +261477,7 @@ salt \(aq*\(aq pillar.filter_by \(aq{web: Serve it up, db: I query, default: x_x .INDENT 0.0 .TP .B salt.modules.pillar.get(key, default=, merge=False, merge_nested_lists=None, delimiter=\(aq:\(aq, pillarenv=None, saltenv=None) -New in version 0.14. +New in version 0.14.0. .sp Attempt to retrieve the named value from \fI\%in\-memory pillar data\fP\&. If the pillar key is not present in the in\-memory @@ -261073,7 +262035,9 @@ Accepts either :all: to disable all binary packages, :none: to empty the set, or one or more package names with commas between them .TP .B log -Log file where a complete (maximum verbosity) record will be kept +Log file where a complete (maximum verbosity) record will be kept. +If this file doesn\(aqt exist and the parent directory is writeable, +it will be created. .TP .B proxy Specify a proxy in the form \fBuser:passwd@proxy.server:port\fP\&. Note @@ -268291,7 +269255,7 @@ PID of process to query. 
.B attrs Optional list of desired process attributes. The list of possible attributes can be found here: -\fI\%http://pythonhosted.org/psutil/#psutil.Process\fP +\fI\%https://psutil.readthedocs.io/en/latest/#processes\fP .UNINDENT .UNINDENT .INDENT 0.0 @@ -275801,7 +276765,7 @@ salt \(aq*\(aq service.available sshd limit=sysvinit .B salt.modules.rh_service.delete(name, **kwargs) Delete the named service .sp -New in version 2016.3. +New in version 2016.3.0. .sp CLI Example: @@ -278503,7 +279467,7 @@ A simple example might be something like the following: .nf .ft C {# a boolean check #} -{% set option_deprecated = salt[\(aqsalt_version.less_than\(aq](\(dq3001\(dq) %} +{% set option_deprecated = salt[\(aqsalt_version.less_than\(aq](\(dqSodium\(dq) %} {% if option_deprecated %} @@ -278541,7 +279505,7 @@ salt \(aq*\(aq salt_version.equal \(aqOxygen\(aq .TP .B salt.modules.salt_version.get_release_number(name) Returns the release number of a given release code name in a -\fBMAJOR.PATCH\fP format. +\fBMAJOR.PATCH\fP format (for Salt versions < 3000) or \fBMAJOR\fP for newer Salt versions. .sp If the release name has not been given an assigned release number, the function returns a string. If the release cannot be found, it returns @@ -279577,6 +280541,10 @@ proceess, as grains can affect which modules are available. .B refresh_pillar True Set to \fBFalse\fP to keep pillar data from being refreshed. +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. 
.UNINDENT .sp CLI Examples: @@ -279772,7 +280740,7 @@ salt \(aq*\(aq saltutil.signal_job 15 .UNINDENT .INDENT 0.0 .TP -.B salt.modules.saltutil.sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None) +.B salt.modules.saltutil.sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False) Changed in version 2015.8.11,2016.3.2: On masterless minions, pillar modules are now synced, and refreshed when \fBrefresh\fP is set to \fBTrue\fP\&. @@ -279822,6 +280790,10 @@ dictionary of modules to sync based on type .B extmod_blacklist None dictionary of modules to blacklist based on type +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. .UNINDENT .sp CLI Examples: @@ -280018,7 +280990,7 @@ salt \(aq*\(aq saltutil.sync_executors saltenv=base,dev .UNINDENT .INDENT 0.0 .TP -.B salt.modules.saltutil.sync_grains(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None) +.B salt.modules.saltutil.sync_grains(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False) New in version 0.10.0. .sp @@ -280046,6 +281018,10 @@ comma\-separated list of modules to sync .B extmod_blacklist None comma\-separated list of modules to blacklist based on type +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. .UNINDENT .sp CLI Examples: @@ -280314,7 +281290,7 @@ salt \(aq*\(aq saltutil.sync_output saltenv=base,dev .UNINDENT .INDENT 0.0 .TP -.B salt.modules.saltutil.sync_pillar(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None) +.B salt.modules.saltutil.sync_pillar(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None, clean_pillar_cache=False) New in version 2015.8.11,2016.3.2. 
.sp @@ -280336,6 +281312,10 @@ comma\-separated list of modules to sync .B extmod_blacklist None comma\-separated list of modules to blacklist based on type +.TP +.B clean_pillar_cache +False +Set to \fBTrue\fP to refresh pillar cache. .UNINDENT .sp \fBNOTE:\fP @@ -295926,7 +296906,7 @@ salt \(aq*\(aq sys.list_state_functions \(aqfile.s*\(aq .UNINDENT .UNINDENT .sp -New in version 2016.9. +New in version 2016.9.0. .INDENT 7.0 .INDENT 3.5 @@ -319742,7 +320722,7 @@ CLI Example: .sp .nf .ft C -salt \(aq*\(aq file.chpgrp c:\etemp\etest.txt administrators +salt \(aq*\(aq file.chgrp c:\etemp\etest.txt administrators .ft P .fi .UNINDENT @@ -324707,6 +325687,14 @@ User .sp Default is \fBMachine\fP +.UNINDENT +.TP +.B Raises +.INDENT 7.0 +.IP \(bu 2 +\fI\%SaltInvocationError\fP \-\- Invalid policy_class +.IP \(bu 2 +\fI\%CommandExecutionError\fP \-\- On failure .UNINDENT .TP .B Returns @@ -324715,9 +325703,6 @@ None: Key/value not present .TP .B Return type \fI\%bool\fP -.TP -.B Raises -\fI\%SaltInvocationError\fP \-\- Invalid policy_class .UNINDENT .sp CLI Example: @@ -324763,6 +325748,14 @@ User .sp Default is \fBMachine\fP +.UNINDENT +.TP +.B Raises +.INDENT 7.0 +.IP \(bu 2 +\fI\%SaltInvocationError\fP \-\- Invalid policy_class +.IP \(bu 2 +\fI\%CommandExecutionError\fP \-\- On failure .UNINDENT .TP .B Returns @@ -324771,9 +325764,6 @@ None: If already disabled .TP .B Return type \fI\%bool\fP -.TP -.B Raises -\fI\%SaltInvocationError\fP \-\- Invalid policy_class .UNINDENT .sp CLI Example: @@ -324969,12 +325959,6 @@ Default is \fBMachine\fP .UNINDENT .TP -.B Returns -\fBTrue\fP if successful, otherwise \fBFalse\fP -.TP -.B Return type -\fI\%bool\fP -.TP .B Raises .INDENT 7.0 .IP \(bu 2 @@ -324984,6 +325968,12 @@ Default is \fBMachine\fP .IP \(bu 2 \fI\%SaltInvocationError\fP \-\- v_data doesn\(aqt match v_type .UNINDENT +.TP +.B Returns +\fBTrue\fP if successful, otherwise \fBFalse\fP +.TP +.B Return type +\fI\%bool\fP .UNINDENT .sp CLI Example: @@ -325032,10 +326022,18 
@@ Default is \fBMachine\fP .UNINDENT .TP .B Raises +.INDENT 7.0 +.IP \(bu 2 \fI\%SaltInvocationError\fP \-\- Invalid policy class +.IP \(bu 2 +\fI\%CommandExecutionError\fP \-\- On failure +.UNINDENT .TP .B Returns -None +True if successful +.TP +.B Return type +\fI\%bool\fP .UNINDENT .sp CLI Example: @@ -326424,6 +327422,38 @@ salt \(aq*\(aq pkg.list_upgrades .UNINDENT .INDENT 0.0 .TP +.B salt.modules.win_pkg.normalize_name(name) +Nothing to do on Windows. We need this function so that Salt doesn\(aqt go +through every module looking for \fBpkg.normalize_name\fP\&. +.sp +New in version 3006.0. + +.INDENT 7.0 +.TP +.B Parameters +\fBname\fP (\fI\%str\fP) \-\- The name of the package +.TP +.B Returns +The name of the package +.TP +.B Return type +\fI\%str\fP +.UNINDENT +.sp +CLI Example: +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +salt \(aq*\(aq pkg.normalize_name git +.ft P +.fi +.UNINDENT +.UNINDENT +.UNINDENT +.INDENT 0.0 +.TP .B salt.modules.win_pkg.purge(name=None, pkgs=None, **kwargs) Package purges are not supported on Windows, this function is identical to \fBremove()\fP\&. @@ -328875,6 +329905,9 @@ values is returned. .sp Changed in version 2018.3.0: The service name can now be a glob (e.g. \fBsalt*\fP) +.sp +Changed in version 3006.0: Returns \(dqNot Found\(dq if the service is not found on the system + .INDENT 7.0 .TP .B Parameters @@ -328883,6 +329916,7 @@ Changed in version 2018.3.0: The service name can now be a glob (e.g. \fBsalt*\f .B Returns True if running, False otherwise dict: Maps service name to True if running, False otherwise +str: Not Found if the service is not found on the system .TP .B Return type \fI\%bool\fP @@ -331300,7 +332334,7 @@ existing account. Default is \fBFalse\fP \fBTrue\fP will restart the computer after a successful join. Default is \fBFalse\fP .sp -New in version 2015.8.2/2015.5.7. +New in version 2015.5.7,2015.8.2. .UNINDENT @@ -332001,7 +333035,7 @@ be \fBNone\fP\&. .sp The workgroup to join the computer to. 
Default is \fBWORKGROUP\fP .sp -New in version 2015.8.2/2015.5.7. +New in version 2015.5.7,2015.8.2. .IP \(bu 2 @@ -332013,7 +333047,7 @@ Default is \fBFalse\fP \fBTrue\fP will restart the computer after successful unjoin. Default is \fBFalse\fP .sp -New in version 2015.8.2/2015.5.7. +New in version 2015.5.7,2015.8.2. .UNINDENT @@ -337178,8 +338212,8 @@ When encoding a certificate as \fBpkcs12\fP, a name for the certificate can be i Instead of returning the certificate, write it to this file path. .TP .B overwrite -If \fBpath\fP is specified and the file exists, do not overwrite it. -Defaults to false. +If \fBpath\fP is specified and the file exists, overwrite it. +Defaults to true. .TP .B raw Return the encoded raw bytes instead of a string. Defaults to false. @@ -337636,7 +338670,7 @@ Available: \fBrsa\fP, \fBec\fP, \fBed25519\fP, \fBed448\fP\&. Defaults to \fBrsa .B keysize For \fBrsa\fP, specifies the bitlength of the private key (2048, 3072, 4096). For \fBec\fP, specifies the NIST curve to use (256, 384, 521). -Irrelevant for Edwards\-curve schemes (\fIed25519\(ga\fP, \fBed448\fP). +Irrelevant for Edwards\-curve schemes (\fBed25519\fP, \fBed448\fP). Defaults to 2048 for RSA and 256 for EC. .TP .B passphrase @@ -337789,7 +338823,7 @@ Return the encoded raw bytes instead of a string. Defaults to false. .UNINDENT .INDENT 0.0 .TP -.B salt.modules.x509_v2.encode_private_key(private_key, encoding=\(aqpem\(aq, passphrase=None, pkcs12_encryption_compat=False, raw=False) +.B salt.modules.x509_v2.encode_private_key(private_key, encoding=\(aqpem\(aq, passphrase=None, private_key_passphrase=None, pkcs12_encryption_compat=False, raw=False) Create an encoded representation of a private key. .sp CLI Example: @@ -337805,7 +338839,7 @@ salt \(aq*\(aq x509.encode_private_key /etc/pki/my.key der .UNINDENT .INDENT 7.0 .TP -.B csr +.B private_key The private key to encode. .TP .B encoding @@ -337813,6 +338847,24 @@ Specify the encoding of the resulting private key. 
It can be returned as a \fBpem\fP string, base64\-encoded \fBder\fP and base64\-encoded \fBpkcs12\fP\&. Defaults to \fBpem\fP\&. .TP +.B passphrase +If this is specified, the private key will be encrypted using this +passphrase. The encryption algorithm cannot be selected, it will be +determined automatically as the best available one. +.TP +.B private_key_passphrase +New in version 3006.2. + +.sp +If the current \fBprivate_key\fP is encrypted, the passphrase to +decrypt it. +.TP +.B pkcs12_encryption_compat +Some operating systems are incompatible with the encryption defaults +for PKCS12 used since OpenSSL v3. This switch triggers a fallback to +\fBPBESv1SHA1And3KeyTripleDESCBC\fP\&. +Please consider the \fI\%notes on PKCS12 encryption\fP\&. +.TP .B raw Return the encoded raw bytes instead of a string. Defaults to false. .UNINDENT @@ -339868,7 +340920,7 @@ salt \(aq*\(aq pkg.file_list This function is an alias of \fBlist_holds\fP\&. .INDENT 7.0 .INDENT 3.5 -Changed in version 2016.3.0,2015.8.4,2015.5.10: Function renamed from \fBpkg.get_locked_pkgs\fP to \fBpkg.list_holds\fP\&. +Changed in version 2015.5.10,2015.8.4,2016.3.0: Function renamed from \fBpkg.get_locked_pkgs\fP to \fBpkg.list_holds\fP\&. .sp List information on locked packages @@ -339936,16 +340988,47 @@ salt \(aq*\(aq pkg.get_repo myrepo basedir=/path/to/dir,/path/to/another/dir .UNINDENT .INDENT 0.0 .TP -.B salt.modules.yumpkg.group_diff(name) +.B salt.modules.yumpkg.group_diff(name, **kwargs) New in version 2014.1.0. .sp -Changed in version 2016.3.0,2015.8.4,2015.5.10: Environment groups are now supported. The key names have been renamed, +Changed in version 2015.5.10,2015.8.4,2016.3.0: Environment groups are now supported. The key names have been renamed, similar to the changes made in \fI\%pkg.group_info\fP\&. +.sp +Changed in version 3006.2: Support for \fBfromrepo\fP, \fBenablerepo\fP, and \fBdisablerepo\fP (as used +in \fI\%pkg.install\fP) has been +added. 
+ .sp Lists which of a group\(aqs packages are installed and which are not installed +.INDENT 7.0 +.TP +.B name +The name of the group to check +.TP +.B fromrepo +Restrict \fByum groupinfo\fP to the specified repo(s). +(e.g., \fByum \-\-disablerepo=\(aq*\(aq \-\-enablerepo=\(aqsomerepo\(aq\fP) +.sp +New in version 3006.2. + +.TP +.B enablerepo (ignored if \fBfromrepo\fP is specified) +Specify a disabled package repository (or repositories) to enable. +(e.g., \fByum \-\-enablerepo=\(aqsomerepo\(aq\fP) +.sp +New in version 3006.2. + +.TP +.B disablerepo (ignored if \fBfromrepo\fP is specified) +Specify an enabled package repository (or repositories) to disable. +(e.g., \fByum \-\-disablerepo=\(aqsomerepo\(aq\fP) +.sp +New in version 3006.2. + +.UNINDENT .sp CLI Example: .INDENT 7.0 @@ -339954,6 +341037,8 @@ CLI Example: .nf .ft C salt \(aq*\(aq pkg.group_diff \(aqPerl Support\(aq +salt \(aq*\(aq pkg.group_diff \(aqPerl Support\(aq fromrepo=base,updates +salt \(aq*\(aq pkg.group_diff \(aqPerl Support\(aq enablerepo=somerepo .ft P .fi .UNINDENT @@ -339961,11 +341046,11 @@ salt \(aq*\(aq pkg.group_diff \(aqPerl Support\(aq .UNINDENT .INDENT 0.0 .TP -.B salt.modules.yumpkg.group_info(name, expand=False, ignore_groups=None) +.B salt.modules.yumpkg.group_info(name, expand=False, ignore_groups=None, **kwargs) New in version 2014.1.0. .sp -Changed in version 3001,2016.3.0,2015.8.4,2015.5.10: The return data has changed. A new key \fBtype\fP has been added to +Changed in version 2015.5.10,2015.8.4,2016.3.0,3001: The return data has changed. A new key \fBtype\fP has been added to distinguish environment groups from package groups. Also, keys for the group name and group ID have been added. The \fBmandatory packages\fP, \fBoptional packages\fP, and \fBdefault packages\fP keys have been renamed @@ -339973,6 +341058,11 @@ to \fBmandatory\fP, \fBoptional\fP, and \fBdefault\fP for accuracy, as environment groups include other groups, and not packages. 
Finally, this function now properly identifies conditional packages. +.sp +Changed in version 3006.2: Support for \fBfromrepo\fP, \fBenablerepo\fP, and \fBdisablerepo\fP (as used +in \fI\%pkg.install\fP) has been +added. + .sp Lists packages belonging to a certain group .INDENT 7.0 @@ -339997,6 +341087,27 @@ expanding the same group multiple times. .sp New in version 3001. +.TP +.B fromrepo +Restrict \fByum groupinfo\fP to the specified repo(s). +(e.g., \fByum \-\-disablerepo=\(aq*\(aq \-\-enablerepo=\(aqsomerepo\(aq\fP) +.sp +New in version 3006.2. + +.TP +.B enablerepo (ignored if \fBfromrepo\fP is specified) +Specify a disabled package repository (or repositories) to enable. +(e.g., \fByum \-\-enablerepo=\(aqsomerepo\(aq\fP) +.sp +New in version 3006.2. + +.TP +.B disablerepo (ignored if \fBfromrepo\fP is specified) +Specify an enabled package repository (or repositories) to disable. +(e.g., \fByum \-\-disablerepo=\(aqsomerepo\(aq\fP) +.sp +New in version 3006.2. + .UNINDENT .sp CLI Example: @@ -340006,6 +341117,8 @@ CLI Example: .nf .ft C salt \(aq*\(aq pkg.group_info \(aqPerl Support\(aq +salt \(aq*\(aq pkg.group_info \(aqPerl Support\(aq fromrepo=base,updates +salt \(aq*\(aq pkg.group_info \(aqPerl Support\(aq enablerepo=somerepo .ft P .fi .UNINDENT @@ -340590,7 +341703,7 @@ salt \(aq*\(aq pkg.list_downloaded .INDENT 0.0 .TP .B salt.modules.yumpkg.list_holds(pattern=\(aq[\e\ew+]+(?:[.\-][^\-]+)*\(aq, full=True) -Changed in version 2016.3.0,2015.8.4,2015.5.10: Function renamed from \fBpkg.get_locked_pkgs\fP to \fBpkg.list_holds\fP\&. +Changed in version 2015.5.10,2015.8.4,2016.3.0: Function renamed from \fBpkg.get_locked_pkgs\fP to \fBpkg.list_holds\fP\&. .sp List information on locked packages @@ -341458,7 +342571,7 @@ Specify an enabled package repository (or repositories) to disable. Disable exclude from main, for a repo or for everything. (e.g., \fByum \-\-disableexcludes=\(aqmain\(aq\fP) .sp -New in version 2014.7. +New in version 2014.7.0. 
.TP .B name @@ -341802,7 +342915,7 @@ salt \(aq*\(aq zabbix.apiinfo_version .INDENT 0.0 .TP .B salt.modules.zabbix.compare_params(defined, existing, return_old_value=False) -New in version 2017.7. +New in version 2017.7.0. .sp Compares Zabbix object definition against existing Zabbix object. @@ -341838,7 +342951,7 @@ salt \(aq*\(aq zabbix.compare_params new_zabbix_object_dict existing_zabbix_onje .INDENT 0.0 .TP .B salt.modules.zabbix.configuration_import(config_file, rules=None, file_format=\(aqxml\(aq, **connection_args) -New in version 2017.7. +New in version 2017.7.0. .sp Imports Zabbix configuration specified in file to Zabbix server. @@ -341876,7 +342989,7 @@ salt \(aq*\(aq zabbix.configuration_import salt://zabbix/config/zabbix_templates .INDENT 0.0 .TP .B salt.modules.zabbix.get_object_id_by_params(obj, params=None, **connection_args) -New in version 2017.7. +New in version 2017.7.0. .sp Get ID of single Zabbix object specified by its name. @@ -341915,7 +343028,7 @@ salt \(aq*\(aq zabbix.get_object_id_by_params object_type params=zabbix_api_quer .INDENT 0.0 .TP .B salt.modules.zabbix.get_zabbix_id_mapper() -New in version 2017.7. +New in version 2017.7.0. .sp Make ZABBIX_ID_MAPPER constant available to state modules. @@ -342957,7 +344070,7 @@ salt \(aq*\(aq zabbix.run_query proxy.create \(aq{\(dqhost\(dq: \(dqzabbixproxy. .INDENT 0.0 .TP .B salt.modules.zabbix.substitute_params(input_object, extend_params=None, filter_key=\(aqname\(aq, **kwargs) -New in version 2017.7. +New in version 2017.7.0. .sp Go through Zabbix object params specification and if needed get given object ID from Zabbix API and put it back @@ -351239,7 +352352,7 @@ Takes no arguments, returns a dictionary of metadata values from Azure. Grains from cloud metadata servers at 169.254.169.254 in google compute engine .sp -New in version 3005.0. +New in version 3005. 
.INDENT 0.0 .TP @@ -353215,7 +354328,7 @@ curl \-sSi localhost:8000/minions \e POST /minions HTTP/1.1 Host: localhost:8000 Accept: application/x\-yaml -Content\-Type: application/json +Content\-Type: application/x\-www\-form\-urlencoded tgt=*&fun=status.diskusage .ft P @@ -360875,32 +361988,32 @@ Whether should retrieve details of the site the device belongs to. Whether should retrieve the prefixes of the site the device belongs to. .TP .B devices: \fBTrue\fP -New in version 3004.0. +New in version 3004. .sp Whether should retrieve physical devices. .TP .B virtual_machines: \fBFalse\fP -New in version 3004.0. +New in version 3004. .sp Whether should retrieve virtual machines. .TP .B interfaces: \fBFalse\fP -New in version 3004.0. +New in version 3004. .sp Whether should retrieve the interfaces of the device. .TP .B interface_ips: \fBFalse\fP -New in version 3004.0. +New in version 3004. .sp Whether should retrieve the IP addresses for interfaces of the device. (interfaces must be set to True as well) .TP .B api_query_result_limit: \fBUse NetBox default\fP -New in version 3004.0. +New in version 3004. .sp An integer specifying how many results should be returned for each query @@ -369486,16 +370599,16 @@ Flat inventory files should be in the regular ansible inventory format. 
.ft C # /tmp/example_roster [servers] -salt.gtmanfred.com ansible_ssh_user=gtmanfred ansible_ssh_host=127.0.0.1 ansible_ssh_port=22 ansible_ssh_pass=\(aqpassword\(aq +salt.gtmanfred.com ansible_ssh_user=gtmanfred ansible_ssh_host=127.0.0.1 ansible_ssh_port=22 ansible_ssh_pass=\(aqpassword\(aq ansible_sudo_pass=\(aqpassword\(aq [desktop] -home ansible_ssh_user=gtmanfred ansible_ssh_host=12.34.56.78 ansible_ssh_port=23 ansible_ssh_pass=\(aqpassword\(aq +home ansible_ssh_user=gtmanfred ansible_ssh_host=12.34.56.78 ansible_ssh_port=23 ansible_ssh_pass=\(aqpassword\(aq ansible_sudo_pass=\(aqpassword\(aq [computers:children] desktop servers -[names:vars] +[computers:vars] http_port=80 .ft P .fi @@ -369538,35 +370651,40 @@ There is also the option of specifying a dynamic inventory, and generating it on #!/bin/bash # filename: /etc/salt/hosts echo \(aq{ - \(dqservers\(dq: [ - \(dqsalt.gtmanfred.com\(dq - ], - \(dqdesktop\(dq: [ - \(dqhome\(dq - ], - \(dqcomputers\(dq: { - \(dqhosts\(dq: [], - \(dqchildren\(dq: [ - \(dqdesktop\(dq, - \(dqservers\(dq - ] - }, - \(dq_meta\(dq: { - \(dqhostvars\(dq: { - \(dqsalt.gtmanfred.com\(dq: { - \(dqansible_ssh_user\(dq: \(dqgtmanfred\(dq, - \(dqansible_ssh_host\(dq: \(dq127.0.0.1\(dq, - \(dqansible_sudo_pass\(dq: \(dqpassword\(dq, - \(dqansible_ssh_port\(dq: 22 - }, - \(dqhome\(dq: { - \(dqansible_ssh_user\(dq: \(dqgtmanfred\(dq, - \(dqansible_ssh_host\(dq: \(dq12.34.56.78\(dq, - \(dqansible_sudo_pass\(dq: \(dqpassword\(dq, - \(dqansible_ssh_port\(dq: 23 - } + \(dqservers\(dq: [ + \(dqsalt.gtmanfred.com\(dq + ], + \(dqdesktop\(dq: [ + \(dqhome\(dq + ], + \(dqcomputers\(dq: { + \(dqhosts\(dq: [], + \(dqchildren\(dq: [ + \(dqdesktop\(dq, + \(dqservers\(dq + ], + \(dqvars\(dq: { + \(dqhttp_port\(dq: 80 + } + }, + \(dq_meta\(dq: { + \(dqhostvars\(dq: { + \(dqsalt.gtmanfred.com\(dq: { + \(dqansible_ssh_user\(dq: \(dqgtmanfred\(dq, + \(dqansible_ssh_host\(dq: \(dq127.0.0.1\(dq, + \(dqansible_sudo_pass\(dq: \(dqpassword\(dq, + 
\(dqansible_ssh_pass\(dq: \(dqpassword\(dq, + \(dqansible_ssh_port\(dq: 22 + }, + \(dqhome\(dq: { + \(dqansible_ssh_user\(dq: \(dqgtmanfred\(dq, + \(dqansible_ssh_host\(dq: \(dq12.34.56.78\(dq, + \(dqansible_sudo_pass\(dq: \(dqpassword\(dq, + \(dqansible_ssh_pass\(dq: \(dqpassword\(dq, + \(dqansible_ssh_port\(dq: 23 + } + } } - } }\(aq .ft P .fi @@ -373424,7 +374542,7 @@ detection (no commands will be sent to minions) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -373458,7 +374576,7 @@ detection (no commands will be sent to minions) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -373651,7 +374769,7 @@ detection (no commands will be sent to minions) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -373717,7 +374835,7 @@ detection (no commands will be sent to minions) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -373751,7 +374869,7 @@ detection (no commands will be sent to minions) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -373785,7 +374903,7 @@ detection (no commands will be sent) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -373819,7 +374937,7 @@ detection (no commands will be sent) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -373853,7 +374971,7 @@ detection (no commands will be sent) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. 
.TP .B show_ip False @@ -373887,7 +375005,7 @@ detection (no commands will be sent) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -373921,7 +375039,7 @@ detection (no commands will be sent) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -373952,7 +375070,7 @@ detection (no commands will be sent to minions) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -373986,7 +375104,7 @@ detection (no commands will be sent to minions) .TP .B subset None -Pass in a CIDR range to filter minions by IP address. +Pass in a list of minion ids. .TP .B show_ip False @@ -374222,7 +375340,7 @@ This module helps include encrypted passwords in pillars, grains and salt state .INDENT 0.0 .TP .B depends -libnacl, \fI\%https://github.com/saltstack/libnacl\fP +PyNaCl, \fI\%https://github.com/pyca/pynacl\fP .UNINDENT .sp This is often useful if you wish to store your pillars in source control or @@ -374457,7 +375575,7 @@ salt\-run nacl.enc_file name=/tmp/id_rsa box_type=secretbox sk_file= .INDENT 0.0 .TP .B salt.runners.nacl.keygen(sk_file=None, pk_file=None, **kwargs) -Use libnacl to generate a keypair. +Use PyNaCL to generate a keypair. .sp If no \fIsk_file\fP is defined return a keypair. .sp @@ -377405,7 +378523,7 @@ Changed in version 2014.1.1: Runner renamed from \fBstate.sls\fP to \fBstate.orc Changed in version 2014.7.0: Runner uses the pillar variable .sp -Changed in version 2017.5: Runner uses the pillar_enc variable that allows renderers to render the pillar. +Changed in version 2017.5.0: Runner uses the pillar_enc variable that allows renderers to render the pillar. This is usable when supplying the contents of a file as pillar, and the file contains gpg\-encrypted entries. 
@@ -377505,7 +378623,7 @@ Changed in version 2014.1.1: Runner renamed from \fBstate.sls\fP to \fBstate.orc Changed in version 2014.7.0: Runner uses the pillar variable .sp -Changed in version 2017.5: Runner uses the pillar_enc variable that allows renderers to render the pillar. +Changed in version 2017.5.0: Runner uses the pillar_enc variable that allows renderers to render the pillar. This is usable when supplying the contents of a file as pillar, and the file contains gpg\-encrypted entries. @@ -377682,7 +378800,7 @@ Changed in version 2014.1.1: Runner renamed from \fBstate.sls\fP to \fBstate.orc Changed in version 2014.7.0: Runner uses the pillar variable .sp -Changed in version 2017.5: Runner uses the pillar_enc variable that allows renderers to render the pillar. +Changed in version 2017.5.0: Runner uses the pillar_enc variable that allows renderers to render the pillar. This is usable when supplying the contents of a file as pillar, and the file contains gpg\-encrypted entries. @@ -380119,12 +381237,12 @@ It also use C bindings if they are available. .INDENT 0.0 .TP .B salt.serializers.yaml.BaseDumper -alias of \fBSafeDumper\fP +alias of \fBCSafeDumper\fP .UNINDENT .INDENT 0.0 .TP .B salt.serializers.yaml.BaseLoader -alias of \fBSafeLoader\fP +alias of \fBCSafeLoader\fP .UNINDENT .INDENT 0.0 .TP @@ -380397,7 +381515,7 @@ alias of \fBSafeDumper\fP .INDENT 0.0 .TP .B salt.serializers.yamlex.BaseLoader -alias of \fBSafeLoader\fP +alias of \fBCSafeLoader\fP .UNINDENT .INDENT 0.0 .TP @@ -380968,7 +382086,7 @@ _ T{ \fI\%chocolatey\fP T} T{ -Manage Chocolatey package installs . +Manage Windows Packages using Chocolatey . T} _ T{ @@ -382670,7 +383788,7 @@ _ T{ \fI\%zabbix_template\fP T} T{ -New in version 2017.7. +New in version 2017.7.0. T} _ T{ @@ -382743,7 +383861,7 @@ _ .SS salt.states.acme .SS ACME / Let\(aqs Encrypt certificate management state .sp -New in version 2016.3. +New in version 2016.3.0. 
.sp See also the module documentation @@ -383142,7 +384260,7 @@ the above word between angle brackets (<>). .UNINDENT .UNINDENT .sp -Changed in version 2018.3. +Changed in version 2018.3.0. .sp Allows having the same section container multiple times (e.g. ). @@ -399433,7 +400551,7 @@ User to set privilege to .UNINDENT .SS salt.states.chocolatey .sp -Manage Chocolatey package installs +Manage Windows Packages using Chocolatey \&.. versionadded:: 2016.3.0 .sp \fBNOTE:\fP @@ -399456,49 +400574,54 @@ Installs a package if not already installed .IP \(bu 2 \fBname\fP (\fI\%str\fP) \-\- The name of the package to be installed. Required. .IP \(bu 2 -\fBversion\fP (\fI\%str\fP) \-\- Install a specific version of the package. Defaults to latest -version. If the version is different to the one installed then the -specified version will be installed. Default is None. +\fBversion\fP (\fI\%str\fP) \-\- Install a specific version of the package. Defaults to the latest +version. If the version is different to the one installed, then the +specified version will be installed. Default is \fBNone\fP\&. .IP \(bu 2 \fBsource\fP (\fI\%str\fP) \-\- Chocolatey repository (directory, share or remote URL, feed). -Defaults to the official Chocolatey feed. Default is None. +\fBNone\fP defaults to the official Chocolatey feed. Default is +\fBNone\fP\&. .IP \(bu 2 \fBforce\fP (\fI\%bool\fP) \-\- Reinstall the current version of an existing package. Do not use -with \fBallow_multiple\fP\&. Default is False. +with \fBallow_multiple\fP\&. Default is \fBFalse\fP\&. .IP \(bu 2 -\fBpre_versions\fP (\fI\%bool\fP) \-\- Include pre\-release packages. Default is False. +\fBpre_versions\fP (\fI\%bool\fP) \-\- Include pre\-release packages. Default is \fBFalse\fP\&. .IP \(bu 2 -\fBinstall_args\fP (\fI\%str\fP) \-\- Install arguments you want to pass to the installation process, i.e -product key or feature list. Default is None. 
+\fBinstall_args\fP (\fI\%str\fP) \-\- Install arguments you want to pass to the installation process, i.e. +product key or feature list. Default is \fBNone\fP\&. .IP \(bu 2 -\fBoverride_args\fP (\fI\%bool\fP) \-\- Set to True if you want to override the original install arguments -(for the native installer) in the package and use your own. When -this is set to False install_args will be appended to the end of the -default arguments. Default is False. +\fBoverride_args\fP (\fI\%bool\fP) \-\- Set to \fBTrue\fP to override the original install arguments (for the +native installer) in the package and use your own. When this is set +to \fBFalse\fP, install_args will be appended to the end of the +default arguments. Default is \fBFalse\fP\&. .IP \(bu 2 -\fBforce_x86\fP (\fI\%bool\fP) \-\- Force x86 (32bit) installation on 64 bit systems. Default is False. +\fBforce_x86\fP (\fI\%bool\fP) \-\- Force x86 (32bit) installation on 64bit systems. Default is +\fBFalse\fP\&. .IP \(bu 2 -\fBpackage_args\fP (\fI\%str\fP) \-\- Arguments you want to pass to the package. Default is None. +\fBpackage_args\fP (\fI\%str\fP) \-\- Arguments you want to pass to the package. Default is \fBNone\fP\&. .IP \(bu 2 \fBallow_multiple\fP (\fI\%bool\fP) \-\- .sp -Allow mulitiple versions of the package to be installed. Do not use -with \fBforce\fP\&. Does not work with all packages. Default is False. +Allow multiple versions of the package to be installed. Do not use +with \fBforce\fP\&. Does not work with all packages. Default is +\fBFalse\fP\&. .sp New in version 2017.7.0. .IP \(bu 2 \fBexecution_timeout\fP (\fI\%str\fP) \-\- Chocolatey execution timeout value you want to pass to the -installation process. Default is None. +installation process. Default is \fBNone\fP\&. 
.UNINDENT .UNINDENT +.sp +Example: .INDENT 7.0 .INDENT 3.5 .sp .nf .ft C -Installsomepackage: +install_some_package: chocolatey.installed: \- name: packagename \- version: \(aq12.04\(aq @@ -399512,35 +400635,33 @@ Installsomepackage: .INDENT 0.0 .TP .B salt.states.chocolatey.source_present(name, source_location, username=None, password=None, force=False, priority=None) -Instructs Chocolatey to add a source if not already present. +Adds a Chocolatey source if not already present. .INDENT 7.0 .TP -.B name -The name of the source to be added as a chocolatey repository. -.TP -.B source -Location of the source you want to work with. -.TP -.B username -Provide username for chocolatey sources that need authentication +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBname\fP (\fI\%str\fP) \-\- The name of the source to be added as a chocolatey repository. +.IP \(bu 2 +\fBsource\fP (\fI\%str\fP) \-\- Location of the source you want to work with. +.IP \(bu 2 +\fBusername\fP (\fI\%str\fP) \-\- The username for a chocolatey source that needs authentication credentials. -.TP -.B password -Provide password for chocolatey sources that need authentication +.IP \(bu 2 +\fBpassword\fP (\fI\%str\fP) \-\- The password for a chocolatey source that needx authentication credentials. -.TP -.B force -Salt will not modify a existing repository with the same name. Set this -option to true to update an existing repository. -.TP -.B priority -The priority order of this source as compared to other sources, -lower is better. Defaults to 0 (no priority). All priorities +.IP \(bu 2 +\fBforce\fP (\fI\%bool\fP) \-\- Salt will not modify an existing repository with the same name. Set +this option to \fBTrue\fP to update an existing repository. +.IP \(bu 2 +\fBpriority\fP (\fI\%int\fP) \-\- The priority order of this source as compared to other sources. +Lower is better. Defaults to 0 (no priority). 
All priorities above 0 will be evaluated first, then zero\-based values will be evaluated in config file order. .UNINDENT +.UNINDENT .sp -CLI Example: +Example: .INDENT 7.0 .INDENT 3.5 .sp @@ -399561,32 +400682,34 @@ add_some_source: .INDENT 0.0 .TP .B salt.states.chocolatey.uninstalled(name, version=None, uninstall_args=None, override_args=False) -Uninstalls a package +Uninstalls a chocolatey package .INDENT 7.0 .TP -.B name -The name of the package to be uninstalled -.TP -.B version -Uninstalls a specific version of the package. Defaults to latest +.B Parameters +.INDENT 7.0 +.IP \(bu 2 +\fBname\fP (\fI\%str\fP) \-\- The name of the package to be uninstalled. Required. +.IP \(bu 2 +\fBversion\fP (\fI\%str\fP) \-\- Uninstalls a specific version of the package. Defaults to the latest version installed. -.TP -.B uninstall_args -A list of uninstall arguments you want to pass to the uninstallation -process i.e product key or feature list -.TP -.B override_args -Set to true if you want to override the original uninstall arguments ( -for the native uninstaller)in the package and use your own. -When this is set to False uninstall_args will be appended to the end of -the default arguments +.IP \(bu 2 +\fBuninstall_args\fP (\fI\%str\fP) \-\- A list of uninstall arguments you want to pass to the uninstallation +process, i.e. product key or feature list +.IP \(bu 2 +\fBoverride_args\fP (\fI\%str\fP) \-\- Set to \fBTrue\fP if you want to override the original uninstall +arguments (for the native uninstaller) in the package and use your +own. 
When this is set to \fBFalse\fP, uninstall_args will be appended +to the end of the default arguments +.UNINDENT .UNINDENT +.sp +Example: .INDENT 7.0 .INDENT 3.5 .sp .nf .ft C -Removemypackage: +remove_my_package: chocolatey.uninstalled: \- name: mypackage \- version: \(aq21.5\(aq @@ -399598,7 +400721,7 @@ Removemypackage: .INDENT 0.0 .TP .B salt.states.chocolatey.upgraded(name, version=None, source=None, force=False, pre_versions=False, install_args=None, override_args=False, force_x86=False, package_args=None) -Upgrades a package. Will install the package if not installed. +Upgrades a chocolatey package. Will install the package if not installed. .sp New in version 2018.3.0. @@ -399619,7 +400742,7 @@ Defaults to the official Chocolatey feed. Default is \fBNone\fP\&. \fBforce\fP (\fI\%bool\fP) \-\- \fBTrue\fP will reinstall an existing package with the same version. Default is \fBFalse\fP\&. .IP \(bu 2 -\fBpre_versions\fP (\fI\%bool\fP) \-\- \fBTrue\fP will nclude pre\-release packages. Default is \fBFalse\fP\&. +\fBpre_versions\fP (\fI\%bool\fP) \-\- \fBTrue\fP will include pre\-release packages. Default is \fBFalse\fP\&. .IP \(bu 2 \fBinstall_args\fP (\fI\%str\fP) \-\- Install arguments you want to pass to the installation process, i.e product key or feature list. Default is \fBNone\fP\&. @@ -399629,12 +400752,14 @@ native installer) in the package and use those specified in \fBinstall_args\fP\&. \fBFalse\fP will append install_args to the end of the default arguments. Default is \fBFalse\fP\&. .IP \(bu 2 -\fBforce_x86\fP (\fI\%bool\fP) \-\- \fBTrue\fP forces 32bit installation on 64 bit systems. Default is +\fBforce_x86\fP (\fI\%bool\fP) \-\- \fBTrue\fP forces 32bit installation on 64bit systems. Default is \fBFalse\fP\&. .IP \(bu 2 \fBpackage_args\fP (\fI\%str\fP) \-\- Arguments you want to pass to the package. Default is \fBNone\fP\&. 
.UNINDENT .UNINDENT +.sp +Example: .INDENT 7.0 .INDENT 3.5 .sp @@ -405949,6 +407074,9 @@ specified either using \fBrepo:tag\fP notation, or just the repo name (in which case a tag of \fBlatest\fP is assumed). .INDENT 7.0 .TP +.B name +The name of the docker image. +.TP .B images Run this state on more than one image at a time. The following two examples accomplish the same thing: @@ -405988,7 +407116,6 @@ all the deletions in a single run, rather than executing the state separately on each image (as it would in the first example). .TP .B force -False Salt will fail to remove any images currently in use by a container. Set this option to true to remove the image even if it is already present. @@ -406065,6 +407192,9 @@ myuser/myimage: .UNINDENT .INDENT 7.0 .TP +.B name +The name of the docker image. +.TP .B tag Tag name for the image. Required when using \fBbuild\fP, \fBload\fP, or \fBsls\fP to create the image, but optional if pulling from a repository. @@ -406124,10 +407254,13 @@ Changed in version 2018.3.0: The \fBtag\fP must be manually specified using the .TP .B force -False Set this parameter to \fBTrue\fP to force Salt to pull/build/load the image even if it is already present. .TP +.B insecure_registry +If \fBTrue\fP, the Docker client will permit the use of insecure +(non\-HTTPS) registries. +.TP .B client_timeout Timeout in seconds for the Docker client. This is not a timeout for the state, but for receiving a response from the API. @@ -406206,6 +407339,10 @@ Values passed this way will override Pillar values set via .sp New in version 2018.3.0. +.TP +.B kwargs +Additional keyword arguments to pass to +\fI\%docker.build\fP .UNINDENT .UNINDENT .SS salt.states.docker_network @@ -412027,6 +413164,8 @@ tomdroid\-src\-0.7.3.tar.gz: .fi .UNINDENT .UNINDENT +.sp +source_hash is ignored if the file hosted is not on a HTTP, HTTPS or FTP server. .UNINDENT .UNINDENT .INDENT 7.0 @@ -414087,7 +415226,7 @@ New in version 3006.0. 
.INDENT 0.0 .TP .B salt.states.file.tidied(name, age=0, matches=None, rmdirs=False, size=0, exclude=None, full_path_match=False, followlinks=False, time_comparison=\(aqatime\(aq, age_size_logical_operator=\(aqOR\(aq, age_size_only=None, rmlinks=True, **kwargs) -Changed in version 3006.0,3005. +Changed in version 3005,3006.0. .sp Remove unwanted files based on specific criteria. @@ -424514,7 +425653,7 @@ New in version 2018.3.0. Assume yes to all prompts .UNINDENT .sp -New in version 3002.0. +New in version 3002. .INDENT 7.0 .TP @@ -432989,13 +434128,13 @@ name of the bridge name of the parent bridge (if the bridge shall be created as a fake bridge). If specified, vlan must also be specified. .IP \(bu 2 -\fBversionadded:\fP (\fI\&..\fP) \-\- 3006: +\fBversionadded:\fP (\fI\&..\fP) \-\- 3006.0: .IP \(bu 2 \fBvlan\fP \-\- int VLAN ID of the bridge (if the bridge shall be created as a fake bridge). If specified, parent must also be specified. .IP \(bu 2 -\fBversionadded:\fP \-\- 3006: +\fBversionadded:\fP \-\- 3006.0: .UNINDENT .UNINDENT .UNINDENT @@ -433003,7 +434142,7 @@ bridge). If specified, parent must also be specified. .sp Management of Open vSwitch database records. .sp -New in version 3006. +New in version 3006.0. .INDENT 0.0 .TP @@ -434396,7 +435535,7 @@ List of dict representation of the required storage policies A state module to manage Pacemaker/Corosync clusters with the Pacemaker/Corosync configuration system (PCS) .sp -New in version 2016.110. +New in version 2016.11.0. .INDENT 0.0 .TP @@ -435984,10 +437123,15 @@ New in version 2015.8.0. .sp Changed in version 2016.11.0: Added support in \fBpacman\fP +.sp +Changed in version 3006.2: For RPM\-based systems, support for \fBfromrepo\fP, \fBenablerepo\fP, and +\fBdisablerepo\fP (as used in \fI\%pkg.install\fP) has been added. This allows one to, for +example, use \fBenablerepo\fP to perform a group install from a repo that +is otherwise disabled. + .sp Ensure that an entire package group is installed. 
This state is currently -only supported for the \fI\%yum\fP and \fBpacman\fP -package managers. +only supported for the \fI\%yum\fP and \fBpacman\fP package managers. .INDENT 7.0 .TP .B skip @@ -436029,6 +437173,72 @@ Load Balancer: Changed in version 2016.3.0: This option can no longer be passed as a comma\-separated list, it must now be passed as a list (as shown in the above example). +.UNINDENT +.sp +\fBNOTE:\fP +.INDENT 7.0 +.INDENT 3.5 +The below options are only supported on RPM\-based systems +.UNINDENT +.UNINDENT +.INDENT 7.0 +.TP +.B fromrepo +Restrict \fByum groupinfo\fP to the specified repo(s). +(e.g., \fByum \-\-disablerepo=\(aq*\(aq \-\-enablerepo=\(aqsomerepo\(aq\fP) +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +MyGroup: + pkg.group_installed: + \- fromrepo: base,updates +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +New in version 3006.2. + +.TP +.B enablerepo (ignored if \fBfromrepo\fP is specified) +Specify a disabled package repository (or repositories) to enable. +(e.g., \fByum \-\-enablerepo=\(aqsomerepo\(aq\fP) +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +MyGroup: + pkg.group_installed: + \- enablerepo: myrepo +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +New in version 3006.2. + +.TP +.B disablerepo (ignored if \fBfromrepo\fP is specified) +Specify an enabled package repository (or repositories) to disable. +(e.g., \fByum \-\-disablerepo=\(aqsomerepo\(aq\fP) +.INDENT 7.0 +.INDENT 3.5 +.sp +.nf +.ft C +MyGroup: + pkg.group_installed: + \- disablerepo: epel +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +New in version 3006.2. + .UNINDENT .sp \fBNOTE:\fP @@ -438125,6 +439335,16 @@ The name of the package repo, as it would be referred to when running the regular package manager commands. .UNINDENT .sp +\fBNOTE:\fP +.INDENT 7.0 +.INDENT 3.5 +On apt\-based systems this must be the complete source entry. For +example, if you include \fB[arch=amd64]\fP, and a repo matching the +specified URI, dist, etc. 
exists _without_ an architecture, then no +changes will be made and the state will report a \fBTrue\fP result. +.UNINDENT +.UNINDENT +.sp \fBFEDORA/REDHAT\-SPECIFIC OPTIONS\fP .INDENT 7.0 .TP @@ -443068,6 +444288,9 @@ The unique name that is given to the scheduled job. .TP .B persist Whether changes to the scheduled job should be saved, defaults to True. +.TP +.B offline +Delete the scheduled job to the Salt minion when the Salt minion is not running. .UNINDENT .UNINDENT .INDENT 0.0 @@ -443190,6 +444413,9 @@ date strings using the dateutil format. Requires python\-dateutil. .B run_after_skip_range Whether the scheduled job should run immediately after the skip_during_range time period ends. +.TP +.B offline +Add the scheduled job to the Salt minion when the Salt minion is not running. .UNINDENT .UNINDENT .SS salt.states.selinux @@ -443592,6 +444818,13 @@ If your service states are running into trouble with init system detection, please see the \fI\%Overriding Virtual Module Providers\fP section of Salt\(aqs module documentation to work around possible errors. .sp +For services managed by systemd, the systemd_service module includes a built\-in +feature to reload the daemon when unit files are changed or extended. This +feature is used automatically by the service state and the systemd_service +module when running on a systemd minion, so there is no need to set up your own +methods of reloading the daemon. If you need to manually reload the daemon for +some reason, you can use the \fI\%systemd_service.systemctl_reload\fP function provided by Salt. +.sp \fBNOTE:\fP .INDENT 0.0 .INDENT 3.5 @@ -454957,7 +456190,7 @@ Create CA private key: \- keysize: 4096 \- backup: true \- require: - \- file: /etc/pki + \- file: /etc/pki/issued_certs Create self\-signed CA certificate: x509.certificate_managed: @@ -455436,7 +456669,7 @@ Available: \fBrsa\fP, \fBec\fP, \fBed25519\fP, \fBed448\fP\&. 
Defaults to \fBrsa .B keysize For \fBrsa\fP, specifies the bitlength of the private key (2048, 3072, 4096). For \fBec\fP, specifies the NIST curve to use (256, 384, 521). -Irrelevant for Edwards\-curve schemes (\fIed25519\(ga\fP, \fBed448\fP). +Irrelevant for Edwards\-curve schemes (\fBed25519\fP, \fBed448\fP). Defaults to 2048 for RSA and 256 for EC. .TP .B passphrase @@ -455606,7 +456839,7 @@ The message to send to the XMPP user .sp Management of Zabbix Action object over Zabbix API. .sp -New in version 2017.7. +New in version 2017.7.0. .INDENT 0.0 .TP @@ -456010,7 +457243,7 @@ make_new_mediatype: .UNINDENT .SS salt.states.zabbix_template .sp -New in version 2017.7. +New in version 2017.7.0. .sp Management of Zabbix Template object over Zabbix API. @@ -456565,7 +457798,7 @@ override host usermacro: .sp Management of Zabbix Valuemap object over Zabbix API. .sp -New in version 2017.7. +New in version 2017.7.0. .INDENT 0.0 .TP @@ -456687,7 +457920,7 @@ installed2 .UNINDENT .INDENT 0.0 .TP -.B salt.states.zcbuildout.installed(name, config=\(aqbuildout.cfg\(aq, quiet=False, parts=None, user=None, env=(), buildout_ver=None, test_release=False, distribute=None, new_st=None, offline=False, newest=False, python=\(aq/opt/actions\-runner/_work/salt/salt/.tools\-venvs/docs/bin/python\(aq, debug=False, verbose=False, unless=None, onlyif=None, use_vt=False, loglevel=\(aqdebug\(aq, **kwargs) +.B salt.states.zcbuildout.installed(name, config=\(aqbuildout.cfg\(aq, quiet=False, parts=None, user=None, env=(), buildout_ver=None, test_release=False, distribute=None, new_st=None, offline=False, newest=False, python=\(aq/opt/actions\-runner/_work/salt/salt/.tools\-venvs/py3.10/docs/bin/python\(aq, debug=False, verbose=False, unless=None, onlyif=None, use_vt=False, loglevel=\(aqdebug\(aq, **kwargs) Install buildout in a specific directory .sp It is a thin wrapper to modules.buildout.buildout @@ -460254,7 +461487,7 @@ Return all of the files names in all available environments .INDENT 
0.0 .TP .B salt.wheel.file_roots.read(path, saltenv=\(aqbase\(aq) -Read the contents of a text file, if the file is binary then +Read the contents of a text file, if the file is binary then ignore it .UNINDENT .INDENT 0.0 .TP @@ -460873,7 +462106,7 @@ to execute those modules instead. Each module type has a corresponding loader function. .INDENT 0.0 .TP -.B salt.loader.minion_mods(opts, context=None, utils=None, whitelist=None, initial_load=False, loaded_base_name=None, notify=False, static_modules=None, proxy=None) +.B salt.loader.minion_mods(opts, context=None, utils=None, whitelist=None, initial_load=False, loaded_base_name=None, notify=False, static_modules=None, proxy=None, file_client=None) Load execution modules .sp Returns a dictionary of execution modules appropriate for the current @@ -460966,7 +462199,7 @@ testmod[\(aqtest.ping\(aq]() .UNINDENT .INDENT 0.0 .TP -.B salt.loader.states(opts, functions, utils, serializers, whitelist=None, proxy=None, context=None, loaded_base_name=None) +.B salt.loader.states(opts, functions, utils, serializers, whitelist=None, proxy=None, context=None, loaded_base_name=None, file_client=None) Returns the state modules .INDENT 7.0 .TP @@ -463004,6 +464237,234 @@ file\-in\-user\-home: .sp This section contains details on the Windows Package Manager, and specific information you need to use Salt on Windows. +.SS Multi\-minion setup on Windows +.sp +There may be a scenario where having a minion running in the context of the +current, logged\-in user would be useful. For example, the normal minion running +under the service account would perform machine\-wide, administrative tasks. The +minion running under the user context could be launched when the user logs in +and would be able to perform configuration tasks as if it were the user itself. +.sp +The steps required to do this are as follows: +.INDENT 0.0 +.IP 1. 3 +Create new root_dir +.IP 2. 3 +Set root_dir permissions +.IP 3. 3 +Create directory structure +.IP 4. 
3 +Write minion config +.IP 5. 3 +Start the minion +.IP 6. 3 +Register the minion as a service (optional) +.UNINDENT +.sp +\fBNOTE:\fP +.INDENT 0.0 +.INDENT 3.5 +The Salt Project has created a powershell script that will configure an +additional minion on the system for you. It can be found in the root of the +Salt installation. The script is named \fBmulti\-minion.ps1\fP\&. You can get +help on how to use the script by running the following in a PowerShell +prompt: +.sp +\fBGet\-Help .\emulti\-minion.ps1 \-Detailed\fP +.UNINDENT +.UNINDENT +.sp +The following guide explains these steps in more detail. +.SS 1. Create new \fBroot_dir\fP +.sp +The minion requires a root directory to store config, cache, logs, etc. The user +must have full permissions to this directory. The easiest way to do this is to +put the \fBroot_dir\fP in the Local AppData directory (\fB$env:LocalAppData\fP). +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +New\-Item \-Path \(dq$env:LocalAppData\eSalt Project\eSalt\(dq \-Type Directory +.ft P +.fi +.UNINDENT +.UNINDENT +.SS 2. Set \fBroot_dir\fP permissions +.sp +The user running Salt requires full access to the \fBroot_dir\fP\&. If you have +placed the root_dir in a location that the user does not have access to, you\(aqll +need to give the user full permissions to that directory. Replace the + in this example with your own configuration information. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +$RootDir = \(dq\(dq +$User = \(dq\(dq +$acl = Get\-Acl \-Path \(dq$RootDir\(dq +$access_rule = New\-Object System.Security.AccessControl.FileSystemAccessRule($User, \(dqModify\(dq, \(dqAllow\(dq) +$acl.AddAccessRule($access_rule) +Set\-Acl \-Path \(dq$RootDir\(dq \-AclObject $acl +.ft P +.fi +.UNINDENT +.UNINDENT +.SS 3. Create directory structure +.sp +Salt expects a certain directory structure to be present to avoid unnecessary +messages in the logs. This is usually handled by the installer. Since you\(aqre +running your own instance, you need to do it. 
Make sure the following +directories are present: +.INDENT 0.0 +.INDENT 3.5 +.INDENT 0.0 +.IP \(bu 2 +root_dir\econf\eminion.d +.IP \(bu 2 +root_dir\econf\epki +.IP \(bu 2 +root_dir\evar\elog\esalt +.IP \(bu 2 +root_dir\evar\erun +.IP \(bu 2 +root_dir\evar\ecache\esalt\eminion\eextmods\egrains +.IP \(bu 2 +root_dir\evar\ecache\esalt\eminion\eproc +.UNINDENT +.UNINDENT +.UNINDENT +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +$RootDir = \(dq\(dq +$cache_dir = \(dq$RootDir\evar\ecache\esalt\eminion\(dq +New\-Item \-Path \(dq$RootDir\econf\(dq \-Type Directory +New\-Item \-Path \(dq$RootDir\econf\eminion.d\(dq \-Type Directory +New\-Item \-Path \(dq$RootDir\econf\epki\(dq \-Type Directory +New\-Item \-Path \(dq$RootDir\evar\elog\esalt\(dq \-Type Directory +New\-Item \-Path \(dq$RootDir\evar\erun\(dq \-Type Directory +New\-Item \-Path \(dq$cache_dir\eextmods\egrains\(dq \-Type Directory +New\-Item \-Path \(dq$cache_dir\eproc\(dq \-Type Directory +.ft P +.fi +.UNINDENT +.UNINDENT +.SS 4. Write minion config +.sp +The minion will need its own config, separate from the system minion config. +This config tells the minion where everything is located in the file structure +and also defines the master and minion id. Create a minion config file named +\fBminion\fP in the conf directory. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +New\-Item \-Path \(dq$env:LocalAppData\eSalt Project\eSalt\econf\eminion\(dq \-Type File +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +Make sure the config file has at least the following contents: +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +master: +id: + +root_dir: +log_file: \eval\elog\esalt\eminion +utils_dirs: + \- \evar\ecache\esalt\eminion\eextmods +winrepo_dir: \esrv\esalt\ewin\erepo +winrepo_dir_ng: \esrv\esalt\ewin\erepo\-ng + +file_roots: + base: + \- \esrv\esalt + \- \esrv\espm\esalt + +pillar_roots: + base: + \- \esrv\epillar + \- \esrv\espm\epillar + +thorium_roots: + base: + \- \esrv\ethorium +.ft P +.fi +.UNINDENT +.UNINDENT +.SS 5. 
Run the minion +.sp +Everything is now set up to run the minion. You can start the minion as you +would normally, but you need to specify the full path to the config file you +created above. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +salt\-minion.exe \-c \econf +.ft P +.fi +.UNINDENT +.UNINDENT +.SS 6. Register the minion as a service (optional) +.sp +You can also register the minion as a service, but you need to understand the +implications of doing so. +.INDENT 0.0 +.IP \(bu 2 +You will need to have administrator privileges to register this minion +service. +.IP \(bu 2 +You will need the password to the user account that will be running the +minion. +.IP \(bu 2 +If the user password changes, you will have to update the service definition +to reflect the new password. +.IP \(bu 2 +The minion runs all the time under the user context, whether that user is +logged in or not. +.IP \(bu 2 +This requires great trust from the user as the minion will be able to perform +operations under the user\(aqs name without the user knowing, whether they are +logged in or not. +.IP \(bu 2 +If you decide to run the new minion under the Local System account, it might +as well just be a normal minion. +.IP \(bu 2 +The helper script does not support registering the second minion as a service. +.UNINDENT +.sp +To register the minion as a service, use the \fBssm.exe\fP binary that came with +the Salt installation. 
Run the following commands, replacing \fB\fP, +\fB\fP, \fB\fP, and \fB\fP as necessary: +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +ssm.exe install \(dqsalt\-minion.exe\(dq \(dq\-c \(ga\(dq\econf\(ga\(dq \-l quiet\(dq +ssm.exe set Description \(dqSalt Minion \(dq +ssm.exe set Start SERVICE_AUTO_START +ssm.exe set AppStopMethodConsole 24000 +ssm.exe set AppStopMethodWindow 2000 +ssm.exe set AppRestartDelay 60000 +ssm.exe set ObjectName \(dq.\e\(dq \(dq\(dq +.ft P +.fi +.UNINDENT +.UNINDENT .SS Windows Package Manager .SS Introduction .sp @@ -466055,7 +467516,7 @@ Now you can run your tests: .sp .nf .ft C -python \-m nox \-e \(dqpytest\-3.7(coverage=False)\(dq \-\- tests/unit/cli/test_batch.py +python \-m nox \-e \(dqtest\-3(coverage=False)\(dq \-\- tests/unit/cli/test_batch.py .ft P .fi .UNINDENT @@ -466070,13 +467531,48 @@ this: .sp .nf .ft C -python \-m nox \-e \(dqpytest\-3.7(coverage=False)\(dq \-\- tests/unit/cli/test_batch.py; espeak \(dqTests done, woohoo!\(dq +python \-m nox \-e \(dqtest\-3(coverage=False)\(dq \-\- tests/unit/cli/test_batch.py; espeak \(dqTests done, woohoo!\(dq .ft P .fi .UNINDENT .UNINDENT .sp That way you don\(aqt have to keep monitoring the actual test run. +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +python \-m nox \-e \(dqtest\-3(coverage=False)\(dq \-\- \-\-core\-tests +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +You can enable or disable test groups locally by passing their respected flag: +.INDENT 0.0 +.IP \(bu 2 +\-\-no\-fast\-tests \- Tests that are ~10s or faster. Fast tests make up ~75% of tests and can run in 10 to 20 minutes. +.IP \(bu 2 +\-\-slow\-tests \- Tests that are ~10s or slower. +.IP \(bu 2 +\-\-core\-tests \- Tests of any speed that test the root parts of salt. +.IP \(bu 2 +\-\-flaky\-jail \- Test that need to be temporarily skipped. +.UNINDENT +.sp +In your PR, you can enable or disable test groups by setting a label. +All fast, slow, and core tests specified in the change file will always run. 
+.INDENT 0.0 +.IP \(bu 2 +test:no\-fast +.IP \(bu 2 +test:core +.IP \(bu 2 +test:slow +.IP \(bu 2 +test:flaky\-jail +.UNINDENT .SS Changelog and commit! .sp When you write your commit message you should use imperative style. Do @@ -466099,7 +467595,7 @@ But that advice is backwards for the changelog. We follow the our changelog, and use towncrier to generate it for each release. As a contributor, all that means is that you need to add a file to the \fBsalt/changelog\fP directory, using the \fB.\fP format. For -instanch, if you fixed issue 123, you would do: +instance, if you fixed issue 123, you would do: .INDENT 0.0 .INDENT 3.5 .sp @@ -466122,6 +467618,9 @@ If someone isn\(aqt an expert in this area, what will they need to know? .sp This will also help you out, because when you go to create the PR it will automatically insert the body of your commit messages. +.sp +See the \fI\%changelog\fP +docs for more information. .SS Pull request time! .sp Once you\(aqve done all your dev work and tested locally, you should check @@ -467842,6 +469341,78 @@ changes outside of the salt directory .UNINDENT .UNINDENT .UNINDENT +.SS Release Notes +.sp +You can edit the release notes to highlight a new feature being added +to a given release. The release notes are templatized with Jinja and +are generated at release time. +.SS How do I edit the release notes +.sp +To edit the release notes you need to look in doc/topics/releases/templates +for your given release and edit the template. Do not edit the release note +files in doc/topics/releases/, as this will be written over with the content +in the template file. For example, if you want to add content to the 3006.0 +release notes you would edit the doc/topics/releases/templates/3006.0.md.template +file. Do not edit the changelog portion of the template file, since that is +auto generated with the content generated for the changelog for each release. 
+.SS How to generate the release notes +.sp +This step is only used when we need to generate the release notes before releasing. +You should NOT need to run these steps as they are ran in the pipeline, but this +is documented so you can test your changes to the release notes template. +.sp +To generate the release notes requires the \fItools\fP command. The instructions below +will detail how to install and use \fItools\fP\&. +.SS Installing \fItools\fP +.sp +To view the output the release notes will produce before generating them +you can run \fItools\fP in draft mode: +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +tools changelog update\-release\-notes \-\-draft +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +To generate the release notes just remove the \fI\-\-draft\fP argument: +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +tools changelog update\-release\-notes +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +To specify a specific Salt version you add that version as an argument: +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +tools changelog update\-release\-notes 3006.0 +.ft P +.fi +.UNINDENT +.UNINDENT +.sp +To only generate the template for a new release +.INDENT 0.0 +.INDENT 3.5 +.sp +.nf +.ft C +tools changelog update\-release\-notes \-\-template\-only +.ft P +.fi +.UNINDENT +.UNINDENT .SS Reporting Bugs .sp Salt uses GitHub to track open issues and feature requests. @@ -468349,13 +469920,9 @@ The following dunder dictionaries are always defined, but may be empty .UNINDENT .SS __opts__ .sp -\&..versionchanged:: 3006.0 -.INDENT 0.0 -.INDENT 3.5 -The \fB__opts__\fP dictionary can now be accessed via +Changed in version 3006.0: The \fB__opts__\fP dictionary can now be accessed via \fBcontext\(ga\fP\&. -.UNINDENT -.UNINDENT + .sp Defined in: All modules .sp @@ -468449,13 +470016,6 @@ When running an execution module \fB__context__\fP persists across all module executions until the modules are refreshed; such as when \fI\%saltutil.sync_all\fP or \fI\%state.apply\fP are executed. 
-.sp
-A great place to see how to use \fB__context__\fP is in the cp.py module in
salt/modules/cp.py. The fileclient authenticates with the master when it is
instantiated and then is used to copy files to the minion. Rather than create a
new fileclient for each file that is to be copied down, one instance of the
fileclient is instantiated in the \fB__context__\fP dictionary and is reused for
each file. Here is an example from salt/modules/cp.py:
.INDENT 0.0
.INDENT 3.5
.sp
@@ -468510,6 +470070,14 @@ Defined in: State
.SS __sdb__
.sp
Defined in: SDB
+.SS __file_client__
+.sp
+Changed in version 3006.5.
+
+.sp
+The \fB__file_client__\fP dunder was added to states and execution modules. This
+enables the use of a file client without having to instantiate one in
+the module.
.SS Configuration Options
.sp
A number of configuration options can affect the load process. This is a quick
@@ -475184,7 +476752,7 @@ about the version numbering scheme.
.UNINDENT
.SS Upcoming release
(release\-3006.0)=
-.SS Salt 3006.0 release notes \- Codename Sulfur
+.SS Salt 3006.0 release notes
.SS Onedir packaging
.sp
Going forward from the 3006.0 release, the Salt Project will only provide onedir
+.sp
+Since the Salt Master is running as a different user, you will need
+to ensure you set the owner and group to \fBsalt\fP for your file_roots
+and pillar_roots(commonly \fB/srv/salt\fP and \fB/srv/pillar\fP).
+.sp
+If you are running a Salt Master, Salt\-Api and a Salt Minion on the same
+host using the new \fBsalt\fP user and you install a pip dependency into
+the onedir environment using \fBsalt\-pip\fP or the \fBpip\fP module, you
+need to chown the directory \fB/opt/saltstack/salt/lib//site\-packages/\fP
+with the \fBsalt\fP user and group.
+.SS Caveat of salt\-pip
+.sp
+Salt ships with a wrapper script around pip called \fBsalt\-pip\fP\&. Users should
+use \fBsalt\-pip\fP to install any python packages needed to extend Salt.
+\fBsalt\-pip\fP installs python packages into an \fBextras\-3.10\fP directory located
+in the root of the onedir directory, by setting the \fB\-\-target\fP argument for
+pip. This ensures those packages remain installed when upgrading Salt. There is
+a known bug in pip when using \fB\-\-target\fP where scripts and other non\-python
+assets may not be cleaned up properly when un\-installing. The Salt team is
+working to resolve this bug in the up\-stream pip project.
.SS Dropping support for Python 3.5 and 3.6
.sp
Python 3.5 and 3.6 will no longer be supported by Salt since they
@@ -475202,7 +476798,8 @@ for more information.
.sp
All netapi clients, which provide the functionality to \fBsalt\-api\fP, will now
be disabled by default as a security precaution. If you use \fBsalt\-api\fP, you
-must add the new \fBnetapi_enable_clients\fP option to your salt master config.This is a breaking change and the \fBsalt\-api\fP will not function without this
+must add the new \fBnetapi_enable_clients\fP option to your salt master config.
+This is a breaking change and the \fBsalt\-api\fP will not function without this
new configuration option. See \fI\%Enabling netapi client interfaces\fP for
more information.
.SS How do I migrate to the onedir packages? @@ -475259,6 +476856,8 @@ Removed the PyObjC dependency. This addresses problems with building a one dir build for macOS. It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir(because it would be tied to the macOS version where the onedir was built). Since it\(aqs currently not being used, it\(aqs removed. \fI\%#62432\fP +.IP \(bu 2 +Removed \fBSixRedirectImporter\fP from Salt. Salt hasn\(aqt shipped \fBsix\fP since Salt 3004. \fI\%#63874\fP .UNINDENT .SS Deprecated .INDENT 0.0 @@ -475327,6 +476926,8 @@ Stop relying on \fBsalt/_version.py\fP to write Salt\(aqs version. Instead use \ Set enable_fqdns_grains to be False by default. \fI\%#63595\fP .IP \(bu 2 Changelog snippet files must now have a \fB\&.md\fP file extension to be more explicit on what type of rendering is done when they are included in the main \fBCHANGELOG.md\fP file. \fI\%#63710\fP +.IP \(bu 2 +Upgraded to \fBrelenv==0.9.0\fP \fI\%#63883\fP .UNINDENT .SS Fixed .INDENT 0.0 @@ -475618,6 +477219,8 @@ Change startup ReqServer log messages from error to info level. \fI\%#62728\fP .IP \(bu 2 Fix kmod.* functions hard code relative command name \fI\%#62772\fP .IP \(bu 2 +Remove mako as a dependency in Windows and macOS. \fI\%#62785\fP +.IP \(bu 2 Fix mac_brew_pkg to work with null taps \fI\%#62793\fP .IP \(bu 2 Fixing a bug when listing the running schedule if \(dqschedule.enable\(dq and/or \(dqschedule.disable\(dq has been run, where the \(dqenabled\(dq items is being treated as a schedule item. \fI\%#62795\fP @@ -475687,17 +477290,60 @@ TCP transport documentation now contains proper master/minion\-side filtering in .IP \(bu 2 Fixed gpg.verify does not respect gnupghome \fI\%#63145\fP .IP \(bu 2 +User responsible for the runner is now correctly reported in the events on the event bus for the runner. 
\fI\%#63148\fP +.IP \(bu 2 Made pillar cache pass extra minion data as well \fI\%#63208\fP .IP \(bu 2 Fix serious performance issues with the file.tidied module \fI\%#63231\fP .IP \(bu 2 +Fix rpm_lowpkg version comparison logic when using rpm\-vercmp and only one version has a release number. \fI\%#63317\fP +.IP \(bu 2 Import StrictVersion and LooseVersion from setuptools.distutils.verison or setuptools._distutils.version, if first not available \fI\%#63350\fP .IP \(bu 2 +\fBservice.status\fP on Windows does no longer throws a CommandExecutionError if +the service is not found on the system. It now returns \(dqNot Found\(dq instead. \fI\%#63577\fP +.IP \(bu 2 When the shell is passed as powershell or pwsh, only wrapper the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended. \fI\%#63590\fP .IP \(bu 2 LGPO: Added support for \(dqRelax minimum password length limits\(dq \fI\%#63596\fP .IP \(bu 2 +Fixed the ability to set a scheduled task to auto delete if not scheduled to run again (\fBdelete_after\fP) \fI\%#63650\fP +.IP \(bu 2 +When a job is disabled only increase it\(aqs _next_fire_time value if the job would have run at the current time, eg. the current _next_fire_time == now. \fI\%#63699\fP +.IP \(bu 2 +have salt.template.compile_template_str cleanup its temp files. \fI\%#63724\fP +.IP \(bu 2 Check file is not empty before attempting to read pillar disk cache file \fI\%#63729\fP +.IP \(bu 2 +Fixed an issue with generating fingerprints for public keys with different line endings \fI\%#63742\fP +.IP \(bu 2 +Add \fBfileserver_interval\fP and \fBmaintenance_interval\fP master configuration options. These options control how often to restart the FileServerUpdate and Maintenance processes. Some file server and pillar configurations are known to cause memory leaks over time. A notable example of this are configurations that use pygit2. 
Salt can not guarantee dependency libraries like pygit2 won\(aqt leak memory. Restarting any long running processes that use pygit2 guarantees we can keep the master\(aqs memory usage in check. \fI\%#63747\fP +.IP \(bu 2 +mac_xattr.list and mac_xattr.read will replace undecode\-able bytes to avoid raising CommandExecutionError. \fI\%#63779\fP \fI\%#63779\fP +.IP \(bu 2 +Change default GPG keyserver from \fI\%pgp.mit.edu\fP to \fI\%keys.openpgp.org\fP\&. \fI\%#63806\fP +.IP \(bu 2 +fix cherrypy 400 error output to be less generic. \fI\%#63835\fP +.IP \(bu 2 +Ensure kwargs is passed along to _call_apt when passed into install function. \fI\%#63847\fP +.IP \(bu 2 +remove eval and update logging to be more informative on bad config \fI\%#63879\fP +.IP \(bu 2 +add linux_distribution to util to stop dep warning \fI\%#63904\fP +.IP \(bu 2 +Fix valuerror when trying to close fileclient. Remove usage of \fBdel\fP and close the filclient properly. \fI\%#63920\fP +.IP \(bu 2 +Handle the situation when a sub proxy minion does not init properly, eg. an exception happens, and the sub proxy object is not available. \fI\%#63923\fP +.IP \(bu 2 +Clarifying documentation for extension_modules configuration option. \fI\%#63929\fP +.IP \(bu 2 +Windows pkg module now properly handles versions containing strings \fI\%#63935\fP +.IP \(bu 2 +Handle the scenario when the check_cmd requisite is used with a state function when the state has a local check_cmd function but that function isn\(aqt used by that function. \fI\%#63948\fP +.IP \(bu 2 +Issue #63981: Allow users to pass verify_ssl to pkg.install/pkg.installed on Windows \fI\%#63981\fP +.IP \(bu 2 +Hardened permissions on workers.ipc and master_event_pub.ipc. 
\fI\%#64063\fP .UNINDENT .SS Added .INDENT 0.0 @@ -475801,6 +477447,8 @@ Add ability to ignore symlinks in file.tidied \fI\%#63042\fP .IP \(bu 2 salt\-cloud support IMDSv2 tokens when using \(aquse\-instance\-role\-credentials\(aq \fI\%#63067\fP .IP \(bu 2 +Fix running fast tests twice and add git labels to suite. \fI\%#63081\fP +.IP \(bu 2 Add ability for file.symlink to not set ownership on existing links \fI\%#63093\fP .IP \(bu 2 Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation \fI\%#63095\fP @@ -475824,6 +477472,533 @@ enable them globally for all state runs. \fI\%#63316\fP Allow max queue size setting for state runs to prevent performance problems from queue growth \fI\%#63356\fP .IP \(bu 2 Add support of exposing meta_server_grains for Azure VMs \fI\%#63606\fP +.IP \(bu 2 +Include the version of \fBrelenv\fP in the versions report. \fI\%#63827\fP +.IP \(bu 2 +Added debug log messages displaying the command being run when removing packages on Windows \fI\%#63866\fP +.IP \(bu 2 +Adding the ability to exclude arguments from a state that end up passed to cmd.retcode when requisites such as onlyif or unless are used. \fI\%#63956\fP +.IP \(bu 2 +Add \-\-next\-release argument to salt/version.py, which prints the next upcoming release. \fI\%#64023\fP +.UNINDENT +.SS Security +.INDENT 0.0 +.IP \(bu 2 +Upgrade Requirements Due to Security Issues. +.INDENT 2.0 +.IP \(bu 2 +Upgrade to \fBcryptography>=39.0.1\fP due to: +.INDENT 2.0 +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-x4qr\-2fvf\-3mr5\fP +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-w7pp\-m8wf\-vj6r\fP +.UNINDENT +.IP \(bu 2 +Upgrade to \fBpyopenssl==23.0.0\fP due to the cryptography upgrade. 
+.IP \(bu 2 +Update to \fBmarkdown\-it\-py==2.2.0\fP due to: +.INDENT 2.0 +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-jrwr\-5x3p\-hvc3\fP +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-vrjv\-mxr7\-vjf8\fP \fI\%#63882\fP +.UNINDENT +.UNINDENT +.UNINDENT +(release\-3006.1)= +.SS Salt 3006.1 release notes +.SS Changelog +.SS Fixed +.INDENT 0.0 +.IP \(bu 2 +Check that the return data from the cloud create function is a dictionary before attempting to pull values out. \fI\%#61236\fP +.IP \(bu 2 +Ensure NamedLoaderContext\(aqs have their value() used if passing to other modules \fI\%#62477\fP +.IP \(bu 2 +add documentation note about reactor state ids. \fI\%#63589\fP +.IP \(bu 2 +Added support for \fBtest=True\fP to the \fBfile.cached\fP state module \fI\%#63785\fP +.IP \(bu 2 +Updated \fBsource_hash\fP documentation and added a log warning when \fBsource_hash\fP is used with a source other than \fBhttp\fP, \fBhttps\fP and \fBftp\fP\&. \fI\%#63810\fP +.IP \(bu 2 +Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. \fI\%#64081\fP +.IP \(bu 2 +Fix dmsetup device names with hyphen being picked up. \fI\%#64082\fP +.IP \(bu 2 +Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. +This event is only used when these functions are called via the schedule execution modules. +Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. \fI\%#64102\fP, \fI\%#64103\fP +.IP \(bu 2 +Default to a 0 timeout if none is given for the terraform roster to avoid \fB\-o ConnectTimeout=None\fP when using \fBsalt\-ssh\fP \fI\%#64109\fP +.IP \(bu 2 +Disable class level caching of the file client on \fBSaltCacheLoader\fP and properly use context managers to take care of initialization and termination of the file client. 
\fI\%#64111\fP +.IP \(bu 2 +Fixed several file client uses which were not properly terminating it by switching to using it as a context manager +whenever possible or making sure \fB\&.destroy()\fP was called when using a context manager was not possible. \fI\%#64113\fP +.IP \(bu 2 +Fix running \fI\%setup.py\fP when passing in \-\-salt\-config\-dir and \-\-salt\-cache\-dir arguments. \fI\%#64114\fP +.IP \(bu 2 +Moved /etc/salt/proxy and /lib/systemd/system/salt\-proxy@.service to the salt\-minion DEB package \fI\%#64117\fP +.IP \(bu 2 +Stop passing \fB**kwargs\fP and be explicit about the keyword arguments to pass, namely, to \fBcp.cache_file\fP call in \fBsalt.states.pkg\fP \fI\%#64118\fP +.IP \(bu 2 +lgpo_reg.set_value now returns \fBTrue\fP on success instead of \fBNone\fP \fI\%#64126\fP +.IP \(bu 2 +Make salt user\(aqs home /opt/saltstack/salt \fI\%#64141\fP +.IP \(bu 2 +Fix cmd.run doesn\(aqt output changes in test mode \fI\%#64150\fP +.IP \(bu 2 +Move salt user and group creation to common package \fI\%#64158\fP +.IP \(bu 2 +Fixed issue in salt\-cloud so that multiple masters specified in the cloud +are written to the minion config properly \fI\%#64170\fP +.IP \(bu 2 +Make sure the \fBsalt\-ssh\fP CLI calls it\(aqs \fBfsclient.destroy()\fP method when done. \fI\%#64184\fP +.IP \(bu 2 +Stop using the deprecated \fBsalt.transport.client\fP imports. \fI\%#64186\fP +.IP \(bu 2 +Add a \fB\&.pth\fP to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. 
\fI\%#64192\fP +.IP \(bu 2 +Fix \fBlgpo_reg\fP state to work with User policy \fI\%#64200\fP +.IP \(bu 2 +Cloud deployment directories are owned by salt user and group \fI\%#64204\fP +.IP \(bu 2 +\fBlgpo_reg\fP state now enforces and reports changes to the registry \fI\%#64222\fP +.UNINDENT +(release\-3006.2)= +.SS Salt 3006.2 release notes +.SS Changelog +.SS Fixed +.INDENT 0.0 +.IP \(bu 2 +In scenarios where PythonNet fails to load, Salt will now fall back to WMI for +gathering grains information \fI\%#64897\fP +.UNINDENT +.SS Security +.INDENT 0.0 +.IP \(bu 2 +fix CVE\-2023\-20897 by catching exception instead of letting exception disrupt connection \fI\%#cve\-2023\-20897\fP +.IP \(bu 2 +Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. \fI\%#cve\-2023\-20898\fP +.IP \(bu 2 +Upgrade to \fBrequests==2.31.0\fP +.sp +Due to: +.INDENT 2.0 +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-j8r2\-6x86\-q33q\fP \fI\%#64336\fP +.UNINDENT +.IP \(bu 2 +Upgrade to \fBcryptography==41.0.3\fP(and therefor \fBpyopenssl==23.2.0\fP due to \fI\%https://github.com/advisories/GHSA\-jm77\-qphf\-c4w8\fP) +.sp +This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre\-existing wheels. 
+.sp +Also resolves the following cryptography advisories: +.sp +Due to: +.INDENT 2.0 +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-5cpq\-8wj7\-hf2v\fP +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-x4qr\-2fvf\-3mr5\fP +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-w7pp\-m8wf\-vj6r\fP +.UNINDENT +.sp +There is no security upgrade available for Py3.5 \fI\%#64595\fP +.IP \(bu 2 +Bump to \fBcertifi==2023.07.22\fP due to \fI\%https://github.com/advisories/GHSA\-xqr8\-7jwr\-rhp7\fP \fI\%#64718\fP +.IP \(bu 2 +Upgrade \fBrelenv\fP to \fB0.13.2\fP and Python to \fB3.10.12\fP +.sp +Addresses multiple CVEs in Python\(aqs dependencies: \fI\%https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python\-3\-10\-12\fP \fI\%#64719\fP +.UNINDENT +(release\-3006.3)= +.SS Salt 3006.3 release notes +.SS Changelog +.SS Removed +.INDENT 0.0 +.IP \(bu 2 +Fedora 36 support was removed because it reached EOL \fI\%#64315\fP +.IP \(bu 2 +Handle deprecation warnings: +.INDENT 2.0 +.IP \(bu 2 +Switch to \fBFullArgSpec\fP since Py 3.11 no longer has \fBArgSpec\fP, deprecated since Py 3.0 +.IP \(bu 2 +Stop using the deprecated \fBcgi\fP module +.IP \(bu 2 +Stop using the deprecated \fBpipes\fP module +.IP \(bu 2 +Stop using the deprecated \fBimp\fP module \fI\%#64553\fP +.UNINDENT +.UNINDENT +.SS Changed +.INDENT 0.0 +.IP \(bu 2 +Replace libnacl with PyNaCl \fI\%#64372\fP +.IP \(bu 2 +Don\(aqt hardcode the python version on the Salt Package tests and on the \fBpkg/debian/salt\-cloud.postinst\fP file \fI\%#64553\fP +.IP \(bu 2 +Some more deprecated code fixes: +.INDENT 2.0 +.IP \(bu 2 +Stop using the deprecated \fBlocale.getdefaultlocale()\fP function +.IP \(bu 2 +Stop accessing deprecated attributes +.IP \(bu 2 +\fBpathlib.Path.__enter__()\fP usage is deprecated and not required, a no\-op \fI\%#64565\fP +.UNINDENT +.IP \(bu 2 +Bump to \fBpyyaml==6.0.1\fP due to \fI\%https://github.com/yaml/pyyaml/issues/601\fP and address lint issues \fI\%#64657\fP +.UNINDENT 
+.SS Fixed +.INDENT 0.0 +.IP \(bu 2 +Fix for assume role when used salt\-cloud to create aws ec2. \fI\%#52501\fP +.IP \(bu 2 +fixes aptpkg module by checking for blank comps. \fI\%#58667\fP +.IP \(bu 2 +\fBwheel.file_roots.find\fP is now able to find files in subdirectories of the roots. \fI\%#59800\fP +.IP \(bu 2 +pkg.latest no longer fails when multiple versions are reported to be installed (e.g. updating the kernel) \fI\%#60931\fP +.IP \(bu 2 +Do not update the credentials dictionary in \fButils/aws.py\fP while iterating over it, and use the correct delete functionality \fI\%#61049\fP +.IP \(bu 2 +fixed runner not having a proper exit code when runner modules throw an exception. \fI\%#61173\fP +.IP \(bu 2 +\fBpip.list_all_versions\fP now works with \fBindex_url\fP and \fBextra_index_url\fP \fI\%#61610\fP +.IP \(bu 2 +speed up file.recurse by using prefix with cp.list_master_dir and remove an un\-needed loop. \fI\%#61998\fP +.IP \(bu 2 +Preserve test=True condition while running sub states. \fI\%#62590\fP +.IP \(bu 2 +Job returns are only sent to originating master \fI\%#62834\fP +.IP \(bu 2 +Fixes an issue with failing subsequent state runs with the lgpo state module. +The \fBlgpo.get_polcy\fP function now returns all boolean settings. \fI\%#63296\fP +.IP \(bu 2 +Fix SELinux get policy with trailing whitespace \fI\%#63336\fP +.IP \(bu 2 +Fixes an issue with boolean settings not being reported after being set. The +\fBlgpo.get_polcy\fP function now returns all boolean settings. \fI\%#63473\fP +.IP \(bu 2 +Ensure body is returned when salt.utils.http returns something other than 200 with tornado backend. \fI\%#63557\fP +.IP \(bu 2 +Allow long running pillar and file client requests to finish using request_channel_timeout and request_channel_tries minion config. 
\fI\%#63824\fP +.IP \(bu 2 +Fix state_queue type checking to allow int values \fI\%#64122\fP +.IP \(bu 2 +Call global logger when catching pip.list exceptions in states.pip.installed +Rename global logger \fBlog\fP to \fBlogger\fP inside pip_state \fI\%#64169\fP +.IP \(bu 2 +Fixes permissions created by the Debian and RPM packages for the salt user. +.sp +The salt user created by the Debian and RPM packages to run the salt\-master process, was previously given ownership of various directories in a way which compromised the benefits of running the salt\-master process as a non\-root user. +.sp +This fix sets the salt user to only have write access to those files and +directories required for the salt\-master process to run. \fI\%#64193\fP +.IP \(bu 2 +Fix user.present state when groups is unset to ensure the groups are unchanged, as documented. \fI\%#64211\fP +.IP \(bu 2 +Fixes issue with MasterMinion class loading configuration from \(ga/etc/salt/minion.d/*.conf. +.sp +The MasterMinion class (used for running orchestraions on master and other functionality) was incorrectly loading configuration from \fB/etc/salt/minion.d/*.conf\fP, when it should only load configuration from \fB/etc/salt/master\fP and \fB/etc/salt/master.d/*.conf\fP\&. \fI\%#64219\fP +.IP \(bu 2 +Fixed issue in mac_user.enable_auto_login that caused the user\(aqs keychain to be reset at each boot \fI\%#64226\fP +.IP \(bu 2 +Fixed KeyError in logs when running a state that fails. \fI\%#64231\fP +.IP \(bu 2 +Fixed x509_v2 \fBcreate_private_key\fP/\fBcreate_crl\fP unknown kwargs: __pub_fun... \fI\%#64232\fP +.IP \(bu 2 +remove the hard coded python version in error. \fI\%#64237\fP +.IP \(bu 2 +\fBsalt\-pip\fP now properly errors out when being called from a non \fBonedir\fP environment. \fI\%#64249\fP +.IP \(bu 2 +Ensure we return an error when adding the key fails in the pkgrepo state for debian hosts. 
\fI\%#64253\fP +.IP \(bu 2 +Fixed file client private attribute reference on \fBSaltMakoTemplateLookup\fP \fI\%#64280\fP +.IP \(bu 2 +Fix pkgrepo.absent failures on apt\-based systems when repo either a) contains a +trailing slash, or b) there is an arch mismatch. \fI\%#64286\fP +.IP \(bu 2 +Fix detection of Salt codename by \(dqsalt_version\(dq execution module \fI\%#64306\fP +.IP \(bu 2 +Ensure selinux values are handled lowercase \fI\%#64318\fP +.IP \(bu 2 +Remove the \fBclr.AddReference\fP, it is causing an \fBIllegal characters in path\fP exception \fI\%#64339\fP +.IP \(bu 2 +Update \fBpkg.group_installed\fP state to support repo options \fI\%#64348\fP +.IP \(bu 2 +Fix salt user login shell path in Debian packages \fI\%#64377\fP +.IP \(bu 2 +Allow for multiple user\(aqs keys presented when authenticating, for example: root, salt, etc. \fI\%#64398\fP +.IP \(bu 2 +Fixed an issue with \fBlgpo_reg\fP where existing entries for the same key in +\fBRegistry.pol\fP were being overwritten in subsequent runs if the value name in +the subesequent run was contained in the existing value name. For example, a +key named \fBSetUpdateNotificationLevel\fP would be overwritten by a subsequent +run attempting to set \fBUpdateNotificationLevel\fP \fI\%#64401\fP +.IP \(bu 2 +Add search for %ProgramData%\eChocolatey\echoco.exe to determine if Chocolatey is installed or not \fI\%#64427\fP +.IP \(bu 2 +Fix regression for user.present on handling groups with dupe GIDs \fI\%#64430\fP +.IP \(bu 2 +Fix inconsistent use of args in ssh_auth.managed \fI\%#64442\fP +.IP \(bu 2 +Ensure we raise an error when the name argument is invalid in pkgrepo.managed state for systems using apt. \fI\%#64451\fP +.IP \(bu 2 +Fix file.symlink will not replace/update existing symlink \fI\%#64477\fP +.IP \(bu 2 +Fixed salt\-ssh state.* commands returning retcode 0 when state/pillar rendering fails \fI\%#64514\fP +.IP \(bu 2 +Fix pkg.install when using a port in the url. 
\fI\%#64516\fP
+.IP \(bu 2
+\fBwin_pkg\fP Fixes an issue running \fBpkg.install\fP with \fBversion=latest\fP where the
+new installer would not be cached if there was already an installer present
+with the same name. \fI\%#64519\fP
+.IP \(bu 2
+Added a \fBtest:full\fP label in the salt repository, which, when selected, will force a full test run. \fI\%#64539\fP
+.IP \(bu 2
+Syndic\(aqs async_req_channel uses the asynchronous version of request channel \fI\%#64552\fP
+.IP \(bu 2
+Ensure runners properly save information to job cache. \fI\%#64570\fP
+.IP \(bu 2
+Added salt.ufw to salt\-master install on Debian and Ubuntu \fI\%#64572\fP
+.IP \(bu 2
+Added support for Chocolatey 2.0.0+ while maintaining support for older versions \fI\%#64622\fP
+.IP \(bu 2
+Updated semanage fcontext to use \-\-modify if context already exists when adding context \fI\%#64625\fP
+.IP \(bu 2
+Preserve request client socket between requests. \fI\%#64627\fP
+.IP \(bu 2
+Show user friendly message when pillars timeout \fI\%#64651\fP
+.IP \(bu 2
+File client timeouts during jobs show user friendly errors instead of tracebacks \fI\%#64653\fP
+.IP \(bu 2
+SaltClientError does not log a traceback on minions, we expect these to happen so a user friendly log is shown. \fI\%#64729\fP
+.IP \(bu 2
+Look in location salt is running from, this accounts for running from an unpacked onedir file that has not been installed. \fI\%#64877\fP
+.IP \(bu 2
+Preserve credentials on spawning platforms, minions no longer re\-authenticate
+with every job when using \fBmultiprocessing=True\fP\&. \fI\%#64914\fP
+.IP \(bu 2
+Fixed uninstaller to not remove the \fBsalt\fP directory by default. This allows
+the \fBextras\-3.##\fP folder to persist so salt\-pip dependencies are not wiped out
+during an upgrade. \fI\%#64957\fP
+.IP \(bu 2
+fix msteams by adding the missing header that Microsoft is now enforcing. \fI\%#64973\fP
+.IP \(bu 2
+Fix \fBenv\fP and improve cache cleaning see more info at pull #65017.
\fI\%#65002\fP +.IP \(bu 2 +Better error message on inconsistent decoded payload \fI\%#65020\fP +.IP \(bu 2 +Handle permissions access error when calling \fBlsb_release\fP with the salt user \fI\%#65024\fP +.IP \(bu 2 +Allow schedule state module to update schedule when the minion is offline. \fI\%#65033\fP +.IP \(bu 2 +Fixed creation of wildcard DNS in SAN in \fBx509_v2\fP \fI\%#65072\fP +.IP \(bu 2 +The macOS installer no longer removes the extras directory \fI\%#65073\fP +.UNINDENT +.SS Added +.INDENT 0.0 +.IP \(bu 2 +Added a script to automate setting up a 2nd minion in a user context on Windows \fI\%#64439\fP +.IP \(bu 2 +Several fixes to the CI workflow: +.INDENT 2.0 +.IP \(bu 2 +Don\(aqt override the \fBon\fP Jinja block on the \fBci.yaml\fP template. This enables reacting to labels getting added/removed +to/from pull requests. +.IP \(bu 2 +Switch to using \fBtools\fP and re\-use the event payload available instead of querying the GH API again to get the pull +request labels +.IP \(bu 2 +Concentrate test selection by labels to a single place +.IP \(bu 2 +Enable code coverage on pull\-requests by setting the \fBtest:coverage\fP label \fI\%#64547\fP +.UNINDENT +.UNINDENT +.SS Security +.INDENT 0.0 +.IP \(bu 2 +Upgrade to \fBcryptography==41.0.3\fP(and therefor \fBpyopenssl==23.2.0\fP due to \fI\%https://github.com/advisories/GHSA\-jm77\-qphf\-c4w8\fP) +.sp +This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre\-existing wheels. 
+.sp +Also resolves the following cryptography advisories: +.sp +Due to: +.INDENT 2.0 +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-5cpq\-8wj7\-hf2v\fP +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-x4qr\-2fvf\-3mr5\fP +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-w7pp\-m8wf\-vj6r\fP \fI\%#64595\fP +.UNINDENT +.IP \(bu 2 +Bump to \fBaiohttp==3.8.5\fP due to \fI\%https://github.com/advisories/GHSA\-45c4\-8wx5\-qw6w\fP \fI\%#64687\fP +.IP \(bu 2 +Bump to \fBcertifi==2023.07.22\fP due to \fI\%https://github.com/advisories/GHSA\-xqr8\-7jwr\-rhp7\fP \fI\%#64718\fP +.IP \(bu 2 +Upgrade \fBrelenv\fP to \fB0.13.2\fP and Python to \fB3.10.12\fP +.sp +Addresses multiple CVEs in Python\(aqs dependencies: \fI\%https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python\-3\-10\-12\fP \fI\%#64719\fP +.IP \(bu 2 +Update to \fBgitpython>=3.1.32\fP due to \fI\%https://github.com/advisories/GHSA\-pr76\-5cm5\-w9cj\fP \fI\%#64988\fP +.UNINDENT +(release\-3006.4)= +.SS Salt 3006.4 release notes +.SS Changelog +.SS Security +.INDENT 0.0 +.IP \(bu 2 +Fix CVE\-2023\-34049 by ensuring we do not use a predictable name for the script and correctly check returncode of scp command. +This only impacts salt\-ssh users using the pre\-flight option. 
\fI\%#cve\-2023\-34049\fP +.IP \(bu 2 +Update to \fBgitpython>=3.1.35\fP due to \fI\%https://github.com/advisories/GHSA\-wfm5\-v35h\-vwf4\fP and \fI\%https://github.com/advisories/GHSA\-cwvm\-v4w8\-q58c\fP \fI\%#65163\fP +.IP \(bu 2 +Bump to \fBcryptography==41.0.4\fP due to \fI\%https://github.com/advisories/GHSA\-v8gr\-m533\-ghj9\fP \fI\%#65268\fP +.IP \(bu 2 +Upgrade relenv to 0.13.12 to address CVE\-2023\-4807 \fI\%#65316\fP +.IP \(bu 2 +Bump to \fBurllib3==1.26.17\fP or \fBurllib3==2.0.6\fP due to \fI\%https://github.com/advisories/GHSA\-v845\-jxx5\-vc9f\fP \fI\%#65334\fP +.IP \(bu 2 +Bump to \fBgitpython==3.1.37\fP due to \fI\%https://github.com/advisories/GHSA\-cwvm\-v4w8\-q58c\fP \fI\%#65383\fP +.UNINDENT +(release\-3006.5)= +.SS Salt 3006.5 release notes +.SS Changelog +.SS Removed +.INDENT 0.0 +.IP \(bu 2 +Tech Debt \- support for pysss removed due to functionality addition in Python 3.3 \fI\%#65029\fP +.UNINDENT +.SS Fixed +.INDENT 0.0 +.IP \(bu 2 +Improved error message when state arguments are accidentally passed as a string \fI\%#38098\fP +.IP \(bu 2 +Allow \fBpip.install\fP to create a log file that is passed in if the parent directory is writeable \fI\%#44722\fP +.IP \(bu 2 +Fixed merging of complex pillar overrides with salt\-ssh states \fI\%#59802\fP +.IP \(bu 2 +Fixed gpg pillar rendering with salt\-ssh \fI\%#60002\fP +.IP \(bu 2 +Made salt\-ssh states not re\-render pillars unnecessarily \fI\%#62230\fP +.IP \(bu 2 +Made Salt maintain options in Debian package repo definitions \fI\%#64130\fP +.IP \(bu 2 +Migrated all \fI\%invoke\fP tasks to \fI\%python\-tools\-scripts\fP\&. 
+.INDENT 2.0 +.IP \(bu 2 +\fBtasks/docs.py\fP \-> \fBtools/precommit/docs.py\fP +.IP \(bu 2 +\fBtasks/docstrings.py\fP \-> \fBtools/precommit/docstrings.py\fP +.IP \(bu 2 +\fBtasks/loader.py\fP \-> \fBtools/precommit/loader.py\fP +.IP \(bu 2 +\fBtasks/filemap.py\fP \-> \fBtools/precommit/filemap.py\fP \fI\%#64374\fP +.UNINDENT +.IP \(bu 2 +Fix salt user login shell path in Debian packages \fI\%#64377\fP +.IP \(bu 2 +Fill out lsb_distrib_xxxx (best estimate) grains if problems with retrieving lsb_release data \fI\%#64473\fP +.IP \(bu 2 +Fixed an issue in the \fBfile.directory\fP state where the \fBchildren_only\fP keyword +argument was not being respected. \fI\%#64497\fP +.IP \(bu 2 +Move salt.ufw to correct location /etc/ufw/applications.d/ \fI\%#64572\fP +.IP \(bu 2 +Fixed salt\-ssh stacktrace when retcode is not an integer \fI\%#64575\fP +.IP \(bu 2 +Fixed SSH shell seldomly failing to report any exit code \fI\%#64588\fP +.IP \(bu 2 +Fixed some issues in x509_v2 execution module private key functions \fI\%#64597\fP +.IP \(bu 2 +Fixed grp.getgrall() in utils/user.py causing performance issues \fI\%#64888\fP +.IP \(bu 2 +Fix user.list_groups omits remote groups via sssd, etc. \fI\%#64953\fP +.IP \(bu 2 +Ensure sync from _grains occurs before attempting pillar compilation in case custom grain used in pillar file \fI\%#65027\fP +.IP \(bu 2 +Moved gitfs locks to salt working dir to avoid lock wipes \fI\%#65086\fP +.IP \(bu 2 +Only attempt to create a keys directory when \fB\-\-gen\-keys\fP is passed to the \fBsalt\-key\fP CLI \fI\%#65093\fP +.IP \(bu 2 +Fix nonce verification, request server replies do not stomp on each other. \fI\%#65114\fP +.IP \(bu 2 +speed up yumpkg list_pkgs by not requiring digest or signature verification on lookup. \fI\%#65152\fP +.IP \(bu 2 +Fix pkg.latest failing on windows for winrepo packages where the package is already up to date \fI\%#65165\fP +.IP \(bu 2 +Ensure \fBkwarg\fP is preserved when checking for kwargs. 
This change affects proxy minions when used with Deltaproxy, which had kwargs popped when targeting multiple minion ids. \fI\%#65179\fP +.IP \(bu 2 +Fixes traceback when state id is an int in a reactor SLS file. \fI\%#65210\fP +.IP \(bu 2 +Install logrotate config as /etc/logrotate.d/salt\-common for Debian packages +Remove broken /etc/logrotate.d/salt directory from 3006.3 if it exists. \fI\%#65231\fP +.IP \(bu 2 +Use \fBsha256\fP as the default \fBhash_type\fP\&. It has been the default since Salt v2016.9 \fI\%#65287\fP +.IP \(bu 2 +Preserve ownership on log rotation \fI\%#65288\fP +.IP \(bu 2 +Ensure that the correct value of jid_include is passed if the argument is included in the passed keyword arguments. \fI\%#65302\fP +.IP \(bu 2 +Upgrade relenv to 0.14.2 +.INDENT 2.0 +.IP \(bu 2 +Update openssl to address CVE\-2023\-5363. +.IP \(bu 2 +Fix bug in openssl setup when openssl binary can\(aqt be found. +.IP \(bu 2 +Add M1 mac support. \fI\%#65316\fP +.UNINDENT +.IP \(bu 2 +Fix regex for filespec adding/deleting fcontext policy in selinux \fI\%#65340\fP +.IP \(bu 2 +Ensure CLI options take priority over Saltfile options \fI\%#65358\fP +.IP \(bu 2 +Test mode for state function \fBsaltmod.wheel\fP no longer sets \fBresult\fP to \fB(None,)\fP \fI\%#65372\fP +.IP \(bu 2 +Client only processes events whose tag conforms to an event return. \fI\%#65400\fP +.IP \(bu 2 +Fixes an issue setting user or machine policy on Windows when the Group Policy +directory is missing \fI\%#65411\fP +.IP \(bu 2 +Fix regression in file module which was not re\-using a file client. \fI\%#65450\fP +.IP \(bu 2 +pip.installed state will now properly fail when a specified user does not exist \fI\%#65458\fP +.IP \(bu 2 +Publish channel connect callback method properly closes its request channel. 
\fI\%#65464\fP +.IP \(bu 2 +Ensured the pillar in SSH wrapper modules is the same as the one used in template rendering when overrides are passed \fI\%#65483\fP +.IP \(bu 2 +Fix file.comment ignore_missing not working with multiline char \fI\%#65501\fP +.IP \(bu 2 +Warn when an un\-closed transport client is being garbage collected. \fI\%#65554\fP +.IP \(bu 2 +Only generate the HMAC\(aqs for \fBlibssl.so.1.1\fP and \fBlibcrypto.so.1.1\fP if those files exist. \fI\%#65581\fP +.IP \(bu 2 +Fixed an issue where Salt Cloud would fail if it could not delete lingering +PAexec binaries \fI\%#65584\fP +.UNINDENT +.SS Added +.INDENT 0.0 +.IP \(bu 2 +Added Salt support for Debian 12 \fI\%#64223\fP +.IP \(bu 2 +Added Salt support for Amazon Linux 2023 \fI\%#64455\fP +.UNINDENT +.SS Security +.INDENT 0.0 +.IP \(bu 2 +Bump to \fBcryptography==41.0.4\fP due to \fI\%https://github.com/advisories/GHSA\-v8gr\-m533\-ghj9\fP \fI\%#65268\fP +.IP \(bu 2 +Bump to \fBcryptography==41.0.7\fP due to \fI\%https://github.com/advisories/GHSA\-jfhm\-5ghh\-2f97\fP \fI\%#65643\fP .UNINDENT .sp See \fI\%Install a release candidate\fP @@ -476733,6 +478908,87 @@ Ensure the status of the service is captured when the beacon function is called, .IP \(bu 2 The sub proxies controlled by Deltaproxy need to have their own req_channel otherwise there are timeout exceptions when the __master_req_channel_payload is fired and reacted on. (#62708) .UNINDENT +.SS Salt 3005.2 Release Notes +.sp +Version 3005.2 is a CVE security fix release for \fI\%3005\fP\&. +.SS Changed +.INDENT 0.0 +.IP \(bu 2 +Additional required package upgrades +.INDENT 2.0 +.IP \(bu 2 +It\(aqs now \fIpyzmq>=20.0.0\fP on all platforms, and \fI<=22.0.3\fP just for windows. +.IP \(bu 2 +Upgrade to \fIpyopenssl==23.0.0\fP due to the cryptography upgrade. 
(#63757) +.UNINDENT +.UNINDENT +.SS Security +.INDENT 0.0 +.IP \(bu 2 +fix CVE\-2023\-20897 by catching exception instead of letting exception disrupt connection (cve\-2023\-20897) +.IP \(bu 2 +Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. (cve\-2023\-20898) +.IP \(bu 2 +Upgrade to \fIrequests==2.31.0\fP +.INDENT 2.0 +.TP +.B Due to: +.INDENT 7.0 +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-j8r2\-6x86\-q33q\fP (#64336) +.UNINDENT +.UNINDENT +.IP \(bu 2 +Upgrade to \fIcryptography==41.0.3\(ga(and therefor \(gapyopenssl==23.2.0\fP due to \fI\%https://github.com/advisories/GHSA\-jm77\-qphf\-c4w8\fP) +.sp +Also resolves the following cryptography advisories: +.INDENT 2.0 +.TP +.B Due to: +.INDENT 7.0 +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-5cpq\-8wj7\-hf2v\fP +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-x4qr\-2fvf\-3mr5\fP +.IP \(bu 2 +\fI\%https://github.com/advisories/GHSA\-w7pp\-m8wf\-vj6r\fP +.UNINDENT +.UNINDENT +.sp +There is no security upgrade available for Py3.5 (#64595) +.IP \(bu 2 +Bump to \fIcertifi==2023.07.22\fP due to \fI\%https://github.com/advisories/GHSA\-xqr8\-7jwr\-rhp7\fP +.sp +Python 3.5 cannot get the updated requirements since certifi no longer supports this python version (#64720) +.UNINDENT +.SS Salt 3005.3 Release Notes +.sp +Version 3005.3 is a Bug fix release for \fI\%3005\fP\&. +.SS Changed +.INDENT 0.0 +.IP \(bu 2 +Fix __env__ and improve cache cleaning see more info at pull #65017. (#65002) +.UNINDENT +.SS Security +.INDENT 0.0 +.IP \(bu 2 +Update to \fIgitpython>=3.1.35\fP due to \fI\%https://github.com/advisories/GHSA\-wfm5\-v35h\-vwf4\fP and \fI\%https://github.com/advisories/GHSA\-cwvm\-v4w8\-q58c\fP (#65167) +.UNINDENT +.SS Salt 3005.4 Release Notes +.sp +Version 3005.4 is a CVE security fix release for \fI\%3005\fP\&. 
+.SS Security +.INDENT 0.0 +.IP \(bu 2 +Fix CVE\-2023\-34049 by ensuring we do not use a predictable name for the script and correctly check returncode of scp command. +This only impacts salt\-ssh users using the pre\-flight option. (cve\-2023\-34049) +.IP \(bu 2 +Bump to \fIcryptography==41.0.4\fP due to \fI\%https://github.com/advisories/GHSA\-v8gr\-m533\-ghj9\fP (#65267) +.IP \(bu 2 +Bump to \fIurllib3==1.26.17\fP or \fIurllib3==2.0.6\fP due to \fI\%https://github.com/advisories/GHSA\-v845\-jxx5\-vc9f\fP (#65334) +.IP \(bu 2 +Bump to \fIgitpython==3.1.37\fP due to \fI\%https://github.com/advisories/GHSA\-cwvm\-v4w8\-q58c\fP (#65383) +.UNINDENT .SS Salt 3004 Release Notes \- Codename Silicon .SS New Features .SS Transactional System Support (MicroOS) diff --git a/doc/man/spm.1 b/doc/man/spm.1 index 774e767ef35f..5e715ca48299 100644 --- a/doc/man/spm.1 +++ b/doc/man/spm.1 @@ -27,7 +27,7 @@ level margin: \\n[rst2man-indent\\n[rst2man-indent-level]] .\" new: \\n[rst2man-indent\\n[rst2man-indent-level]] .in \\n[rst2man-indent\\n[rst2man-indent-level]]u .. -.TH "SPM" "1" "Generated on March 01, 2023 at 10:47:29 PM UTC." "3006.0" "Salt" +.TH "SPM" "1" "Generated on December 12, 2023 at 05:54:17 PM UTC." "3006.5" "Salt" .SH NAME spm \- Salt Package Manager Command .sp diff --git a/doc/ref/configuration/logging/index.rst b/doc/ref/configuration/logging/index.rst index 1d1a0c09732a..d5d8f9cac6b7 100644 --- a/doc/ref/configuration/logging/index.rst +++ b/doc/ref/configuration/logging/index.rst @@ -61,6 +61,13 @@ available in salt are shown in the table below. | all | 0 | Everything | +----------+---------------+--------------------------------------------------------------------------+ +Any log level below the `info` level is INSECURE and may log sensitive data. This currently includes: +#. profile +#. debug +#. trace +#. garbage +#. 
all + Available Configuration Settings ================================ diff --git a/doc/ref/configuration/master.rst b/doc/ref/configuration/master.rst index a41a6eed31db..e59efd47f713 100644 --- a/doc/ref/configuration/master.rst +++ b/doc/ref/configuration/master.rst @@ -206,10 +206,13 @@ following the Filesystem Hierarchy Standard (FHS) might set it to moved into the master cachedir (on most platforms, ``/var/cache/salt/master/extmods``). -Directory for custom modules. This directory can contain subdirectories for -each of Salt's module types such as ``runners``, ``output``, ``wheel``, -``modules``, ``states``, ``returners``, ``engines``, ``utils``, etc. -This path is appended to :conf_master:`root_dir`. +Directory where custom modules are synced to. This directory can contain +subdirectories for each of Salt's module types such as ``runners``, +``output``, ``wheel``, ``modules``, ``states``, ``returners``, ``engines``, +``utils``, etc. This path is appended to :conf_master:`root_dir`. + +Note, any directories or files not found in the `module_dirs` location +will be removed from the extension_modules path. .. code-block:: yaml @@ -368,10 +371,24 @@ Set the default timeout for the salt command and api. Default: ``60`` -The loop_interval option controls the seconds for the master's maintenance +The loop_interval option controls the seconds for the master's Maintenance process check cycle. This process updates file server backends, cleans the job cache and executes the scheduler. +``maintenance_interval`` +------------------------ + +.. versionadded:: 3006.0 + +Default: ``3600`` + +Defines how often to restart the master's Maintenance process. + +.. code-block:: yaml + + maintenance_interval: 9600 + + .. conf_master:: output ``output`` @@ -4074,6 +4091,19 @@ This option defines the update interval (in seconds) for s3fs. s3fs_update_interval: 120 +``fileserver_interval`` +*********************** + +.. 
versionadded:: 3006.0 + +Default: ``3600`` + +Defines how often to restart the master's FilesServerUpdate process. + +.. code-block:: yaml + + fileserver_interval: 9600 + .. _pillar-configuration-master: @@ -4983,6 +5013,7 @@ Default: ``3600`` If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount of time, in seconds, before the cache is considered invalid by a master and a fresh pillar is recompiled and stored. +The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. .. conf_master:: pillar_cache_backend @@ -5429,6 +5460,13 @@ The level of messages to send to the console. See also :conf_log:`log_level`. log_level: warning +Any log level below the `info` level is INSECURE and may log sensitive data. This currently includes: +#. profile +#. debug +#. trace +#. garbage +#. all + .. conf_master:: log_level_logfile ``log_level_logfile`` @@ -5444,6 +5482,13 @@ it will inherit the level set by :conf_log:`log_level` option. log_level_logfile: warning +Any log level below the `info` level is INSECURE and may log sensitive data. This currently includes: +#. profile +#. debug +#. trace +#. garbage +#. all + .. conf_master:: log_datefmt ``log_datefmt`` diff --git a/doc/ref/configuration/minion.rst b/doc/ref/configuration/minion.rst index 57af5ce4a3ab..0f99da2f1c0c 100644 --- a/doc/ref/configuration/minion.rst +++ b/doc/ref/configuration/minion.rst @@ -1197,8 +1197,8 @@ seconds each iteration. Default: ``False`` -If the master rejects the minion's public key, retry instead of exiting. -Rejected keys will be handled the same as waiting on acceptance. +If the master denies or rejects the minion's public key, retry instead of +exiting. These keys will be handled the same as waiting on acceptance. .. code-block:: yaml @@ -1305,6 +1305,36 @@ restart. auth_safemode: False +.. conf_minion:: request_channel_timeout + +``request_channel_timeout`` +--------------------------- + +.. 
versionadded:: 3006.2 + +Default: ``30`` + +The default timeout for request channel requests. This setting can be used to tune minions to better handle long running pillar and file client requests. + +.. code-block:: yaml + + request_channel_timeout: 30 + +``request_channel_tries`` +------------------------- + +.. versionadded:: 3006.2 + +Default: ``3`` + +The default number of times the minion will try request channel requests. This +setting can be used to tune minions to better handle long running pillar and +file client requests by retrying them after a timeout happens. + +.. code-block:: yaml + + request_channel_tries: 3 + +.. conf_minion:: ping_interval + +``ping_interval`` @@ -2438,10 +2468,7 @@ enabled and can be disabled by changing this value to ``False``. ``saltenv`` will take its value. If both are used, ``environment`` will be ignored and ``saltenv`` will be used. -Normally the minion is not isolated to any single environment on the master -when running states, but the environment can be isolated on the minion side -by statically setting it. Remember that the recommended way to manage -environments is to isolate via the top file. +The default fileserver environment to use when copying files and applying states. .. code-block:: yaml @@ -3281,6 +3308,12 @@ The level of messages to send to the console. See also :conf_log:`log_level`. log_level: warning +Any log level below the `info` level is INSECURE and may log sensitive data. This currently includes: +#. profile +#. debug +#. trace +#. garbage +#. all .. conf_minion:: log_level_logfile @@ -3297,6 +3330,12 @@ it will inherit the level set by :conf_log:`log_level` option. log_level_logfile: warning +Any log level below the `info` level is INSECURE and may log sensitive data. This currently includes: +#. profile +#. debug +#. trace +#. garbage +#. all .. 
conf_minion:: log_datefmt diff --git a/doc/ref/file_server/file_roots.rst b/doc/ref/file_server/file_roots.rst index dddd248785bb..8622e4905fa4 100644 --- a/doc/ref/file_server/file_roots.rst +++ b/doc/ref/file_server/file_roots.rst @@ -13,6 +13,15 @@ individual environments can span across multiple directory roots to create overlays and to allow for files to be organized in many flexible ways. +Periodic Restarts +================= + +The file server will restart periodically. The reason for this is to prevent any +file server backends which may not properly handle resources from endlessly +consuming memory. A notable example of this is using a git backend with the +pygit2 library. How often the file server restarts can be controlled with the +``fileserver_interval`` in your master's config file. + Environments ============ diff --git a/doc/ref/modules/index.rst b/doc/ref/modules/index.rst index 59f7c003aaa8..ca9171383c51 100644 --- a/doc/ref/modules/index.rst +++ b/doc/ref/modules/index.rst @@ -416,6 +416,8 @@ The above example will force the minion to use the :py:mod:`systemd ` module to provide service management, and the :py:mod:`aptpkg ` module to provide package management. +For per-state provider overrides, see documentation on :ref:`state providers `. + .. __: https://github.com/saltstack/salt/issues/new Logging Restrictions diff --git a/doc/ref/states/backup_mode.rst b/doc/ref/states/backup_mode.rst index 1cde050f17be..3d33abde5504 100644 --- a/doc/ref/states/backup_mode.rst +++ b/doc/ref/states/backup_mode.rst @@ -24,6 +24,14 @@ Or it can be set for each file: - source: salt://ssh/sshd_config - backup: minion + +The backup_mode can be set to any of the following options: + +* **minion**: backup to the minion. +* **master**: backup to the master, a planned mode that has not yet been implemented, so does nothing. +* **both**: backup to both. A combination of both master and minion. 
+ + Backed-up Files =============== diff --git a/doc/ref/states/requisites.rst b/doc/ref/states/requisites.rst index e1cc6ec65266..18625faf9516 100644 --- a/doc/ref/states/requisites.rst +++ b/doc/ref/states/requisites.rst @@ -917,6 +917,29 @@ In the above case, ``some_check`` will be run prior to _each_ name -- once for key: not-existing get_return: res +.. versionchanged:: 3006.0 + + Since the ``unless`` requisite utilizes ``cmd.retcode``, certain parameters + included in the state are passed along to ``cmd.retcode``. On occasion this + can cause issues, particularly if the ``shell`` option in a ``user.present`` + is set to /sbin/nologin and this shell is passed along to ``cmd.retcode``. + This would cause ``cmd.retcode`` to run the command using that shell which + would fail regardless of the result of the command. + + By including ``shell`` in ``cmd_opts_exclude``, that parameter would not be + passed along to the call to ``cmd.retcode``. + + .. code-block:: yaml + + jim_nologin: + user.present: + - name: jim + - shell: /sbin/nologin + - unless: + - echo hello world + - cmd_opts_exclude: + - shell + .. _onlyif-requisite: onlyif @@ -1005,11 +1028,33 @@ if the gluster commands return a 0 ret value. key: does-exist get_return: res +.. versionchanged:: 3006.0 + + Since the ``onlyif`` requisite utilizes ``cmd.retcode``, certain parameters + included in the state are passed along to ``cmd.retcode``. On occasion this + can cause issues, particularly if the ``shell`` option in a ``user.present`` + is set to /sbin/nologin and this shell is passed along to ``cmd.retcode``. + This would cause ``cmd.retcode`` to run the command using that shell which + would fail regardless of the result of the command. + + By including ``shell`` in ``cmd_opts_exclude``, that parameter would not be + passed along to the call to ``cmd.retcode``. + + .. 
code-block:: yaml + + jim_nologin: + user.present: + - name: jim + - shell: /sbin/nologin + - onlyif: + - echo hello world + - cmd_opts_exclude: + - shell .. _creates-requisite: -Creates -------- +creates +~~~~~~~ .. versionadded:: 3001 diff --git a/doc/security/index.rst b/doc/security/index.rst index ab7b048940e6..e5a36381e4e3 100644 --- a/doc/security/index.rst +++ b/doc/security/index.rst @@ -4,7 +4,7 @@ Security disclosure policy ========================== -:email: security@saltstack.com +:email: saltproject-security.pdl@broadcom.com :gpg key ID: 4EA0793D :gpg key fingerprint: ``8ABE 4EFC F0F4 B24B FF2A AF90 D570 F2D3 4EA0 793D`` @@ -114,7 +114,7 @@ Security disclosure policy =i1Tf -----END PGP PUBLIC KEY BLOCK----- -The SaltStack Security Team is available at security@saltstack.com for +The SaltStack Security Team is available at saltproject-security.pdl@broadcom.com for security-related bug reports or questions. We request the disclosure of any security-related bugs or issues be reported @@ -129,7 +129,7 @@ SaltStack takes security and the trust of our customers and users very seriously. Our disclosure policy is intended to resolve security issues as quickly and safely as is possible. -1. A security report sent to security@saltstack.com is assigned to a team +1. A security report sent to saltproject-security.pdl@broadcom.com is assigned to a team member. This person is the primary contact for questions and will coordinate the fix, release, and announcement. 
diff --git a/doc/topics/beacons/index.rst b/doc/topics/beacons/index.rst index 0c8cde04cf67..311c8549b46b 100644 --- a/doc/topics/beacons/index.rst +++ b/doc/topics/beacons/index.rst @@ -246,7 +246,7 @@ Add the following to ``/srv/reactor/revert.sls``: revert-file: local.state.apply: - - tgt: {{ data['data']['id'] }} + - tgt: {{ data['id'] }} - arg: - maintain_important_file @@ -257,12 +257,6 @@ Add the following to ``/srv/reactor/revert.sls``: to modify the watched file, it is important to ensure the state applied is also :term:`idempotent `. -.. note:: - - The expression ``{{ data['data']['id'] }}`` :ref:`is correct - ` as it matches the event structure :ref:`shown above - `. - State SLS ````````` diff --git a/doc/topics/development/modules/developing.rst b/doc/topics/development/modules/developing.rst index 5e9a9219ac4c..82e8d793bc9f 100644 --- a/doc/topics/development/modules/developing.rst +++ b/doc/topics/development/modules/developing.rst @@ -155,7 +155,7 @@ The following dunder dictionaries are always defined, but may be empty __opts__ -------- -..versionchanged:: 3006.0 +.. versionchanged:: 3006.0 The ``__opts__`` dictionary can now be accessed via :py:mod:`~salt.loader.context``. @@ -248,13 +248,6 @@ executions until the modules are refreshed; such as when :py:func:`saltutil.sync_all ` or :py:func:`state.apply ` are executed. -A great place to see how to use ``__context__`` is in the cp.py module in -salt/modules/cp.py. The fileclient authenticates with the master when it is -instantiated and then is used to copy files to the minion. Rather than create a -new fileclient for each file that is to be copied down, one instance of the -fileclient is instantiated in the ``__context__`` dictionary and is reused for -each file. Here is an example from salt/modules/cp.py: - .. code-block:: python if not "cp.fileclient" in __context__: @@ -303,3 +296,13 @@ Defined in: State __sdb__ ------- Defined in: SDB + + +__file_client__ +--------------- + +.. 
versionchanged:: 3006.5 + +The ``__file_client__`` dunder was added to states and execution modules. This +enables the use of a file client without having to instantiate one in +the module. diff --git a/doc/topics/development/releasenotes.rst b/doc/topics/development/releasenotes.rst new file mode 100644 index 000000000000..ecb2a70115ab --- /dev/null +++ b/doc/topics/development/releasenotes.rst @@ -0,0 +1,71 @@ +.. _releasenotes: + +============= +Release Notes +============= + +You can edit the release notes to highlight a new feature being added +to a given release. The release notes are templatized with Jinja and +are generated at release time. + + +.. _edit-release-note: + +How do I edit the release notes +------------------------------- + +To edit the release notes you need to look in doc/topics/releases/templates +for your given release and edit the template. Do not edit the release note +files in doc/topics/releases/, as this will be written over with the content +in the template file. For example, if you want to add content to the 3006.0 +release notes you would edit the doc/topics/releases/templates/3006.0.md.template +file. Do not edit the changelog portion of the template file, since that is +auto generated with the content generated for the changelog for each release. + + +How to generate the release notes +--------------------------------- + +This step is only used when we need to generate the release notes before releasing. +You should NOT need to run these steps as they are run in the pipeline, but this +is documented so you can test your changes to the release notes template. + +To generate the release notes requires the `tools` command. The instructions below +will detail how to install and use `tools`. + + +Installing `tools` +.................. + +.. 
code-block: bash + + python -m pip install -r requirements/static/ci/py3.10/tools.txt + + +To view the output the release notes will produce before generating them +you can run `tools` in draft mode: + +.. code-block:: bash + + tools changelog update-release-notes --draft + +To generate the release notes just remove the `--draft` argument: + +.. code-block:: bash + + tools changelog update-release-notes + + +To specify a specific Salt version you add that version as an argument: + +.. code-block:: bash + + tools changelog update-release-notes 3006.0 + + +To only generate the template for a new release + + +.. code-block:: bash + + tools changelog update-release-notes --template-only diff --git a/doc/topics/packaging/index.rst b/doc/topics/packaging/index.rst index b671aaa6393a..755bf73a54c5 100644 --- a/doc/topics/packaging/index.rst +++ b/doc/topics/packaging/index.rst @@ -13,7 +13,7 @@ are built with the `relenv `_. @@ -36,96 +36,153 @@ How to build onedir only .. code-block:: bash - relenv fetch --python= + relenv fetch --python= #. Create relenv environment: .. code-block:: bash - relenv create --python=3.10.10 + relenv create --python= #. Add Salt into onedir. .. code-block:: bash - path/to//bin/pip install /path/to/salt + /bin/pip install /path/to/salt How to build rpm packages ========================= -#. Install the dependencies: + +#. Ensure you are in the current Salt cloned git repo: .. code-block:: bash - yum -y install python3 python3-pip openssl git rpmdevtools rpmlint systemd-units libxcrypt-compat git + cd -#. (Optional) To build a specific Salt version, you will need to install tools and changelog dependencies: +#. Install the dependencies: .. code-block:: bash - pip install -r requirements/static/ci/py{python_version}/tools.txt + yum -y install python3 python3-pip openssl git rpmdevtools rpmlint systemd-units libxcrypt-compat git gnupg2 jq createrepo rpm-sign rustc cargo epel-release + yum -y install patchelf + pip install awscli .. 
code-block:: bash - pip install -r requirements/static/ci/py{python_version}/changelog.txt + pip install -r requirements/static/ci/py{python_version}/tools.txt + +#. (Optional) To build a specific Salt version, you will need to install tools and changelog dependencies: -#. Ensure you are in the current Salt cloned git repo: .. code-block:: bash - cd salt + pip install -r requirements/static/ci/py{python_version}/changelog.txt #. (Optional) To build a specific Salt version, run tools and set Salt version: .. code-block:: bash - tools changelog update-rpm + tools changelog update-rpm -#. Run rpmbuild in the Salt repo: +#. Build the RPM: + + Only the arch argument is required, the rest are optional. .. code-block:: bash - rpmbuild -bb --define="_salt_src $(pwd)" $(pwd)/pkg/rpm/salt.spec + tools pkg build rpm --relenv-version --python-version --arch How to build deb packages ========================= -#. Install the dependencies: +#. Ensure you are in the current Salt cloned git repo.: .. code-block:: bash - apt install -y python3 python3-venv python3-pip build-essential devscripts debhelper bash-completion git + cd -#. (Optional) To build a specific Salt version, you will need to install tools and changelog dependencies: +#. Install the dependencies: + + .. code-block:: bash + + apt install -y apt-utils gnupg jq awscli python3 python3-venv python3-pip build-essential devscripts debhelper bash-completion git patchelf rustc .. code-block:: bash pip install -r requirements/static/ci/py{python_version}/tools.txt +#. (Optional) To build a specific Salt version, you will need to install changelog dependencies: + .. code-block:: bash pip install -r requirements/static/ci/py{python_version}/changelog.txt +#. (Optional) To build a specific Salt version, run tools and set Salt version: + + .. code-block:: bash + + tools changelog update-deb + + +#. Build the deb package: + + Only the arch argument is required, the rest are optional. + + .. 
code-block: bash + + tools pkg build deb --relenv-version --python-version --arch + + +How to build MacOS packages +=========================== + +#. Ensure you are in the current Salt cloned git repo.: .. code-block:: bash - cd salt -#. (Optional) To build a specific Salt version, run tools and set Salt version: + +#. Install the dependencies: + + .. code-block:: bash + + pip install -r requirements/static/ci/py{python_version}/tools.txt + +#. Build the MacOS package: + + Only the salt-version argument is required, the rest are optional. + Do note that you will not be able to sign the packages when building them. + + .. code-block:: bash + + tools pkg build macos --salt-version + + +How to build Windows packages +============================= + +#. Ensure you are in the current Salt cloned git repo.: .. code-block:: bash - tools changelog update-deb + cd +#. Install the dependencies: -#. Add a symlink and run debuild in the Salt repo: + .. code-block:: bash + + pip install -r requirements/static/ci/py{python_version}/tools.txt + +#. Build the Windows package: + + Only the arch and salt-version arguments are required, the rest are optional. + Do note that you will not be able to sign the packages when building them. .. code-block:: bash - ln -sf pkg/debian/ . - debuild -uc -us + tools pkg build windows --salt-version --arch How to access python binary @@ -133,3 +190,12 @@ How to access python binary The python library is available in the install directory of the onedir package. For example on linux the default location would be ``/opt/saltstack/salt/bin/python3``. + +Testing the packages +==================== + +If you want to test your built packages, or any other collection of salt packages post 3006.0, follow :ref:`this guide ` + +.. toctree:: + + testing diff --git a/doc/topics/packaging/testing.rst b/doc/topics/packaging/testing.rst new file mode 100644 index 000000000000..371b59af8558 --- /dev/null +++ b/doc/topics/packaging/testing.rst @@ -0,0 +1,157 @@ +.. 
_pkging-testing: + +================ +Testing packages +================ + +The package test suite +====================== + +The salt repo provides a test suite for testing basic functionality of our +packages at ``/pkg/tests/``. You can run the install, upgrade, and +downgrade tests. These tests run automatically on most PRs that are submitted +against Salt. + + +.. warning:: + + These tests make destructive changes to your system because they install the + built packages onto the system. They may also install older versions in the + case of upgrades or downgrades. To prevent destructive changes, run the + tests in an isolated system, preferably a virtual machine. + +Setup +===== +In order to run the package tests, the `relenv +`_ onedir and +built packages need to be placed in the correct locations. + +* Place all salt packages for the applicable testing version in + ``/artifacts/pkg/``. +* The onedir must be located under ``/artifacts/``. +* Additionally, to ensure complete parity with Salt's CI/CD suite, place the + ``nox`` virtual environment in ``/.nox/test-pkgs-onedir``. + +The following are a few ways this can be accomplished easily. + +You can ensure parity by installing the package test suite through a few +possible methods: + +* Using ``tools`` +* Downloading individually + +Using ``tools`` +--------------- +Salt has preliminary support for setting up the package test suite in the +``tools`` command suite that is located under ``/tools/testsuite/``. +This method requires the Github CLI tool ``gh`` (https://cli.github.com/) to be properly configured for +interaction with the salt repo. + +#. Install the dependencies using this command: + + .. code-block:: bash + + pip install -r requirements/static/ci/py{python_version}/tools.txt + +#. Download and extract the artifacts with this ``tools`` command: + + + .. 
code-block:: bash + + tools ts setup --platform {linux|darwin|windows} --slug + --pr --pkg + + The most common use case is to test the packages built on a CI/CD run for a + given PR. To see the possible options for each argument, and other ways to + utilize this command, use the following: + + .. code-block:: bash + + tools ts setup -h + +.. warning:: + + You can only download artifacts from finished workflow runs. This is something + imposed by the GitHub API. + To download artifacts from a running workflow run, you either have to wait for + the finish or cancel it. + +Downloading individually +------------------------ +If the ``tools ts setup`` command doesn't work, you can download, unzip, and +place the artifacts in the correct locations manually. Typically, you want to +test packages built on a CI/CD run for a given PR. This guide explains how to +set up for running the package tests using those artifacts. An analogous process +can be performed for artifacts from nightly builds. + +#. Find and download the artifacts: + + Under the summary page for the most recent actions run for that PR, there is + a list of available artifacts from that run that can be downloaded. Download + the package artifacts by finding + ``salt-.+.--``. For example, the + amd64 deb packages might look like: + ``salt-3006.2+123.01234567890-x86_64-deb``. + + The onedir artifact will look like + ``salt-.+.-onedir--.tar.xz``. For + instance, the macos x86_64 onedir may have the name + ``salt-3006.2+123.01234567890-onedir-darwin-x86_64.tar.xz``. + + .. note:: + + Windows onedir artifacts have ``.zip`` extensions instead of ``tar.xz`` + + While it is optional, it is recommended to download the ``nox`` session + artifact as well. This will have the form of + ``nox--test-pkgs-onedir-``. The amd64 Ubuntu 20.04 nox + artifact may look like ``nox-ubuntu-20.04-test-pkgs-onedir-x86_64``. + +#. Place the artifacts in the correct location: + + Unzip the packages and place them in ``/artifacts/pkg/``. 
+ + You must unzip and untar the onedir packages and place them in + ``/artifacts/``. Windows onedir requires an additional unzip + action. If you set it up correctly, the ``/artifacts/salt`` + directory then contains the uncompressed onedir files. + + Additionally, decompress the ``nox`` artifact and place it under + ``/.nox/``. + +Running the tests +================= +You can run the test suite if all the artifacts are in the correct location. + +.. note:: + + You need root access to run the test artifacts. Run all nox commands at the + root of the salt repo and as the root user. + +#. Install ``nox``: + + .. code-block:: bash + + pip install nox + +#. Run the install tests: + + .. code-block:: bash + + nox -e test-pkgs-onedir -- install + +#. Run the upgrade or downgrade tests: + + .. code-block:: bash + + nox -e test-pkgs-onedir -- upgrade --prev-version + + You can run the downgrade tests in the same way, replacing ``upgrade`` with + ``downgrade``. + + .. note:: + + If you are testing upgrades or downgrades and classic packages are + available for your system, replace ``upgrade`` or + ``downgrade`` with ``upgrade-classic`` or ``downgrade-classic`` + respectively to test against those versions. diff --git a/doc/topics/reactor/index.rst b/doc/topics/reactor/index.rst index 9372e76d0240..7cdadff29cdc 100644 --- a/doc/topics/reactor/index.rst +++ b/doc/topics/reactor/index.rst @@ -212,6 +212,10 @@ in :ref:`local reactions `, but as noted above this is not very user-friendly. Therefore, the new config schema is recommended if the master is running a supported release. +.. note:: + State ids of reactors for runners and wheels should all be unique. They can + overwrite each other when added to the async queue causing lost reactions.
+ The below two examples are equivalent: +-------------------------------------------------+-------------------------------------------------+ @@ -248,6 +252,10 @@ Like :ref:`runner reactions `, the old config schema called for wheel reactions to have arguments passed directly under the name of the :ref:`wheel function ` (or in ``arg`` or ``kwarg`` parameters). +.. note:: + State ids of reactors for runners and wheels should all be unique. They can + overwrite each other when added to the async queue causing lost reactions. + The below two examples are equivalent: +-----------------------------------+---------------------------------+ diff --git a/doc/topics/releases/3005.2.rst b/doc/topics/releases/3005.2.rst new file mode 100644 index 000000000000..b267ee3978a0 --- /dev/null +++ b/doc/topics/releases/3005.2.rst @@ -0,0 +1,40 @@ +.. _release-3005-2: + +========================= +Salt 3005.2 Release Notes +========================= + +Version 3005.2 is a CVE security fix release for :ref:`3005 `. + + +Changed +------- + +- Additional required package upgrades + + * It's now `pyzmq>=20.0.0` on all platforms, and `<=22.0.3` just for windows. + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. (#63757) + + +Security +-------- + +- fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection (cve-2023-20897) +- Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. 
(cve-2023-20898) +- Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q (#64336) +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 (#64595) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 + + Python 3.5 cannot get the updated requirements since certifi no longer supports this python version (#64720) diff --git a/doc/topics/releases/3005.3.rst b/doc/topics/releases/3005.3.rst new file mode 100644 index 000000000000..71d0f184c517 --- /dev/null +++ b/doc/topics/releases/3005.3.rst @@ -0,0 +1,19 @@ +.. _release-3005-3: + +========================= +Salt 3005.3 Release Notes +========================= + +Version 3005.3 is a Bug fix release for :ref:`3005 `. + + +Changed +------- + +- Fix __env__ and improve cache cleaning see more info at pull #65017. (#65002) + + +Security +-------- + +- Update to `gitpython>=3.1.35` due to https://github.com/advisories/GHSA-wfm5-v35h-vwf4 and https://github.com/advisories/GHSA-cwvm-v4w8-q58c (#65167) diff --git a/doc/topics/releases/3005.4.rst b/doc/topics/releases/3005.4.rst new file mode 100644 index 000000000000..18f904e74a26 --- /dev/null +++ b/doc/topics/releases/3005.4.rst @@ -0,0 +1,17 @@ +.. _release-3005-4: + +========================= +Salt 3005.4 Release Notes +========================= + +Version 3005.4 is a CVE security fix release for :ref:`3005 `. + + +Security +-------- + +- Fix CVE-2023-34049 by ensuring we do not use a predictable name for the script and correctly check returncode of scp command. + This only impacts salt-ssh users using the pre-flight option. 
(cve-2023-34049) +- Bump to `cryptography==41.0.4` due to https://github.com/advisories/GHSA-v8gr-m533-ghj9 (#65267) +- Bump to `urllib3==1.26.17` or `urllib3==2.0.6` due to https://github.com/advisories/GHSA-v845-jxx5-vc9f (#65334) +- Bump to `gitpython==3.1.37` due to https://github.com/advisories/GHSA-cwvm-v4w8-q58c (#65383) diff --git a/doc/topics/releases/3006.0.md b/doc/topics/releases/3006.0.md index 2dc57e110297..1e7cf924d6a8 100644 --- a/doc/topics/releases/3006.0.md +++ b/doc/topics/releases/3006.0.md @@ -1,5 +1,16 @@ (release-3006.0)= -# Salt 3006.0 release notes - Codename Sulfur +# Salt 3006.0 release notes + + + + + ## Onedir packaging @@ -9,6 +20,37 @@ be provided for supported operating systems. See [Upgrade to onedir](https://doc in the [Salt Install Guide](https://docs.saltproject.io/salt/install-guide/en/latest) for information about upgrading from the classic packages to the onedir packages. +## Linux Packaging Salt Master Salt User and Group + +The linux Salt Master packages will now add a Salt user and group +by default. The packages will add the ``user: salt`` config option +to the Salt Master config. The Salt Master service will run as this +user. If you do not want to run the Salt Master as a different user +you can remove the ``user: salt`` config from /etc/salt/master or change +the user to the desired user to run the Salt Master. + +Since the Salt Master is running as a different user, you will need +to ensure you set the owner and group to ``salt`` for your file_roots +and pillar_roots(commonly `/srv/salt` and `/srv/pillar`). + +If you are running a Salt Master, Salt-Api and a Salt Minion on the same +host using the new ``salt`` user and you install a pip dependency into +the onedir environment using ``salt-pip`` or the ``pip`` module, you +need to to chown the directory ``/opt/saltstack/salt/lib//site-packages/`` +with the ``salt`` user and group. 
+ + +## Caveat of salt-pip + +Salt ships with a wrapper script around pip called ``salt-pip``. Users should +use ``salt-pip`` to install any python packages needed to extend Salt. +``salt-pip`` installs python packages into an `extras-3.10` directory located +in the root of the onedir directory, by setting the ``--target`` argument for +pip. This ensures those packages remain installed when upgrading Salt. There is +a known bug in pip when using `--target` where scripts and other non-python +assets may not be cleaned up properly when uninstalling. The Salt team is +working to resolve this bug in the upstream pip project. + ## Dropping support for Python 3.5 and 3.6 @@ -22,7 +64,7 @@ for more information. All netapi clients, which provide the functionality to ``salt-api``, will now be disabled by default as a security precaution. If you use ``salt-api``, you -must add the new ``netapi_enable_clients`` option to your salt master config. +must add the new ``netapi_enable_clients`` option to your salt master config. This is a breaking change and the ``salt-api`` will not function without this new configuration option. See [Enabling netapi client interfaces](https://docs.saltproject.io/en/3006.0/topics/netapi/netapi-enable-clients.html#netapi-enable-clients) for more information. @@ -34,7 +76,7 @@ The migration path from the classic, non-onedir packages to the onedir packages will include: * Repo File: You need to update your repo file to point to the new repo paths - for your platform. After the repo file is updated, upgrade your Salt packages. + for your platform. After the repo file is updated, upgrade your Salt packages. * Pip packages: You need to ensure any 3rd party pip packages are installed in the correct onedir path. This can be accomplished in two ways: @@ -42,7 +84,7 @@ will include: * Using the ``pip.installed`` Salt state.
To install python packages into the system python environment, user's must now -provide the ``pip_bin`` or ``bin_env`` to the pip state module. +provide the ``pip_bin`` or ``bin_env`` to the pip state module. For example: @@ -55,6 +97,10 @@ lib-bar: - bin_env: /usr/bin/python3 ``` + ## Changelog ### Removed @@ -67,6 +113,7 @@ lib-bar: This addresses problems with building a one dir build for macOS. It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir(because it would be tied to the macOS version where the onedir was built). Since it's currently not being used, it's removed. [#62432](https://github.com/saltstack/salt/issues/62432) +- Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. [#63874](https://github.com/saltstack/salt/issues/63874) ### Deprecated @@ -110,6 +157,7 @@ lib-bar: - Stop relying on `salt/_version.py` to write Salt's version. Instead use `salt/_version.txt` which only contains the version string. [#63383](https://github.com/saltstack/salt/issues/63383) - Set enable_fqdns_grains to be False by default. [#63595](https://github.com/saltstack/salt/issues/63595) - Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file. [#63710](https://github.com/saltstack/salt/issues/63710) +- Upgraded to `relenv==0.9.0` [#63883](https://github.com/saltstack/salt/issues/63883) ### Fixed @@ -262,6 +310,7 @@ lib-bar: - Modified "_get_flags" function so that it returns regex flags instead of integers [#62676](https://github.com/saltstack/salt/issues/62676) - Change startup ReqServer log messages from error to info level. 
[#62728](https://github.com/saltstack/salt/issues/62728) - Fix kmod.* functions hard code relative command name [#62772](https://github.com/saltstack/salt/issues/62772) +- Remove mako as a dependency in Windows and macOS. [#62785](https://github.com/saltstack/salt/issues/62785) - Fix mac_brew_pkg to work with null taps [#62793](https://github.com/saltstack/salt/issues/62793) - Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" items is being treated as a schedule item. [#62795](https://github.com/saltstack/salt/issues/62795) - Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode [#62817](https://github.com/saltstack/salt/issues/62817) @@ -299,12 +348,34 @@ lib-bar: - Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. [#63103](https://github.com/saltstack/salt/issues/63103) - TCP transport documentation now contains proper master/minion-side filtering information [#63120](https://github.com/saltstack/salt/issues/63120) - Fixed gpg.verify does not respect gnupghome [#63145](https://github.com/saltstack/salt/issues/63145) +- User responsible for the runner is now correctly reported in the events on the event bus for the runner. [#63148](https://github.com/saltstack/salt/issues/63148) - Made pillar cache pass extra minion data as well [#63208](https://github.com/saltstack/salt/issues/63208) - Fix serious performance issues with the file.tidied module [#63231](https://github.com/saltstack/salt/issues/63231) +- Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. 
[#63317](https://github.com/saltstack/salt/issues/63317) - Import StrictVersion and LooseVersion from setuptools.distutils.verison or setuptools._distutils.version, if first not available [#63350](https://github.com/saltstack/salt/issues/63350) +- ``service.status`` on Windows no longer throws a CommandExecutionError if + the service is not found on the system. It now returns "Not Found" instead. [#63577](https://github.com/saltstack/salt/issues/63577) - When the shell is passed as powershell or pwsh, only wrapper the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended. [#63590](https://github.com/saltstack/salt/issues/63590) - LGPO: Added support for "Relax minimum password length limits" [#63596](https://github.com/saltstack/salt/issues/63596) +- Fixed the ability to set a scheduled task to auto delete if not scheduled to run again (``delete_after``) [#63650](https://github.com/saltstack/salt/issues/63650) +- When a job is disabled only increase its _next_fire_time value if the job would have run at the current time, e.g. the current _next_fire_time == now. [#63699](https://github.com/saltstack/salt/issues/63699) +- have salt.template.compile_template_str cleanup its temp files. [#63724](https://github.com/saltstack/salt/issues/63724) - Check file is not empty before attempting to read pillar disk cache file [#63729](https://github.com/saltstack/salt/issues/63729) +- Fixed an issue with generating fingerprints for public keys with different line endings [#63742](https://github.com/saltstack/salt/issues/63742) +- Add `fileserver_interval` and `maintenance_interval` master configuration options. These options control how often to restart the FileServerUpdate and Maintenance processes. Some file server and pillar configurations are known to cause memory leaks over time. A notable example of this are configurations that use pygit2.
Salt can not guarantee dependency libraries like pygit2 won't leak memory. Restarting any long running processes that use pygit2 guarantees we can keep the master's memory usage in check. [#63747](https://github.com/saltstack/salt/issues/63747) +- mac_xattr.list and mac_xattr.read will replace undecode-able bytes to avoid raising CommandExecutionError. [#63779](https://github.com/saltstack/salt/issues/63779) [#63779](https://github.com/saltstack/salt/issues/63779) +- Change default GPG keyserver from pgp.mit.edu to keys.openpgp.org. [#63806](https://github.com/saltstack/salt/issues/63806) +- fix cherrypy 400 error output to be less generic. [#63835](https://github.com/saltstack/salt/issues/63835) +- Ensure kwargs is passed along to _call_apt when passed into install function. [#63847](https://github.com/saltstack/salt/issues/63847) +- remove eval and update logging to be more informative on bad config [#63879](https://github.com/saltstack/salt/issues/63879) +- add linux_distribution to util to stop dep warning [#63904](https://github.com/saltstack/salt/issues/63904) +- Fix ValueError when trying to close fileclient. Remove usage of __del__ and close the fileclient properly. [#63920](https://github.com/saltstack/salt/issues/63920) +- Handle the situation when a sub proxy minion does not init properly, eg. an exception happens, and the sub proxy object is not available. [#63923](https://github.com/saltstack/salt/issues/63923) +- Clarifying documentation for extension_modules configuration option. [#63929](https://github.com/saltstack/salt/issues/63929) +- Windows pkg module now properly handles versions containing strings [#63935](https://github.com/saltstack/salt/issues/63935) +- Handle the scenario when the check_cmd requisite is used with a state function when the state has a local check_cmd function but that function isn't used by that function.
[#63948](https://github.com/saltstack/salt/issues/63948) +- Issue #63981: Allow users to pass verify_ssl to pkg.install/pkg.installed on Windows [#63981](https://github.com/saltstack/salt/issues/63981) +- Hardened permissions on workers.ipc and master_event_pub.ipc. [#64063](https://github.com/saltstack/salt/issues/64063) ### Added @@ -364,6 +435,7 @@ lib-bar: - Add keyvalue serializer for environment files [#62983](https://github.com/saltstack/salt/issues/62983) - Add ability to ignore symlinks in file.tidied [#63042](https://github.com/saltstack/salt/issues/63042) - salt-cloud support IMDSv2 tokens when using 'use-instance-role-credentials' [#63067](https://github.com/saltstack/salt/issues/63067) +- Fix running fast tests twice and add git labels to suite. [#63081](https://github.com/saltstack/salt/issues/63081) - Add ability for file.symlink to not set ownership on existing links [#63093](https://github.com/saltstack/salt/issues/63093) - Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation [#63095](https://github.com/saltstack/salt/issues/63095) - Add functions that will return the underlying block device, mount point, and filesystem type for a given path [#63098](https://github.com/saltstack/salt/issues/63098) @@ -377,3 +449,20 @@ lib-bar: enable them globally for all state runs. [#63316](https://github.com/saltstack/salt/issues/63316) - Allow max queue size setting for state runs to prevent performance problems from queue growth [#63356](https://github.com/saltstack/salt/issues/63356) - Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) +- Include the version of `relenv` in the versions report. 
[#63827](https://github.com/saltstack/salt/issues/63827) +- Added debug log messages displaying the command being run when removing packages on Windows [#63866](https://github.com/saltstack/salt/issues/63866) +- Adding the ability to exclude arguments from a state that end up passed to cmd.retcode when requisites such as onlyif or unless are used. [#63956](https://github.com/saltstack/salt/issues/63956) +- Add --next-release argument to salt/version.py, which prints the next upcoming release. [#64023](https://github.com/saltstack/salt/issues/64023) + + +### Security + +- Upgrade Requirements Due to Security Issues. + + * Upgrade to `cryptography>=39.0.1` due to: + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. + * Update to `markdown-it-py==2.2.0` due to: + * https://github.com/advisories/GHSA-jrwr-5x3p-hvc3 + * https://github.com/advisories/GHSA-vrjv-mxr7-vjf8 [#63882](https://github.com/saltstack/salt/issues/63882) diff --git a/doc/topics/releases/3006.1.md b/doc/topics/releases/3006.1.md new file mode 100644 index 000000000000..2bf2dea1d313 --- /dev/null +++ b/doc/topics/releases/3006.1.md @@ -0,0 +1,52 @@ +(release-3006.1)= +# Salt 3006.1 release notes + + + + + + + + +## Changelog + +### Fixed + +- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236) +- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477) +- add documentation note about reactor state ids. 
[#63589](https://github.com/saltstack/salt/issues/63589) +- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785) +- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810) +- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081) +- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082) +- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. + This event is only used when these functions are called via the schedule execution modules. + Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103) +- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109) +- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111) +- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager + whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113) +- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. 
[#64114](https://github.com/saltstack/salt/issues/64114) +- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117) +- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118) +- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126) +- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141) +- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150) +- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158) +- Fixed issue in salt-cloud so that multiple masters specified in the cloud + are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170) +- Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184) +- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186) +- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3.
[#64192](https://github.com/saltstack/salt/issues/64192) +- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200) +- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204) +- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222) diff --git a/doc/topics/releases/3006.2.md b/doc/topics/releases/3006.2.md new file mode 100644 index 000000000000..7e607729835d --- /dev/null +++ b/doc/topics/releases/3006.2.md @@ -0,0 +1,50 @@ +(release-3006.2)= +# Salt 3006.2 release notes + + + + + + + +## Changelog + +### Fixed + +- In scenarios where PythonNet fails to load, Salt will now fall back to WMI for + gathering grains information [#64897](https://github.com/saltstack/salt/issues/64897) + + +### Security + +- fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection [#cve-2023-20897](https://github.com/saltstack/salt/issues/cve-2023-20897) +- Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. [#cve-2023-20898](https://github.com/saltstack/salt/issues/cve-2023-20898) +- Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q [#64336](https://github.com/saltstack/salt/issues/64336) +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. 
+ + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 [#64595](https://github.com/saltstack/salt/issues/64595) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) +- Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) diff --git a/doc/topics/releases/3006.3.md b/doc/topics/releases/3006.3.md new file mode 100644 index 000000000000..63a3174d2433 --- /dev/null +++ b/doc/topics/releases/3006.3.md @@ -0,0 +1,161 @@ +(release-3006.3)= +# Salt 3006.3 release notes + + + + + + + +## Changelog + +### Removed + +- Fedora 36 support was removed because it reached EOL [#64315](https://github.com/saltstack/salt/issues/64315) +- Handle deprecation warnings: + + * Switch to `FullArgSpec` since Py 3.11 no longer has `ArgSpec`, deprecated since Py 3.0 + * Stop using the deprecated `cgi` module + * Stop using the deprecated `pipes` module + * Stop using the deprecated `imp` module [#64553](https://github.com/saltstack/salt/issues/64553) + + +### Changed + +- Replace libnacl with PyNaCl [#64372](https://github.com/saltstack/salt/issues/64372) +- Don't hardcode the python version on the Salt Package tests and on the `pkg/debian/salt-cloud.postinst` file [#64553](https://github.com/saltstack/salt/issues/64553) +- Some more deprecated code fixes: + + * Stop using the deprecated `locale.getdefaultlocale()` function + * Stop accessing deprecated attributes + * `pathlib.Path.__enter__()` usage is deprecated and not required, a no-op 
[#64565](https://github.com/saltstack/salt/issues/64565) +- Bump to `pyyaml==6.0.1` due to https://github.com/yaml/pyyaml/issues/601 and address lint issues [#64657](https://github.com/saltstack/salt/issues/64657) + + +### Fixed + +- Fix for assume role when used salt-cloud to create aws ec2. [#52501](https://github.com/saltstack/salt/issues/52501) +- fixes aptpkg module by checking for blank comps. [#58667](https://github.com/saltstack/salt/issues/58667) +- `wheel.file_roots.find` is now able to find files in subdirectories of the roots. [#59800](https://github.com/saltstack/salt/issues/59800) +- pkg.latest no longer fails when multiple versions are reported to be installed (e.g. updating the kernel) [#60931](https://github.com/saltstack/salt/issues/60931) +- Do not update the credentials dictionary in `utils/aws.py` while iterating over it, and use the correct delete functionality [#61049](https://github.com/saltstack/salt/issues/61049) +- fixed runner not having a proper exit code when runner modules throw an exception. [#61173](https://github.com/saltstack/salt/issues/61173) +- `pip.list_all_versions` now works with `index_url` and `extra_index_url` [#61610](https://github.com/saltstack/salt/issues/61610) +- speed up file.recurse by using prefix with cp.list_master_dir and remove an un-needed loop. [#61998](https://github.com/saltstack/salt/issues/61998) +- Preserve test=True condition while running sub states. [#62590](https://github.com/saltstack/salt/issues/62590) +- Job returns are only sent to originating master [#62834](https://github.com/saltstack/salt/issues/62834) +- Fixes an issue with failing subsequent state runs with the lgpo state module. + The ``lgpo.get_polcy`` function now returns all boolean settings. [#63296](https://github.com/saltstack/salt/issues/63296) +- Fix SELinux get policy with trailing whitespace [#63336](https://github.com/saltstack/salt/issues/63336) +- Fixes an issue with boolean settings not being reported after being set. 
The + ``lgpo.get_polcy`` function now returns all boolean settings. [#63473](https://github.com/saltstack/salt/issues/63473) +- Ensure body is returned when salt.utils.http returns something other than 200 with tornado backend. [#63557](https://github.com/saltstack/salt/issues/63557) +- Allow long running pillar and file client requests to finish using request_channel_timeout and request_channel_tries minion config. [#63824](https://github.com/saltstack/salt/issues/63824) +- Fix state_queue type checking to allow int values [#64122](https://github.com/saltstack/salt/issues/64122) +- Call global logger when catching pip.list exceptions in states.pip.installed + Rename global logger `log` to `logger` inside pip_state [#64169](https://github.com/saltstack/salt/issues/64169) +- Fixes permissions created by the Debian and RPM packages for the salt user. + + The salt user created by the Debian and RPM packages to run the salt-master process, was previously given ownership of various directories in a way which compromised the benefits of running the salt-master process as a non-root user. + + This fix sets the salt user to only have write access to those files and + directories required for the salt-master process to run. [#64193](https://github.com/saltstack/salt/issues/64193) +- Fix user.present state when groups is unset to ensure the groups are unchanged, as documented. [#64211](https://github.com/saltstack/salt/issues/64211) +- Fixes issue with MasterMinion class loading configuration from `/etc/salt/minion.d/*.conf`. + + The MasterMinion class (used for running orchestrations on master and other functionality) was incorrectly loading configuration from `/etc/salt/minion.d/*.conf`, when it should only load configuration from `/etc/salt/master` and `/etc/salt/master.d/*.conf`.
[#64219](https://github.com/saltstack/salt/issues/64219) +- Fixed issue in mac_user.enable_auto_login that caused the user's keychain to be reset at each boot [#64226](https://github.com/saltstack/salt/issues/64226) +- Fixed KeyError in logs when running a state that fails. [#64231](https://github.com/saltstack/salt/issues/64231) +- Fixed x509_v2 `create_private_key`/`create_crl` unknown kwargs: __pub_fun... [#64232](https://github.com/saltstack/salt/issues/64232) +- remove the hard coded python version in error. [#64237](https://github.com/saltstack/salt/issues/64237) +- `salt-pip` now properly errors out when being called from a non `onedir` environment. [#64249](https://github.com/saltstack/salt/issues/64249) +- Ensure we return an error when adding the key fails in the pkgrepo state for debian hosts. [#64253](https://github.com/saltstack/salt/issues/64253) +- Fixed file client private attribute reference on `SaltMakoTemplateLookup` [#64280](https://github.com/saltstack/salt/issues/64280) +- Fix pkgrepo.absent failures on apt-based systems when repo either a) contains a + trailing slash, or b) there is an arch mismatch. [#64286](https://github.com/saltstack/salt/issues/64286) +- Fix detection of Salt codename by "salt_version" execution module [#64306](https://github.com/saltstack/salt/issues/64306) +- Ensure selinux values are handled lowercase [#64318](https://github.com/saltstack/salt/issues/64318) +- Remove the `clr.AddReference`, it is causing an `Illegal characters in path` exception [#64339](https://github.com/saltstack/salt/issues/64339) +- Update `pkg.group_installed` state to support repo options [#64348](https://github.com/saltstack/salt/issues/64348) +- Fix salt user login shell path in Debian packages [#64377](https://github.com/saltstack/salt/issues/64377) +- Allow for multiple user's keys presented when authenticating, for example: root, salt, etc. 
[#64398](https://github.com/saltstack/salt/issues/64398) +- Fixed an issue with ``lgpo_reg`` where existing entries for the same key in + ``Registry.pol`` were being overwritten in subsequent runs if the value name in + the subesequent run was contained in the existing value name. For example, a + key named ``SetUpdateNotificationLevel`` would be overwritten by a subsequent + run attempting to set ``UpdateNotificationLevel`` [#64401](https://github.com/saltstack/salt/issues/64401) +- Add search for %ProgramData%\Chocolatey\choco.exe to determine if Chocolatey is installed or not [#64427](https://github.com/saltstack/salt/issues/64427) +- Fix regression for user.present on handling groups with dupe GIDs [#64430](https://github.com/saltstack/salt/issues/64430) +- Fix inconsistent use of args in ssh_auth.managed [#64442](https://github.com/saltstack/salt/issues/64442) +- Ensure we raise an error when the name argument is invalid in pkgrepo.managed state for systems using apt. [#64451](https://github.com/saltstack/salt/issues/64451) +- Fix file.symlink will not replace/update existing symlink [#64477](https://github.com/saltstack/salt/issues/64477) +- Fixed salt-ssh state.* commands returning retcode 0 when state/pillar rendering fails [#64514](https://github.com/saltstack/salt/issues/64514) +- Fix pkg.install when using a port in the url. [#64516](https://github.com/saltstack/salt/issues/64516) +- `win_pkg` Fixes an issue runing `pkg.install` with `version=latest` where the + new installer would not be cached if there was already an installer present + with the same name. [#64519](https://github.com/saltstack/salt/issues/64519) +- Added a `test:full` label in the salt repository, which, when selected, will force a full test run. 
[#64539](https://github.com/saltstack/salt/issues/64539) +- Syndic's async_req_channel uses the asynchornous version of request channel [#64552](https://github.com/saltstack/salt/issues/64552) +- Ensure runners properly save information to job cache. [#64570](https://github.com/saltstack/salt/issues/64570) +- Added salt.ufw to salt-master install on Debian and Ubuntu [#64572](https://github.com/saltstack/salt/issues/64572) +- Added support for Chocolatey 2.0.0+ while maintaining support for older versions [#64622](https://github.com/saltstack/salt/issues/64622) +- Updated semanage fcontext to use --modify if context already exists when adding context [#64625](https://github.com/saltstack/salt/issues/64625) +- Preserve request client socket between requests. [#64627](https://github.com/saltstack/salt/issues/64627) +- Show user friendly message when pillars timeout [#64651](https://github.com/saltstack/salt/issues/64651) +- File client timeouts durring jobs show user friendly errors instead of tracbacks [#64653](https://github.com/saltstack/salt/issues/64653) +- SaltClientError does not log a traceback on minions, we expect these to happen so a user friendly log is shown. [#64729](https://github.com/saltstack/salt/issues/64729) +- Look in location salt is running from, this accounts for running from an unpacked onedir file that has not been installed. [#64877](https://github.com/saltstack/salt/issues/64877) +- Preserve credentials on spawning platforms, minions no longer re-authenticate + with every job when using `multiprocessing=True`. [#64914](https://github.com/saltstack/salt/issues/64914) +- Fixed uninstaller to not remove the `salt` directory by default. This allows + the `extras-3.##` folder to persist so salt-pip dependencies are not wiped out + during an upgrade. [#64957](https://github.com/saltstack/salt/issues/64957) +- fix msteams by adding the missing header that Microsoft is now enforcing. 
[#64973](https://github.com/saltstack/salt/issues/64973) +- Fix __env__ and improve cache cleaning see more info at pull #65017. [#65002](https://github.com/saltstack/salt/issues/65002) +- Better error message on inconsistent decoded payload [#65020](https://github.com/saltstack/salt/issues/65020) +- Handle permissions access error when calling `lsb_release` with the salt user [#65024](https://github.com/saltstack/salt/issues/65024) +- Allow schedule state module to update schedule when the minion is offline. [#65033](https://github.com/saltstack/salt/issues/65033) +- Fixed creation of wildcard DNS in SAN in `x509_v2` [#65072](https://github.com/saltstack/salt/issues/65072) +- The macOS installer no longer removes the extras directory [#65073](https://github.com/saltstack/salt/issues/65073) + + +### Added + +- Added a script to automate setting up a 2nd minion in a user context on Windows [#64439](https://github.com/saltstack/salt/issues/64439) +- Several fixes to the CI workflow: + + * Don't override the `on` Jinja block on the `ci.yaml` template. This enables reacting to labels getting added/removed + to/from pull requests. + * Switch to using `tools` and re-use the event payload available instead of querying the GH API again to get the pull + request labels + * Concentrate test selection by labels to a single place + * Enable code coverage on pull-requests by setting the `test:coverage` label [#64547](https://github.com/saltstack/salt/issues/64547) + + +### Security + +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. 
+ + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r [#64595](https://github.com/saltstack/salt/issues/64595) +- Bump to `aiohttp==3.8.5` due to https://github.com/advisories/GHSA-45c4-8wx5-qw6w [#64687](https://github.com/saltstack/salt/issues/64687) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) +- Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) +- Update to `gitpython>=3.1.32` due to https://github.com/advisories/GHSA-pr76-5cm5-w9cj [#64988](https://github.com/saltstack/salt/issues/64988) diff --git a/doc/topics/releases/3006.4.md b/doc/topics/releases/3006.4.md new file mode 100644 index 000000000000..83478f1cc994 --- /dev/null +++ b/doc/topics/releases/3006.4.md @@ -0,0 +1,29 @@ +(release-3006.4)= +# Salt 3006.4 release notes + + + + + + + +## Changelog + +### Security + +- Fix CVE-2023-34049 by ensuring we do not use a predictable name for the script and correctly check returncode of scp command. + This only impacts salt-ssh users using the pre-flight option. 
[#cve-2023-34049](https://github.com/saltstack/salt/issues/cve-2023-34049) +- Update to `gitpython>=3.1.35` due to https://github.com/advisories/GHSA-wfm5-v35h-vwf4 and https://github.com/advisories/GHSA-cwvm-v4w8-q58c [#65163](https://github.com/saltstack/salt/issues/65163) +- Bump to `cryptography==41.0.4` due to https://github.com/advisories/GHSA-v8gr-m533-ghj9 [#65268](https://github.com/saltstack/salt/issues/65268) +- Upgrade relenv to 0.13.12 to address CVE-2023-4807 [#65316](https://github.com/saltstack/salt/issues/65316) +- Bump to `urllib3==1.26.17` or `urllib3==2.0.6` due to https://github.com/advisories/GHSA-v845-jxx5-vc9f [#65334](https://github.com/saltstack/salt/issues/65334) +- Bump to `gitpython==3.1.37` due to https://github.com/advisories/GHSA-cwvm-v4w8-q58c [#65383](https://github.com/saltstack/salt/issues/65383) diff --git a/doc/topics/releases/3006.5.md b/doc/topics/releases/3006.5.md new file mode 100644 index 000000000000..9e97f72382dd --- /dev/null +++ b/doc/topics/releases/3006.5.md @@ -0,0 +1,93 @@ +(release-3006.5)= +# Salt 3006.5 release notes + + + + + + + +## Changelog + +### Removed + +- Tech Debt - support for pysss removed due to functionality addition in Python 3.3 [#65029](https://github.com/saltstack/salt/issues/65029) + + +### Fixed + +- Improved error message when state arguments are accidentally passed as a string [#38098](https://github.com/saltstack/salt/issues/38098) +- Allow `pip.install` to create a log file that is passed in if the parent directory is writeable [#44722](https://github.com/saltstack/salt/issues/44722) +- Fixed merging of complex pillar overrides with salt-ssh states [#59802](https://github.com/saltstack/salt/issues/59802) +- Fixed gpg pillar rendering with salt-ssh [#60002](https://github.com/saltstack/salt/issues/60002) +- Made salt-ssh states not re-render pillars unnecessarily [#62230](https://github.com/saltstack/salt/issues/62230) +- Made Salt maintain options in Debian package repo definitions 
[#64130](https://github.com/saltstack/salt/issues/64130) +- Migrated all [`invoke`](https://www.pyinvoke.org/) tasks to [`python-tools-scripts`](https://github.com/s0undt3ch/python-tools-scripts). + + * `tasks/docs.py` -> `tools/precommit/docs.py` + * `tasks/docstrings.py` -> `tools/precommit/docstrings.py` + * `tasks/loader.py` -> `tools/precommit/loader.py` + * `tasks/filemap.py` -> `tools/precommit/filemap.py` [#64374](https://github.com/saltstack/salt/issues/64374) +- Fix salt user login shell path in Debian packages [#64377](https://github.com/saltstack/salt/issues/64377) +- Fill out lsb_distrib_xxxx (best estimate) grains if problems with retrieving lsb_release data [#64473](https://github.com/saltstack/salt/issues/64473) +- Fixed an issue in the ``file.directory`` state where the ``children_only`` keyword + argument was not being respected. [#64497](https://github.com/saltstack/salt/issues/64497) +- Move salt.ufw to correct location /etc/ufw/applications.d/ [#64572](https://github.com/saltstack/salt/issues/64572) +- Fixed salt-ssh stacktrace when retcode is not an integer [#64575](https://github.com/saltstack/salt/issues/64575) +- Fixed SSH shell seldomly fails to report any exit code [#64588](https://github.com/saltstack/salt/issues/64588) +- Fixed some issues in x509_v2 execution module private key functions [#64597](https://github.com/saltstack/salt/issues/64597) +- Fixed grp.getgrall() in utils/user.py causing performance issues [#64888](https://github.com/saltstack/salt/issues/64888) +- Fix user.list_groups omits remote groups via sssd, etc. 
[#64953](https://github.com/saltstack/salt/issues/64953) +- Ensure sync from _grains occurs before attempting pillar compilation in case custom grain used in pillar file [#65027](https://github.com/saltstack/salt/issues/65027) +- Moved gitfs locks to salt working dir to avoid lock wipes [#65086](https://github.com/saltstack/salt/issues/65086) +- Only attempt to create a keys directory when `--gen-keys` is passed to the `salt-key` CLI [#65093](https://github.com/saltstack/salt/issues/65093) +- Fix nonce verification, request server replies do not stomp on eachother. [#65114](https://github.com/saltstack/salt/issues/65114) +- speed up yumpkg list_pkgs by not requiring digest or signature verification on lookup. [#65152](https://github.com/saltstack/salt/issues/65152) +- Fix pkg.latest failing on windows for winrepo packages where the package is already up to date [#65165](https://github.com/saltstack/salt/issues/65165) +- Ensure __kwarg__ is preserved when checking for kwargs. This change affects proxy minions when used with Deltaproxy, which had kwargs popped when targeting multiple minions id. [#65179](https://github.com/saltstack/salt/issues/65179) +- Fixes traceback when state id is an int in a reactor SLS file. [#65210](https://github.com/saltstack/salt/issues/65210) +- Install logrotate config as /etc/logrotate.d/salt-common for Debian packages + Remove broken /etc/logrotate.d/salt directory from 3006.3 if it exists. [#65231](https://github.com/saltstack/salt/issues/65231) +- Use ``sha256`` as the default ``hash_type``. It has been the default since Salt v2016.9 [#65287](https://github.com/saltstack/salt/issues/65287) +- Preserve ownership on log rotation [#65288](https://github.com/saltstack/salt/issues/65288) +- Ensure that the correct value of jid_inclue is passed if the argument is included in the passed keyword arguments. [#65302](https://github.com/saltstack/salt/issues/65302) +- Uprade relenv to 0.14.2 + - Update openssl to address CVE-2023-5363. 
+ - Fix bug in openssl setup when openssl binary can't be found. + - Add M1 mac support. [#65316](https://github.com/saltstack/salt/issues/65316) +- Fix regex for filespec adding/deleting fcontext policy in selinux [#65340](https://github.com/saltstack/salt/issues/65340) +- Ensure CLI options take priority over Saltfile options [#65358](https://github.com/saltstack/salt/issues/65358) +- Test mode for state function `saltmod.wheel` no longer set's `result` to `(None,)` [#65372](https://github.com/saltstack/salt/issues/65372) +- Client only process events which tag conforms to an event return. [#65400](https://github.com/saltstack/salt/issues/65400) +- Fixes an issue setting user or machine policy on Windows when the Group Policy + directory is missing [#65411](https://github.com/saltstack/salt/issues/65411) +- Fix regression in file module which was not re-using a file client. [#65450](https://github.com/saltstack/salt/issues/65450) +- pip.installed state will now properly fail when a specified user does not exists [#65458](https://github.com/saltstack/salt/issues/65458) +- Publish channel connect callback method properly closes it's request channel. [#65464](https://github.com/saltstack/salt/issues/65464) +- Ensured the pillar in SSH wrapper modules is the same as the one used in template rendering when overrides are passed [#65483](https://github.com/saltstack/salt/issues/65483) +- Fix file.comment ignore_missing not working with multiline char [#65501](https://github.com/saltstack/salt/issues/65501) +- Warn when an un-closed transport client is being garbage collected. [#65554](https://github.com/saltstack/salt/issues/65554) +- Only generate the HMAC's for ``libssl.so.1.1`` and ``libcrypto.so.1.1`` if those files exist. 
[#65581](https://github.com/saltstack/salt/issues/65581) +- Fixed an issue where Salt Cloud would fail if it could not delete lingering + PAexec binaries [#65584](https://github.com/saltstack/salt/issues/65584) + + +### Added + +- Added Salt support for Debian 12 [#64223](https://github.com/saltstack/salt/issues/64223) +- Added Salt support for Amazon Linux 2023 [#64455](https://github.com/saltstack/salt/issues/64455) + + +### Security + +- Bump to `cryptography==41.0.4` due to https://github.com/advisories/GHSA-v8gr-m533-ghj9 [#65268](https://github.com/saltstack/salt/issues/65268) +- Bump to `cryptography==41.0.7` due to https://github.com/advisories/GHSA-jfhm-5ghh-2f97 [#65643](https://github.com/saltstack/salt/issues/65643) diff --git a/doc/topics/releases/templates/3006.0.md.template b/doc/topics/releases/templates/3006.0.md.template new file mode 100644 index 000000000000..29d4e41c49e6 --- /dev/null +++ b/doc/topics/releases/templates/3006.0.md.template @@ -0,0 +1,99 @@ +(release-3006.0)= +# Salt 3006.0 release notes{{ unreleased }} +{{ warning }} + + + +## Onedir packaging + +Going forward from the 3006.0 release, the Salt Project will only provide onedir +packages to install or upgrade Salt. The classic, non-onedir packages will not +be provided for supported operating systems. See [Upgrade to onedir](https://docs.saltproject.io/salt/install-guide/en/latest/topics/upgrade-to-onedir.html) +in the [Salt Install Guide](https://docs.saltproject.io/salt/install-guide/en/latest) for information about upgrading from the classic packages to the onedir +packages. + +## Linux Packaging Salt Master Salt User and Group + +The linux Salt Master packages will now add a Salt user and group +by default. The packages will add the ``user: salt`` config option +to the Salt Master config. The Salt Master service will run as this +user. 
If you do not want to run the Salt Master as a different user +you can remove the ``user: salt`` config from /etc/salt/master or change +the user to the desired user to run the Salt Master. + +Since the Salt Master is running as a different user, you will need +to ensure you set the owner and group to ``salt`` for your file_roots +and pillar_roots(commonly `/srv/salt` and `/srv/pillar`). + +If you are running a Salt Master, Salt-Api and a Salt Minion on the same +host using the new ``salt`` user and you install a pip dependency into +the onedir environment using ``salt-pip`` or the ``pip`` module, you +need to to chown the directory ``/opt/saltstack/salt/lib//site-packages/`` +with the ``salt`` user and group. + + +## Caveat of salt-pip + +Salt ships with a wrapper script around pip called ``salt-pip``. Users should +use ``salt-pip`` to install any python packages needed to extend Salt. +``salt-pip`` installs python packages into an `extras-3.10` directory located +in the root of the ondir directory, by setting the ``--target`` argument for +pip. This ensures those packages remain installed when upgrading Salt. There is +a known bug in pip when using `--target` where scripts and other non-python +assets may not be cleaned up properly when un-installing. The Salt team is +working to resolve this bug in the up-stream pip project. + + +## Dropping support for Python 3.5 and 3.6 + +Python 3.5 and 3.6 will no longer be supported by Salt since they +are end of life. Going forward our policy will be to align with Python's +supported versions. See [Salt support for Python versions](https://docs.saltproject.io/salt/install-guide/en/latest/topics/salt-python-version-support.html) +for more information. + + +## All salt-api functionality disabled by default + +All netapi clients, which provide the functionality to ``salt-api``, will now +be disabled by default as a security precaution. 
If you use ``salt-api``, you +must add the new ``netapi_enable_clients`` option to your salt master config. +This is a breaking change and the ``salt-api`` will not function without this +new configuration option. See [Enabling netapi client interfaces](https://docs.saltproject.io/en/3006.0/topics/netapi/netapi-enable-clients.html#netapi-enable-clients) +for more information. + + +## How do I migrate to the onedir packages? + +The migration path from the classic, non-onedir packages to the onedir packages +will include: + +* Repo File: You need to update your repo file to point to the new repo paths + for your platform. After the repo file is updated, upgrade your Salt packages. +* Pip packages: You need to ensure any 3rd party pip packages are installed in + the correct onedir path. This can be accomplished in two ways: + + * ``salt-pip install `` + * Using the ``pip.installed`` Salt state. + +To install python packages into the system python environment, user's must now +provide the ``pip_bin`` or ``bin_env`` to the pip state module. 
+ +For example: + +```yaml +lib-foo: + pip.installed: + - pip_bin: /usr/bin/pip3 +lib-bar: + pip.installed: + - bin_env: /usr/bin/python3 +``` + + +## Changelog +{{ changelog }} diff --git a/doc/topics/releases/templates/3006.1.md.template b/doc/topics/releases/templates/3006.1.md.template new file mode 100644 index 000000000000..f5302a4eab21 --- /dev/null +++ b/doc/topics/releases/templates/3006.1.md.template @@ -0,0 +1,15 @@ +(release-3006.1)= +# Salt 3006.1 release notes{{ unreleased }} +{{ warning }} + + + + + +## Changelog +{{ changelog }} diff --git a/doc/topics/releases/templates/3006.2.md.template b/doc/topics/releases/templates/3006.2.md.template new file mode 100644 index 000000000000..959ae89599b8 --- /dev/null +++ b/doc/topics/releases/templates/3006.2.md.template @@ -0,0 +1,14 @@ +(release-3006.2)= +# Salt 3006.2 release notes{{ unreleased }} +{{ warning }} + + + + +## Changelog +{{ changelog }} diff --git a/doc/topics/releases/templates/3006.3.md.template b/doc/topics/releases/templates/3006.3.md.template new file mode 100644 index 000000000000..3a88f4fb10d3 --- /dev/null +++ b/doc/topics/releases/templates/3006.3.md.template @@ -0,0 +1,14 @@ +(release-3006.3)= +# Salt 3006.3 release notes{{ unreleased }} +{{ warning }} + + + + +## Changelog +{{ changelog }} diff --git a/doc/topics/releases/templates/3006.4.md.template b/doc/topics/releases/templates/3006.4.md.template new file mode 100644 index 000000000000..a11ede9bf4c8 --- /dev/null +++ b/doc/topics/releases/templates/3006.4.md.template @@ -0,0 +1,14 @@ +(release-3006.4)= +# Salt 3006.4 release notes{{ unreleased }} +{{ warning }} + + + + +## Changelog +{{ changelog }} diff --git a/doc/topics/releases/templates/3006.5.md.template b/doc/topics/releases/templates/3006.5.md.template new file mode 100644 index 000000000000..1750e9a85172 --- /dev/null +++ b/doc/topics/releases/templates/3006.5.md.template @@ -0,0 +1,14 @@ +(release-3006.5)= +# Salt 3006.5 release notes{{ unreleased }} +{{ warning }} + + 
+ + +## Changelog +{{ changelog }} diff --git a/doc/topics/tutorials/writing_tests.rst b/doc/topics/tutorials/writing_tests.rst index c7dc82069d37..244091c663ea 100644 --- a/doc/topics/tutorials/writing_tests.rst +++ b/doc/topics/tutorials/writing_tests.rst @@ -452,6 +452,69 @@ can be used ) +Test Groups +=========== +Salt has four groups + +* fast - Tests that are ~10s or faster. Fast tests make up ~75% of tests and can run in 10 to 20 minutes. +* slow - Tests that are ~10s or slower. +* core - Tests of any speed that test the root parts of salt. +* flaky-jail - Test that need to be temporarily skipped. + + +Pytest Decorators + +* @pytest.mark.slow_test +* @pytest.mark.core_test +* @pytest.mark.flaky_jail + +.. code-block:: python + + @pytest.mark.core_test + def test_ping(self): + """ + test.ping + """ + self.assertTrue(self.run_function("test.ping")) + +You can also mark all the tests in file. + +.. code-block:: python + + pytestmark = [ + pytest.mark.core_test, + ] + + + def test_ping(self): + """ + test.ping + """ + self.assertTrue(self.run_function("test.ping")) + + + def test_ping2(self): + """ + test.ping + """ + for _ in range(10): + self.assertTrue(self.run_function("test.ping")) + +You can enable or disable test groups locally by passing there respected flag: + +* --no-fast-tests +* --slow-tests +* --core-tests +* --flaky-jail + +In your PR you can enable or disable test groups by setting a label. +All thought the fast, slow and core tests specified in the change file will always run. 
+ +* test:no-fast +* test:slow +* test:core +* test:flaky-jail + Automated Test Runs =================== diff --git a/doc/topics/windows/multi-minion.rst b/doc/topics/windows/multi-minion.rst new file mode 100644 index 000000000000..17f50e962599 --- /dev/null +++ b/doc/topics/windows/multi-minion.rst @@ -0,0 +1,171 @@ +============================= +Multi-minion setup on Windows +============================= + +There may be a scenario where having a minion running in the context of the +current, logged-in user would be useful. For example, the normal minion running +under the service account would perform machine-wide, administrative tasks. The +minion running under the user context could be launched when the user logs in +and would be able to perform configuration tasks as if it were the user itself. + +The steps required to do this are as follows: + +1. Create new root_dir +2. Set root_dir permissions +3. Create directory structure +4. Write minion config +5. Start the minion +6. Register the minion as a service (optional) + +.. note:: + + The Salt Project has created a powershell script that will configure an + additional minion on the system for you. It can be found in the root of the + Salt installation. The script is named ``multi-minion.ps1``. You can get + help on how to use the script by running the following in a PowerShell + prompt: + + ``Get-Help .\multi-minion.ps1 -Detailed`` + +The following guide explains these steps in more detail. + +1. Create new ``root_dir`` +-------------------------- + +The minion requires a root directory to store config, cache, logs, etc. The user +must have full permissions to this directory. The easiest way to do this is to +put the ``root_dir`` in the Local AppData directory (``$env:LocalAppData``). + +.. code-block:: powershell + + New-Item -Path "$env:LocalAppData\Salt Project\Salt" -Type Directory + +2. 
Set ``root_dir`` permissions +------------------------------- + +The user running Salt requires full access to the ``root_dir``. If you have +placed the root_dir in a location that the user does not have access to, you'll +need to give the user full permissions to that directory. Replace the + in this example with your own configuration information. + +.. code-block:: powershell + + $RootDir = "" + $User = "" + $acl = Get-Acl -Path "$RootDir" + $access_rule = New-Object System.Security.AccessControl.FileSystemAccessRule($User, "Modify", "Allow") + $acl.AddAccessRule($access_rule) + Set-Acl -Path "$RootDir" -AclObject $acl + +3. Create directory structure +----------------------------- + +Salt expects a certain directory structure to be present to avoid unnecessary +messages in the logs. This is usually handled by the installer. Since you're +running your own instance, you need to do it. Make sure the following +directories are present: + + - root_dir\\conf\\minion.d + - root_dir\\conf\\pki + - root_dir\\var\\log\\salt + - root_dir\\var\\run + - root_dir\\var\\cache\\salt\\minion\\extmods\\grains + - root_dir\\var\\cache\\salt\\minion\\proc + +.. code-block:: powershell + + $RootDir = "" + $cache_dir = "$RootDir\var\cache\salt\minion" + New-Item -Path "$RootDir\conf" -Type Directory + New-Item -Path "$RootDir\conf\minion.d" -Type Directory + New-Item -Path "$RootDir\conf\pki" -Type Directory + New-Item -Path "$RootDir\var\log\salt" -Type Directory + New-Item -Path "$RootDir\var\run" -Type Directory + New-Item -Path "$cache_dir\extmods\grains" -Type Directory + New-Item -Path "$cache_dir\proc" -Type Directory + +4. Write minion config +---------------------- + +The minion will need its own config, separate from the system minion config. +This config tells the minion where everything is located in the file structure +and also defines the master and minion id. Create a minion config file named +``minion`` in the conf directory. + +.. 
code-block:: powershell + + New-Item -Path "$env:LocalAppData\Salt Project\Salt\conf\minion" -Type File + +Make sure the config file has at least the following contents: + +.. code-block:: yaml + + master: + id: + + root_dir: + log_file: \val\log\salt\minion + utils_dirs: + - \var\cache\salt\minion\extmods + winrepo_dir: \srv\salt\win\repo + winrepo_dir_ng: \srv\salt\win\repo-ng + + file_roots: + base: + - \srv\salt + - \srv\spm\salt + + pillar_roots: + base: + - \srv\pillar + - \srv\spm\pillar + + thorium_roots: + base: + - \srv\thorium + +5. Run the minion +----------------- + +Everything is now set up to run the minion. You can start the minion as you +would normally, but you need to specify the full path to the config file you +created above. + +.. code-block:: powershell + + salt-minion.exe -c \conf + +6. Register the minion as a service (optional) +---------------------------------------------- + +You can also register the minion as a service, but you need to understand the +implications of doing so. + +- You will need to have administrator privileges to register this minion + service. +- You will need the password to the user account that will be running the + minion. +- If the user password changes, you will have to update the service definition + to reflect the new password. +- The minion runs all the time under the user context, whether that user is + logged in or not. +- This requires great trust from the user as the minion will be able to perform + operations under the user's name without the user knowing, whether they are + logged in or not. +- If you decide to run the new minion under the Local System account, it might + as well just be a normal minion. +- The helper script does not support registering the second minion as a service. + +To register the minion as a service, use the ``ssm.exe`` binary that came with +the Salt installation. Run the following commands, replacing ````, +````, ````, and ```` as necessary: + +.. 
code-block:: powershell + + ssm.exe install "salt-minion.exe" "-c `"\conf`" -l quiet" + ssm.exe set Description "Salt Minion " + ssm.exe set Start SERVICE_AUTO_START + ssm.exe set AppStopMethodConsole 24000 + ssm.exe set AppStopMethodWindow 2000 + ssm.exe set AppRestartDelay 60000 + ssm.exe set ObjectName ".\" "" diff --git a/noxfile.py b/noxfile.py index 60bd2b99d20d..0baf4cc537ae 100644 --- a/noxfile.py +++ b/noxfile.py @@ -6,14 +6,15 @@ """ # pylint: disable=resource-leakage,3rd-party-module-not-gated +import contextlib import datetime +import glob import gzip import json import os import pathlib import shutil import sqlite3 -import subprocess import sys import tarfile import tempfile @@ -192,21 +193,12 @@ def _get_pydir(session): return "py{}.{}".format(*version_info) -def _get_pip_requirements_file(session, transport, crypto=None, requirements_type="ci"): +def _get_pip_requirements_file(session, crypto=None, requirements_type="ci"): assert requirements_type in ("ci", "pkg") pydir = _get_pydir(session) if IS_WINDOWS: if crypto is None: - _requirements_file = os.path.join( - "requirements", - "static", - requirements_type, - pydir, - "{}-windows.txt".format(transport), - ) - if os.path.exists(_requirements_file): - return _requirements_file _requirements_file = os.path.join( "requirements", "static", requirements_type, pydir, "windows.txt" ) @@ -220,15 +212,6 @@ def _get_pip_requirements_file(session, transport, crypto=None, requirements_typ session.error("Could not find a windows requirements file for {}".format(pydir)) elif IS_DARWIN: if crypto is None: - _requirements_file = os.path.join( - "requirements", - "static", - requirements_type, - pydir, - "{}-darwin.txt".format(transport), - ) - if os.path.exists(_requirements_file): - return _requirements_file _requirements_file = os.path.join( "requirements", "static", requirements_type, pydir, "darwin.txt" ) @@ -242,15 +225,6 @@ def _get_pip_requirements_file(session, transport, crypto=None, requirements_typ 
session.error("Could not find a darwin requirements file for {}".format(pydir)) elif IS_FREEBSD: if crypto is None: - _requirements_file = os.path.join( - "requirements", - "static", - requirements_type, - pydir, - "{}-freebsd.txt".format(transport), - ) - if os.path.exists(_requirements_file): - return _requirements_file _requirements_file = os.path.join( "requirements", "static", requirements_type, pydir, "freebsd.txt" ) @@ -264,15 +238,6 @@ def _get_pip_requirements_file(session, transport, crypto=None, requirements_typ session.error("Could not find a freebsd requirements file for {}".format(pydir)) else: if crypto is None: - _requirements_file = os.path.join( - "requirements", - "static", - requirements_type, - pydir, - "{}-linux.txt".format(transport), - ) - if os.path.exists(_requirements_file): - return _requirements_file _requirements_file = os.path.join( "requirements", "static", requirements_type, pydir, "linux.txt" ) @@ -286,13 +251,15 @@ def _get_pip_requirements_file(session, transport, crypto=None, requirements_typ session.error("Could not find a linux requirements file for {}".format(pydir)) -def _upgrade_pip_setuptools_and_wheel(session, upgrade=True, onedir=False): +def _upgrade_pip_setuptools_and_wheel(session, upgrade=True): if SKIP_REQUIREMENTS_INSTALL: session.log( "Skipping Python Requirements because SKIP_REQUIREMENTS_INSTALL was found in the environ" ) return False + env = os.environ.copy() + env["PIP_CONSTRAINT"] = str(REPO_ROOT / "requirements" / "constraints.txt") install_command = [ "python", "-m", @@ -302,26 +269,13 @@ def _upgrade_pip_setuptools_and_wheel(session, upgrade=True, onedir=False): ] if upgrade: install_command.append("-U") - if onedir: - requirements = [ - "pip>=22.3.1,<23.0", - # https://github.com/pypa/setuptools/commit/137ab9d684075f772c322f455b0dd1f992ddcd8f - "setuptools>=65.6.3,<66", - "wheel", - ] - else: - requirements = [ - "pip>=20.2.4,<21.2", - "setuptools!=50.*,!=51.*,!=52.*,<59", - ] - 
install_command.extend(requirements) - session_run_always(session, *install_command, silent=PIP_INSTALL_SILENT) + install_command.extend(["setuptools", "pip", "wheel"]) + session_run_always(session, *install_command, silent=PIP_INSTALL_SILENT, env=env) return True def _install_requirements( session, - transport, *extra_requirements, requirements_type="ci", onedir=False, @@ -329,20 +283,23 @@ def _install_requirements( if onedir and IS_LINUX: session_run_always(session, "python3", "-m", "relenv", "toolchain", "fetch") - if not _upgrade_pip_setuptools_and_wheel(session, onedir=onedir): + if not _upgrade_pip_setuptools_and_wheel(session): return False # Install requirements + env = os.environ.copy() + env["PIP_CONSTRAINT"] = str(REPO_ROOT / "requirements" / "constraints.txt") + requirements_file = _get_pip_requirements_file( - session, transport, requirements_type=requirements_type + session, requirements_type=requirements_type ) install_command = ["--progress-bar=off", "-r", requirements_file] - session.install(*install_command, silent=PIP_INSTALL_SILENT) + session.install(*install_command, silent=PIP_INSTALL_SILENT, env=env) if extra_requirements: install_command = ["--progress-bar=off"] install_command += list(extra_requirements) - session.install(*install_command, silent=PIP_INSTALL_SILENT) + session.install(*install_command, silent=PIP_INSTALL_SILENT, env=env) if EXTRA_REQUIREMENTS_INSTALL: session.log( @@ -354,30 +311,46 @@ def _install_requirements( # we're already using, we want to maintain the locked version install_command = ["--progress-bar=off", "--constraint", requirements_file] install_command += EXTRA_REQUIREMENTS_INSTALL.split() - session.install(*install_command, silent=PIP_INSTALL_SILENT) + session.install(*install_command, silent=PIP_INSTALL_SILENT, env=env) return True def _install_coverage_requirement(session): if SKIP_REQUIREMENTS_INSTALL is False: + env = os.environ.copy() + env["PIP_CONSTRAINT"] = str(REPO_ROOT / "requirements" / 
"constraints.txt") coverage_requirement = COVERAGE_REQUIREMENT if coverage_requirement is None: - coverage_requirement = "coverage==5.2" + coverage_requirement = "coverage==7.3.1" + if IS_LINUX: + distro_slug = os.environ.get("TOOLS_DISTRO_SLUG") + if distro_slug is not None and distro_slug in ( + "centos-7", + "debian-10", + "photonos-3", + ): + # Keep the old coverage requirement version since the new one, on these + # plaforms turns the test suite quite slow. + # Unit tests don't finish before the 5 hours timeout when they should + # finish within 1 to 2 hours. + coverage_requirement = "coverage==5.5" session.install( - "--progress-bar=off", coverage_requirement, silent=PIP_INSTALL_SILENT + "--progress-bar=off", + coverage_requirement, + silent=PIP_INSTALL_SILENT, + env=env, ) -def _run_with_coverage(session, *test_cmd, env=None): +def _run_with_coverage(session, *test_cmd, env=None, on_rerun=False): _install_coverage_requirement(session) - session.run("coverage", "erase") + if on_rerun is False: + session.run("coverage", "erase") if env is None: env = {} - coverage_base_env = {} - sitecustomize_dir = session.run( "salt-factories", "--coverage", silent=True, log=True, stderr=None ) @@ -409,58 +382,36 @@ def _run_with_coverage(session, *test_cmd, env=None): python_path_entries.insert(0, str(sitecustomize_dir)) python_path_env_var = os.pathsep.join(python_path_entries) - # The full path to the .coverage data file. Makes sure we always write - # them to the same directory - coverage_base_env["COVERAGE_FILE"] = COVERAGE_FILE - env.update( { # The updated python path so that sitecustomize is importable "PYTHONPATH": python_path_env_var, # Instruct sub processes to also run under coverage "COVERAGE_PROCESS_START": str(REPO_ROOT / ".coveragerc"), - }, - **coverage_base_env, + # The full path to the .coverage data file. 
Makes sure we always write + # them to the same directory + "COVERAGE_FILE": COVERAGE_FILE, + } ) - try: - session.run(*test_cmd, env=env) - finally: - if os.environ.get("GITHUB_ACTIONS_PIPELINE", "0") == "0": - # Always combine and generate the XML coverage report - try: - session.run( - "coverage", "combine", "--debug=pathmap", env=coverage_base_env - ) - except CommandFailed: - # Sometimes some of the coverage files are corrupt which would trigger a CommandFailed - # exception - pass - # Generate report for tests code coverage - session.run( - "coverage", - "xml", - "-o", - str(COVERAGE_OUTPUT_DIR.joinpath("tests.xml").relative_to(REPO_ROOT)), - "--omit=salt/*", - "--include=tests/*", - env=coverage_base_env, - ) - # Generate report for salt code coverage - session.run( - "coverage", - "xml", - "-o", - str(COVERAGE_OUTPUT_DIR.joinpath("salt.xml").relative_to(REPO_ROOT)), - "--omit=tests/*", - "--include=salt/*", - env=coverage_base_env, - ) + session.run(*test_cmd, env=env) -def _report_coverage(session): +def _report_coverage( + session, + combine=True, + cli_report=True, + html_report=False, + xml_report=False, + json_report=False, +): _install_coverage_requirement(session) + if not any([combine, cli_report, html_report, xml_report, json_report]): + session.error( + "At least one of combine, cli_report, html_report, xml_report, json_report needs to be True" + ) + env = { # The full path to the .coverage data file. Makes sure we always write # them to the same directory @@ -471,72 +422,117 @@ def _report_coverage(session): if session.posargs: report_section = session.posargs.pop(0) if report_section not in ("salt", "tests"): - session.error("The report section can only be one of 'salt', 'tests'.") + session.error( + f"The report section can only be one of 'salt', 'tests', not: {report_section}" + ) if session.posargs: session.error( "Only one argument can be passed to the session, which is optional " "and is one of 'salt', 'tests'." 
) - # Always combine and generate the XML coverage report - try: - session.run("coverage", "combine", env=env) - except CommandFailed: - # Sometimes some of the coverage files are corrupt which would trigger a CommandFailed - # exception - pass - - if not IS_WINDOWS: - # The coverage file might have come from a windows machine, fix paths - with sqlite3.connect(COVERAGE_FILE) as db: - res = db.execute(r"SELECT * FROM file WHERE path LIKE '%salt\%'") - if res.fetchone(): - session_warn( - session, - "Replacing backwards slashes with forward slashes on file " - "paths in the coverage database", - ) - db.execute(r"UPDATE OR IGNORE file SET path=replace(path, '\', '/');") + if combine is True: + coverage_db_files = glob.glob(f"{COVERAGE_FILE}.*") + if coverage_db_files: + with contextlib.suppress(CommandFailed): + # Sometimes some of the coverage files are corrupt which would trigger a CommandFailed + # exception + session.run("coverage", "combine", env=env) + elif os.path.exists(COVERAGE_FILE): + session_warn(session, "Coverage files already combined.") + + if os.path.exists(COVERAGE_FILE) and not IS_WINDOWS: + # Some coverage files might have come from a windows machine, fix paths + with sqlite3.connect(COVERAGE_FILE) as db: + res = db.execute(r"SELECT * FROM file WHERE path LIKE '%salt\%'") + if res.fetchone(): + session_warn( + session, + "Replacing backwards slashes with forward slashes on file " + "paths in the coverage database", + ) + db.execute( + r"UPDATE OR IGNORE file SET path=replace(path, '\', '/');" + ) + + if not os.path.exists(COVERAGE_FILE): + session.error("No coverage files found.") if report_section == "salt": - json_coverage_file = ( - COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage-salt.json" - ) + json_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "salt.json" + xml_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "salt.xml" + html_coverage_dir = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "html" / "salt" 
cmd_args = [ - "--omit=tests/*", + "--omit=tests/*,tests/pytests/pkg/*", "--include=salt/*", ] elif report_section == "tests": - json_coverage_file = ( - COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage-tests.json" + json_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "tests.json" + xml_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "tests.xml" + html_coverage_dir = ( + COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "html" / "tests" ) cmd_args = [ "--omit=salt/*", - "--include=tests/*", + "--include=tests/*,tests/pytests/pkg/*", ] else: json_coverage_file = ( COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage.json" ) + xml_coverage_file = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "coverage.xml" + html_coverage_dir = COVERAGE_OUTPUT_DIR.relative_to(REPO_ROOT) / "html" / "full" cmd_args = [ - "--include=salt/*,tests/*", + "--include=salt/*,tests/*,tests/pytests/pkg/*", ] - session.run( - "coverage", - "json", - "-o", - str(json_coverage_file), - *cmd_args, - env=env, - ) - session.run( - "coverage", - "report", - *cmd_args, - env=env, - ) + if cli_report: + session.run( + "coverage", + "report", + "--precision=2", + *cmd_args, + env=env, + ) + + if html_report: + session.run( + "coverage", + "html", + "-d", + str(html_coverage_dir), + "--show-contexts", + "--precision=2", + *cmd_args, + env=env, + ) + + if xml_report: + try: + session.run( + "coverage", + "xml", + "-o", + str(xml_coverage_file), + *cmd_args, + env=env, + ) + except CommandFailed: + session_warn( + session, "Failed to generate the source XML code coverage report" + ) + + if json_report: + session.run( + "coverage", + "json", + "-o", + str(json_coverage_file), + "--show-contexts", + *cmd_args, + env=env, + ) @nox.session(python=_PYTHON_VERSIONS, name="test-parametrized") @@ -548,7 +544,7 @@ def test_parametrized(session, coverage, transport, crypto): DO NOT CALL THIS NOX SESSION DIRECTLY """ # Install requirements - if _install_requirements(session, transport): + 
if _install_requirements(session): if crypto: session_run_always( @@ -565,7 +561,7 @@ def test_parametrized(session, coverage, transport, crypto): install_command = [ "--progress-bar=off", "--constraint", - _get_pip_requirements_file(session, transport, crypto=True), + _get_pip_requirements_file(session, crypto=True), ] install_command.append(crypto) session.install(*install_command, silent=PIP_INSTALL_SILENT) @@ -964,7 +960,7 @@ def test_tornado(session, coverage): """ # Install requirements if _upgrade_pip_setuptools_and_wheel(session): - _install_requirements(session, "zeromq") + _install_requirements(session) session.install( "--progress-bar=off", "tornado==5.0.2", silent=PIP_INSTALL_SILENT ) @@ -993,7 +989,7 @@ def pytest_tornado(session, coverage): session.notify(session_name.replace("pytest-", "test-")) -def _pytest(session, coverage, cmd_args, env=None): +def _pytest(session, coverage, cmd_args, env=None, on_rerun=False): # Create required artifacts directories _create_ci_directories() @@ -1033,16 +1029,21 @@ def _pytest(session, coverage, cmd_args, env=None): return if coverage is True: + _coverage_cmd_args = [] + if "COVERAGE_CONTEXT" in os.environ: + _coverage_cmd_args.append(f"--context={os.environ['COVERAGE_CONTEXT']}") _run_with_coverage( session, "python", "-m", "coverage", "run", + *_coverage_cmd_args, "-m", "pytest", *args, env=env, + on_rerun=on_rerun, ) else: session.run("python", "-m", "pytest", *args, env=env) @@ -1050,11 +1051,14 @@ def _pytest(session, coverage, cmd_args, env=None): def _ci_test(session, transport, onedir=False): # Install requirements - _install_requirements(session, transport, onedir=onedir) + _install_requirements(session, onedir=onedir) env = {} if onedir: env["ONEDIR_TESTRUN"] = "1" chunks = { + "pkg": [ + "tests/pytests/pkg", + ], "unit": [ "tests/unit", "tests/pytests/unit", @@ -1062,9 +1066,13 @@ def _ci_test(session, transport, onedir=False): "functional": [ "tests/pytests/functional", ], - "scenarios": 
["tests/pytests/scenarios"], + "scenarios": [ + "tests/pytests/scenarios", + ], } + test_group_number = os.environ.get("TEST_GROUP") or "1" + if not session.posargs: chunk_cmd = [] junit_report_filename = "test-results" @@ -1081,20 +1089,20 @@ def _ci_test(session, transport, onedir=False): for values in chunks.values(): for value in values: chunk_cmd.append(f"--ignore={value}") - junit_report_filename = f"test-results-{chunk}" - runtests_log_filename = f"runtests-{chunk}" + junit_report_filename = f"test-results-{chunk}-grp{test_group_number}" + runtests_log_filename = f"runtests-{chunk}-grp{test_group_number}" else: chunk_cmd = chunks[chunk] - junit_report_filename = f"test-results-{chunk}" - runtests_log_filename = f"runtests-{chunk}" + junit_report_filename = f"test-results-{chunk}-grp{test_group_number}" + runtests_log_filename = f"runtests-{chunk}-grp{test_group_number}" if session.posargs: if session.posargs[0] == "--": session.posargs.pop(0) chunk_cmd.extend(session.posargs) else: chunk_cmd = [chunk] + session.posargs - junit_report_filename = "test-results" - runtests_log_filename = "runtests" + junit_report_filename = f"test-results-grp{test_group_number}" + runtests_log_filename = f"runtests-grp{test_group_number}" rerun_failures = os.environ.get("RERUN_FAILURES", "0") == "1" track_code_coverage = os.environ.get("SKIP_CODE_COVERAGE", "0") == "0" @@ -1135,12 +1143,25 @@ def _ci_test(session, transport, onedir=False): ] + chunk_cmd ) - _pytest(session, coverage=track_code_coverage, cmd_args=pytest_args, env=env) + _pytest( + session, + coverage=track_code_coverage, + cmd_args=pytest_args, + env=env, + on_rerun=True, + ) @nox.session(python=_PYTHON_VERSIONS, name="ci-test") def ci_test(session): - _ci_test(session, "zeromq") + transport = os.environ.get("SALT_TRANSPORT") or "zeromq" + valid_transports = ("zeromq", "tcp") + if transport not in valid_transports: + session.error( + "The value for the SALT_TRANSPORT environment variable can only be " + f"one 
of: {', '.join(valid_transports)}" + ) + _ci_test(session, transport) @nox.session(python=_PYTHON_VERSIONS, name="ci-test-tcp") @@ -1161,6 +1182,14 @@ def ci_test_onedir(session): ) ) + transport = os.environ.get("SALT_TRANSPORT") or "zeromq" + valid_transports = ("zeromq", "tcp") + if transport not in valid_transports: + session.error( + "The value for the SALT_TRANSPORT environment variable can only be " + f"one of: {', '.join(valid_transports)}" + ) + _ci_test(session, "zeromq", onedir=True) @@ -1182,7 +1211,12 @@ def ci_test_onedir_tcp(session): @nox.session(python="3", name="report-coverage") def report_coverage(session): - _report_coverage(session) + _report_coverage(session, combine=True, cli_report=True) + + +@nox.session(python="3", name="coverage-report") +def coverage_report(session): + _report_coverage(session, combine=True, cli_report=True) @nox.session(python=False, name="decompress-dependencies") @@ -1193,19 +1227,74 @@ def decompress_dependencies(session): "Check cicd/images.yml for what's available." ) distro_slug = session.posargs.pop(0) - if IS_WINDOWS: + if "windows" in distro_slug: nox_dependencies_tarball = f"nox.{distro_slug}.tar.gz" else: nox_dependencies_tarball = f"nox.{distro_slug}.tar.xz" nox_dependencies_tarball_path = REPO_ROOT / nox_dependencies_tarball if not nox_dependencies_tarball_path.exists(): session.error( - f"The {nox_dependencies_tarball} file" + f"The {nox_dependencies_tarball} file " "does not exist. Not decompressing anything." 
) session_run_always(session, "tar", "xpf", nox_dependencies_tarball) - nox_dependencies_tarball_path.unlink() + if os.environ.get("DELETE_NOX_ARCHIVE", "0") == "1": + nox_dependencies_tarball_path.unlink() + + session.log("Finding broken 'python' symlinks under '.nox/' ...") + for dirname in os.scandir(REPO_ROOT / ".nox"): + if "windows" not in distro_slug: + scan_path = REPO_ROOT.joinpath(".nox", dirname, "bin") + else: + scan_path = REPO_ROOT.joinpath(".nox", dirname, "Scripts") + script_paths = {str(p): p for p in os.scandir(scan_path)} + fixed_shebang = f"#!{scan_path / 'python'}" + for key in sorted(script_paths): + path = script_paths[key] + if path.is_symlink(): + broken_link = pathlib.Path(path) + resolved_link = os.readlink(path) + if not os.path.isabs(resolved_link): + # Relative symlinks, resolve them + resolved_link = os.path.join(scan_path, resolved_link) + if not os.path.exists(resolved_link): + session.log("The symlink %r looks to be broken", resolved_link) + # This is a broken link, fix it + resolved_link_suffix = resolved_link.split( + f"artifacts{os.sep}salt{os.sep}" + )[-1] + fixed_link = REPO_ROOT.joinpath( + "artifacts", "salt", resolved_link_suffix + ) + session.log( + "Fixing broken symlink in nox virtualenv %r, from %r to %r", + dirname.name, + resolved_link, + str(fixed_link.relative_to(REPO_ROOT)), + ) + broken_link.unlink() + broken_link.symlink_to(fixed_link) + continue + if not path.is_file(): + continue + if "windows" not in distro_slug: + # Let's try to fix shebang's + try: + fpath = pathlib.Path(path) + contents = fpath.read_text().splitlines() + if ( + contents[0].startswith("#!") + and contents[0].endswith("python") + and contents[0] != fixed_shebang + ): + session.log( + "Fixing broken shebang in %r", + str(fpath.relative_to(REPO_ROOT)), + ) + fpath.write_text("\n".join([fixed_shebang] + contents[1:])) + except UnicodeDecodeError: + pass @nox.session(python=False, name="compress-dependencies") @@ -1249,115 +1338,83 @@ def 
pre_archive_cleanup(session, pkg): if session.posargs: session.error("No additional arguments can be passed to 'pre-archive-cleanup'") version_info = _get_session_python_version_info(session) - if version_info >= (3, 9): - if _upgrade_pip_setuptools_and_wheel(session): - requirements_file = os.path.join( - "requirements", "static", "ci", _get_pydir(session), "tools.txt" - ) - install_command = ["--progress-bar=off", "-r", requirements_file] - session.install(*install_command, silent=PIP_INSTALL_SILENT) + if version_info < (3, 10): + session.error( + "The nox session 'pre-archive-cleanup' needs Python 3.10+ to run." + ) - cmdline = [ - "tools", - "pkg", - "pre-archive-cleanup", - ] - if pkg: - cmdline.append("--pkg") - cmdline.append(".nox") - session_run_always(session, *cmdline) - return + if _upgrade_pip_setuptools_and_wheel(session): + requirements_file = os.path.join( + "requirements", "static", "ci", _get_pydir(session), "tools.txt" + ) + install_command = ["--progress-bar=off", "-r", requirements_file] + session.install(*install_command, silent=PIP_INSTALL_SILENT) - # On windows, we still run Py3.9 - # Let's do the cleanup here, for now. 
- # This is a copy of the pre_archive_cleanup function in tools/pkg.py + cmdline = [ + "tools", + "pkg", + "pre-archive-cleanup", + ] + if pkg: + cmdline.append("--pkg") + cmdline.append(".nox") + session_run_always(session, *cmdline) - import fnmatch - import shutil - try: - import yaml - except ImportError: - session.error("Please install 'pyyaml'.") - return +@nox.session(python="3", name="combine-coverage") +def combine_coverage(session): + _report_coverage(session, combine=True, cli_report=False) - with open(str(REPO_ROOT / "pkg" / "common" / "env-cleanup-rules.yml")) as rfh: - patterns = yaml.safe_load(rfh.read()) - if pkg: - patterns = patterns["pkg"] - else: - patterns = patterns["ci"] +@nox.session( + python=str(ONEDIR_PYTHON_PATH), + name="combine-coverage-onedir", + venv_params=["--system-site-packages"], +) +def combine_coverage_onedir(session): + _report_coverage(session, combine=True, cli_report=False) - if IS_WINDOWS: - patterns = patterns["windows"] - elif IS_DARWIN: - patterns = patterns["darwin"] - else: - patterns = patterns["linux"] - dir_patterns = set() - for pattern in patterns["dir_patterns"]: - if isinstance(pattern, list): - dir_patterns.update(set(pattern)) - continue - dir_patterns.add(pattern) +@nox.session(python="3", name="create-html-coverage-report") +def create_html_coverage_report(session): + _report_coverage(session, combine=True, cli_report=False, html_report=True) - file_patterns = set() - for pattern in patterns["file_patterns"]: - if isinstance(pattern, list): - file_patterns.update(set(pattern)) - continue - file_patterns.add(pattern) - - for root, dirs, files in os.walk( - str(REPO_ROOT / ".nox"), topdown=True, followlinks=False - ): - for dirname in dirs: - path = pathlib.Path(root, dirname).resolve() - if not path.exists(): - continue - match_path = path.as_posix() - for pattern in dir_patterns: - if fnmatch.fnmatch(str(match_path), pattern): - session.log( - f"Deleting directory: {match_path}; Matching pattern: 
{pattern!r}" - ) - shutil.rmtree(str(path)) - break - for filename in files: - path = pathlib.Path(root, filename).resolve() - if not path.exists(): - continue - match_path = path.as_posix() - for pattern in file_patterns: - if fnmatch.fnmatch(str(match_path), pattern): - session.log( - f"Deleting file: {match_path}; Matching pattern: {pattern!r}" - ) - try: - os.remove(str(path)) - except FileNotFoundError: - pass - break +def _create_xml_coverage_reports(session): + if session.posargs: + session.error("No arguments are acceptable to this nox session.") + session.posargs.append("salt") + _report_coverage(session, combine=True, cli_report=False, xml_report=True) + session.posargs.append("tests") + _report_coverage(session, combine=True, cli_report=False, xml_report=True) -@nox.session(python="3", name="combine-coverage") -def combine_coverage(session): - _install_coverage_requirement(session) - env = { - # The full path to the .coverage data file. Makes sure we always write - # them to the same directory - "COVERAGE_FILE": str(COVERAGE_FILE), - } - # Always combine and generate the XML coverage report - try: - session.run("coverage", "combine", env=env) - except CommandFailed: - # Sometimes some of the coverage files are corrupt which would trigger a CommandFailed - # exception - pass +@nox.session(python="3", name="create-xml-coverage-reports") +def create_xml_coverage_reports(session): + _create_xml_coverage_reports(session) + + +@nox.session( + python=str(ONEDIR_PYTHON_PATH), + name="create-xml-coverage-reports-onedir", + venv_params=["--system-site-packages"], +) +def create_xml_coverage_reports_onedir(session): + _create_xml_coverage_reports(session) + + +@nox.session(python="3", name="create-json-coverage-reports") +def create_json_coverage_reports(session): + _report_coverage(session, combine=True, cli_report=False, json_report=True) + + +@nox.session( + python=str(ONEDIR_PYTHON_PATH), + name="create-json-coverage-reports-onedir", + 
venv_params=["--system-site-packages"], +) +def create_json_coverage_reports_onedir(session): + _report_coverage(session, combine=True, cli_report=False, json_report=True) class Tee: @@ -1482,7 +1539,7 @@ def lint_salt(session): paths = session.posargs else: # TBD replace paths entries when implement pyproject.toml - paths = ["setup.py", "noxfile.py", "salt/", "tasks/"] + paths = ["setup.py", "noxfile.py", "salt/"] _lint(session, ".pylintrc", flags, paths) @@ -1594,37 +1651,6 @@ def docs_man(session, compress, update, clean): os.chdir("..") -@nox.session(name="invoke", python="3") -def invoke(session): - """ - Run invoke tasks - """ - if _upgrade_pip_setuptools_and_wheel(session): - _install_requirements(session, "zeromq") - requirements_file = os.path.join( - "requirements", "static", "ci", _get_pydir(session), "invoke.txt" - ) - install_command = ["--progress-bar=off", "-r", requirements_file] - session.install(*install_command, silent=PIP_INSTALL_SILENT) - - cmd = ["inv"] - files = [] - - # Unfortunately, invoke doesn't support the nargs functionality like argpase does. 
- # Let's make it behave properly - for idx, posarg in enumerate(session.posargs): - if idx == 0: - cmd.append(posarg) - continue - if posarg.startswith("--"): - cmd.append(posarg) - continue - files.append(posarg) - if files: - cmd.append("--files={}".format(" ".join(files))) - session.run(*cmd) - - @nox.session(name="changelog", python="3") @nox.parametrize("draft", [False, True]) @nox.parametrize("force", [False, True]) @@ -1762,81 +1788,175 @@ def build(session): session.run("python", "-m", "twine", "check", "dist/*") -def _pkg_test(session, cmd_args, test_type, onedir=False): - pydir = _get_pydir(session) - junit_report_filename = f"test-results-{test_type}" - runtests_log_filename = f"runtests-{test_type}" - # Install requirements - if onedir and IS_LINUX: - session_run_always(session, "python3", "-m", "relenv", "toolchain", "fetch") - if _upgrade_pip_setuptools_and_wheel(session, onedir=onedir): - if IS_WINDOWS: - file_name = "pkgtests-windows.txt" - else: - file_name = "pkgtests.txt" - - requirements_file = os.path.join( - "requirements", "static", "ci", pydir, file_name - ) - - install_command = ["--progress-bar=off", "-r", requirements_file] - session.install(*install_command, silent=PIP_INSTALL_SILENT) - - env = {} - if onedir: - env["ONEDIR_TESTRUN"] = "1" - - pytest_args = ( - cmd_args[:] - + [ - f"--junitxml=artifacts/xml-unittests-output/{junit_report_filename}.xml", - f"--log-file=artifacts/logs/{runtests_log_filename}.log", - ] - + session.posargs - ) - _pytest(session, coverage=False, cmd_args=pytest_args, env=env) - - @nox.session( python=str(ONEDIR_PYTHON_PATH), - name="test-pkgs-onedir", + name="ci-test-onedir-pkgs", venv_params=["--system-site-packages"], ) -def test_pkgs_onedir(session): +def ci_test_onedir_pkgs(session): + from nox.virtualenv import VirtualEnv + + session_warn(session, "Replacing VirtualEnv instance...") + + ci_test_onedir_path = REPO_ROOT / ".nox" / "ci-test-onedir" + session._runner.venv = VirtualEnv( + 
str(ci_test_onedir_path.relative_to(REPO_ROOT)), + interpreter=session._runner.func.python, + reuse_existing=True, + venv=session._runner.venv.venv_or_virtualenv == "venv", + venv_params=session._runner.venv.venv_params, + ) + os.environ["VIRTUAL_ENV"] = session._runner.venv.location + session._runner.venv.create() + if not ONEDIR_ARTIFACT_PATH.exists(): session.error( "The salt onedir artifact, expected to be in '{}', was not found".format( ONEDIR_ARTIFACT_PATH.relative_to(REPO_ROOT) ) ) - _pkg_test(session, ["pkg/tests/"], "pkg", onedir=True) - -@nox.session( - python=str(ONEDIR_PYTHON_PATH), - name="test-upgrade-pkgs-onedir", - venv_params=["--system-site-packages"], -) -@nox.parametrize("classic", [False, True]) -def test_upgrade_pkgs_onedir(session, classic): - """ - pytest pkg upgrade tests session - """ - test_type = "pkg_upgrade" - cmd_args = [ - "pkg/tests/upgrade/test_salt_upgrade.py::test_salt_upgrade", - "--upgrade", - "--no-uninstall", + common_pytest_args = [ + "--color=yes", + "--sys-stats", + "--run-destructive", + f"--output-columns={os.environ.get('OUTPUT_COLUMNS') or 120}", + "--pkg-system-service", ] - if classic: - cmd_args = cmd_args + ["--classic"] - # Workaround for installing and running classic packages from 3005.1 - # They can only run with importlib-metadata<5.0.0. 
- subprocess.run(["pip3", "install", "importlib-metadata==4.13.0"], check=False) + + chunks = { + "install": [ + "tests/pytests/pkg/", + ], + "upgrade": [ + "--upgrade", + "--no-uninstall", + "tests/pytests/pkg/upgrade/", + ], + "upgrade-classic": [ + "--upgrade", + "--no-uninstall", + "tests/pytests/pkg/upgrade/", + ], + "downgrade": [ + "--downgrade", + "--no-uninstall", + "tests/pytests/pkg/downgrade/", + ], + "downgrade-classic": [ + "--downgrade", + "--no-uninstall", + "tests/pytests/pkg/downgrade/", + ], + "download-pkgs": [ + "--download-pkgs", + "tests/pytests/pkg/download/", + ], + } + + if not session.posargs or session.posargs[0] not in chunks: + chunk = "install" + session.log("Choosing default 'install' test type") + else: + chunk = session.posargs.pop(0) + + cmd_args = chunks[chunk] + + if IS_LINUX: + # Fetch the toolchain + session_run_always(session, "python3", "-m", "relenv", "toolchain", "fetch") + + # Install requirements + if _upgrade_pip_setuptools_and_wheel(session): + _install_requirements(session, "pyzmq") + env = { + "ONEDIR_TESTRUN": "1", + "PKG_TEST_TYPE": chunk, + } + + if chunk in ("upgrade-classic", "downgrade-classic"): + cmd_args.append("--classic") + + pytest_args = ( + common_pytest_args[:] + + cmd_args[:] + + [ + f"--junitxml=artifacts/xml-unittests-output/test-results-{chunk}.xml", + f"--log-file=artifacts/logs/runtests-{chunk}.log", + ] + + session.posargs + ) try: - _pkg_test(session, cmd_args, test_type, onedir=True) - except nox.command.CommandFailed: - sys.exit(1) + _pytest(session, coverage=False, cmd_args=pytest_args, env=env) + except CommandFailed: + if os.environ.get("RERUN_FAILURES", "0") == "0": + # Don't rerun on failures + return - cmd_args = ["pkg/tests/", "--no-install"] + session.posargs - _pkg_test(session, cmd_args, test_type, onedir=True) + # Don't print the system information, not the test selection on reruns + global PRINT_TEST_SELECTION + global PRINT_SYSTEM_INFO + PRINT_TEST_SELECTION = False + 
PRINT_SYSTEM_INFO = False + + pytest_args = ( + common_pytest_args[:] + + cmd_args[:] + + [ + f"--junitxml=artifacts/xml-unittests-output/test-results-{chunk}-rerun.xml", + f"--log-file=artifacts/logs/runtests-{chunk}-rerun.log", + "--lf", + ] + + session.posargs + ) + _pytest( + session, + coverage=False, + cmd_args=pytest_args, + env=env, + on_rerun=True, + ) + + if chunk not in ("install", "download-pkgs"): + cmd_args = chunks["install"] + pytest_args = ( + common_pytest_args[:] + + cmd_args[:] + + [ + "--no-install", + f"--junitxml=artifacts/xml-unittests-output/test-results-install.xml", + f"--log-file=artifacts/logs/runtests-install.log", + ] + + session.posargs + ) + if "downgrade" in chunk: + pytest_args.append("--use-prev-version") + if chunk in ("upgrade-classic", "downgrade-classic"): + pytest_args.append("--classic") + try: + _pytest(session, coverage=False, cmd_args=pytest_args, env=env) + except CommandFailed: + cmd_args = chunks["install"] + pytest_args = ( + common_pytest_args[:] + + cmd_args[:] + + [ + "--no-install", + f"--junitxml=artifacts/xml-unittests-output/test-results-install-rerun.xml", + f"--log-file=artifacts/logs/runtests-install-rerun.log", + "--lf", + ] + + session.posargs + ) + if "downgrade" in chunk: + pytest_args.append("--use-prev-version") + if chunk in ("upgrade-classic", "downgrade-classic"): + pytest_args.append("--classic") + _pytest( + session, + coverage=False, + cmd_args=pytest_args, + env=env, + on_rerun=True, + ) + sys.exit(0) diff --git a/pkg/common/conf/master b/pkg/common/conf/master new file mode 100644 index 000000000000..4f0fa646d495 --- /dev/null +++ b/pkg/common/conf/master @@ -0,0 +1,1359 @@ +##### Primary configuration settings ##### +########################################## +# This configuration file is used to manage the behavior of the Salt Master. +# Values that are commented out but have an empty line after the comment are +# defaults that do not need to be set in the config. 
If there is no blank line +# after the comment then the value is presented as an example and is not the +# default. + +# Per default, the master will automatically include all config files +# from master.d/*.conf (master.d is a directory in the same directory +# as the main master config file). +#default_include: master.d/*.conf + +# The address of the interface to bind to: +#interface: 0.0.0.0 + +# Whether the master should listen for IPv6 connections. If this is set to True, +# the interface option must be adjusted, too. (For example: "interface: '::'") +#ipv6: False + +# The tcp port used by the publisher: +#publish_port: 4505 + +# The user under which the salt master will run. Salt will update all +# permissions to allow the specified user to run the master. The exception is +# the job cache, which must be deleted if this user is changed. If the +# modified files cause conflicts, set verify_env to False. +user: salt + +# Tell the master to also use salt-ssh when running commands against minions. +#enable_ssh_minions: False + +# The port used by the communication interface. The ret (return) port is the +# interface used for the file server, authentication, job returns, etc. +#ret_port: 4506 + +# Specify the location of the daemon process ID file: +#pidfile: /var/run/salt-master.pid + +# The root directory prepended to these options: pki_dir, cachedir, +# sock_dir, log_file, autosign_file, autoreject_file, extension_modules, +# key_logfile, pidfile, autosign_grains_dir: +#root_dir: / + +# The path to the master's configuration file. +#conf_file: /etc/salt/master + +# Directory used to store public key data: +#pki_dir: /etc/salt/pki/master + +# Key cache. Increases master speed for large numbers of accepted +# keys. Available options: 'sched'. (Updates on a fixed schedule.) +# Note that enabling this feature means that minions will not be +# available to target for up to the length of the maintenance loop +# which by default is 60s. 
+#key_cache: '' + +# Directory to store job and cache data: +# This directory may contain sensitive data and should be protected accordingly. +# +#cachedir: /var/cache/salt/master + +# Directory where custom modules sync to. This directory can contain +# subdirectories for each of Salt's module types such as "runners", +# "output", "wheel", "modules", "states", "returners", "engines", +# "utils", etc. +# +# Note, any directories or files not found in the `module_dirs` +# location will be removed from the extension_modules path. + +#extension_modules: /var/cache/salt/master/extmods + +# Directory for custom modules. This directory can contain subdirectories for +# each of Salt's module types such as "runners", "output", "wheel", "modules", +# "states", "returners", "engines", "utils", etc. +#module_dirs: [] + +# Verify and set permissions on configuration directories at startup: +#verify_env: True + +# Set the number of hours to keep old job information in the job cache. +# This option is deprecated by the keep_jobs_seconds option. +#keep_jobs: 24 + +# Set the number of seconds to keep old job information in the job cache: +#keep_jobs_seconds: 86400 + +# The number of seconds to wait when the client is requesting information +# about running jobs. +#gather_job_timeout: 10 + +# Set the default timeout for the salt command and api. The default is 5 +# seconds. +#timeout: 5 + +# The loop_interval option controls the seconds for the master's maintenance +# process check cycle. This process updates file server backends, cleans the +# job cache and executes the scheduler. +#loop_interval: 60 + +# Set the default outputter used by the salt command. The default is "nested". +#output: nested + +# To set a list of additional directories to search for salt outputters, set the +# outputter_dirs option. +#outputter_dirs: [] + +# Set the default output file used by the salt command. Default is to output +# to the CLI and not to a file. 
Functions the same way as the "--out-file" +# CLI option, only sets this to a single file for all salt commands. +#output_file: None + +# Return minions that timeout when running commands like test.ping +#show_timeout: True + +# Tell the client to display the jid when a job is published. +#show_jid: False + +# By default, output is colored. To disable colored output, set the color value +# to False. +#color: True + +# Do not strip off the colored output from nested results and state outputs +# (true by default). +# strip_colors: False + +# To display a summary of the number of minions targeted, the number of +# minions returned, and the number of minions that did not return, set the +# cli_summary value to True. (False by default.) +# +#cli_summary: False + +# Set the directory used to hold unix sockets: +#sock_dir: /var/run/salt/master + +# The master can take a while to start up when lspci and/or dmidecode is used +# to populate the grains for the master. Enable if you want to see GPU hardware +# data for your master. +# enable_gpu_grains: False + +# The master maintains a job cache. While this is a great addition, it can be +# a burden on the master for larger deployments (over 5000 minions). +# Disabling the job cache will make previously executed jobs unavailable to +# the jobs system and is not generally recommended. +#job_cache: True + +# Cache minion grains, pillar and mine data via the cache subsystem in the +# cachedir or a database. +#minion_data_cache: True + +# Cache subsystem module to use for minion data cache. +#cache: localfs +# Enables a fast in-memory cache booster and sets the expiration time. +#memcache_expire_seconds: 0 +# Set a memcache limit in items (bank + key) per cache storage (driver + driver_opts). +#memcache_max_items: 1024 +# Each time a cache storage got full cleanup all the expired items not just the oldest one. +#memcache_full_cleanup: False +# Enable collecting the memcache stats and log it on `debug` log level. 
+#memcache_debug: False
+
+# Store all returns in the given returner.
+# Setting this option requires that any returner-specific configuration also
+# be set. See various returners in salt/returners for details on required
+# configuration values. (See also, event_return_queue, and event_return_queue_max_seconds below.)
+#
+#event_return: mysql
+
+# On busy systems, enabling event_returns can cause a considerable load on
+# the storage system for returners. Events can be queued on the master and
+# stored in a batched fashion using a single transaction for multiple events.
+# By default, events are not queued.
+#event_return_queue: 0
+
+# In some cases enabling event return queueing can be very helpful, but the bus
+# may not be busy enough to flush the queue consistently. Setting this to a reasonable
+# value (1-30 seconds) will cause the queue to be flushed when the oldest event is older
+# than `event_return_queue_max_seconds` regardless of how many events are in the queue.
+#event_return_queue_max_seconds: 0
+
+# Only return events matching tags in a whitelist, supports glob matches.
+#event_return_whitelist:
+# - salt/master/a_tag
+# - salt/run/*/ret
+
+# Store all event returns **except** the tags in a blacklist, supports globs.
+#event_return_blacklist:
+# - salt/master/not_this_tag
+# - salt/wheel/*/ret
+
+# Passing very large events can cause the minion to consume large amounts of
+# memory. This value tunes the maximum size of a message allowed onto the
+# master event bus. The value is expressed in bytes.
+#max_event_size: 1048576
+
+# Windows platforms lack posix IPC and must rely on slower TCP based inter-
+# process communications. Set ipc_mode to 'tcp' on such systems
+#ipc_mode: ipc
+
+# Overwrite the default tcp ports used by the minion when ipc_mode is set to 'tcp'
+#tcp_master_pub_port: 4510
+#tcp_master_pull_port: 4511
+
+# By default, the master AES key rotates every 24 hours.
The next command +# following a key rotation will trigger a key refresh from the minion which may +# result in minions which do not respond to the first command after a key refresh. +# +# To tell the master to ping all minions immediately after an AES key refresh, set +# ping_on_rotate to True. This should mitigate the issue where a minion does not +# appear to initially respond after a key is rotated. +# +# Note that ping_on_rotate may cause high load on the master immediately after +# the key rotation event as minions reconnect. Consider this carefully if this +# salt master is managing a large number of minions. +# +# If disabled, it is recommended to handle this event by listening for the +# 'aes_key_rotate' event with the 'key' tag and acting appropriately. +# ping_on_rotate: False + +# By default, the master deletes its cache of minion data when the key for that +# minion is removed. To preserve the cache after key deletion, set +# 'preserve_minion_cache' to True. +# +# WARNING: This may have security implications if compromised minions auth with +# a previous deleted minion ID. +#preserve_minion_cache: False + +# Allow or deny minions from requesting their own key revocation +#allow_minion_key_revoke: True + +# If max_minions is used in large installations, the master might experience +# high-load situations because of having to check the number of connected +# minions for every authentication. This cache provides the minion-ids of +# all connected minions to all MWorker-processes and greatly improves the +# performance of max_minions. +# con_cache: False + +# The master can include configuration from other files. To enable this, +# pass a list of paths to this option. The paths can be either relative or +# absolute; if relative, they are considered to be relative to the directory +# the main master configuration file lives in (this file). Paths can make use +# of shell-style globbing. 
If no files are matched by a path passed to this +# option, then the master will log a warning message. +# +# Include a config file from some other path: +# include: /etc/salt/extra_config +# +# Include config from several files and directories: +# include: +# - /etc/salt/extra_config + + +##### Large-scale tuning settings ##### +########################################## +# Max open files +# +# Each minion connecting to the master uses AT LEAST one file descriptor, the +# master subscription connection. If enough minions connect you might start +# seeing on the console (and then salt-master crashes): +# Too many open files (tcp_listener.cpp:335) +# Aborted (core dumped) +# +# By default this value will be the one of `ulimit -Hn`, ie, the hard limit for +# max open files. +# +# If you wish to set a different value than the default one, uncomment and +# configure this setting. Remember that this value CANNOT be higher than the +# hard limit. Raising the hard limit depends on your OS and/or distribution, +# a good way to find the limit is to search the internet. For example: +# raise max open files hard limit debian +# +#max_open_files: 100000 + +# The number of worker threads to start. These threads are used to manage +# return calls made from minions to the master. If the master seems to be +# running slowly, increase the number of threads. This setting can not be +# set lower than 3. +#worker_threads: 5 + +# Set the ZeroMQ high water marks +# http://api.zeromq.org/3-2:zmq-setsockopt + +# The listen queue size / backlog +#zmq_backlog: 1000 + +# The publisher interface ZeroMQPubServerChannel +#pub_hwm: 1000 + +# The master may allocate memory per-event and not +# reclaim it. +# To set a high-water mark for memory allocation, use +# ipc_write_buffer to set a high-water mark for message +# buffering. +# Value: In bytes. Set to 'dynamic' to have Salt select +# a value for you. Default is disabled. 
+# ipc_write_buffer: 'dynamic'
+
+# These two batch settings, batch_safe_limit and batch_safe_size, are used to
+# automatically switch to a batch mode execution. If a command would have been
+# sent to more than <batch_safe_limit> minions, then run the command in
+# batches of <batch_safe_size>. If no batch_safe_size is specified, a default
+# of 8 will be used. If no batch_safe_limit is specified, then no automatic
+# batching will occur.
+#batch_safe_limit: 100
+#batch_safe_size: 8
+
+# Master stats enables stats events to be fired from the master at close
+# to the defined interval
+#master_stats: False
+#master_stats_event_iter: 60
+
+
+##### Security settings #####
+##########################################
+# Enable passphrase protection of Master private key. Although a string value
+# is acceptable; passwords should be stored in an external vaulting mechanism
+# and retrieved via sdb. See https://docs.saltproject.io/en/latest/topics/sdb/.
+# Passphrase protection is off by default but an example of an sdb profile and
+# query is as follows.
+# masterkeyring:
+# driver: keyring
+# service: system
+#
+# key_pass: sdb://masterkeyring/key_pass
+
+# Enable passphrase protection of the Master signing_key. This only applies if
+# master_sign_pubkey is set to True. This is disabled by default.
+# master_sign_pubkey: True
+# signing_key_pass: sdb://masterkeyring/signing_pass
+
+# Enable "open mode", this mode still maintains encryption, but turns off
+# authentication, this is only intended for highly secure environments or for
+# the situation where your keys end up in a bad state. If you run in open mode
+# you do so at your own risk!
+#open_mode: False
+
+# Enable auto_accept, this setting will automatically accept all incoming
+# public keys from the minions. Note that this is insecure.
+#auto_accept: False
+
+# The size of key that should be generated when creating new keys.
+#keysize: 2048 + +# Time in minutes that an incoming public key with a matching name found in +# pki_dir/minion_autosign/keyid is automatically accepted. Expired autosign keys +# are removed when the master checks the minion_autosign directory. +# 0 equals no timeout +# autosign_timeout: 120 + +# If the autosign_file is specified, incoming keys specified in the +# autosign_file will be automatically accepted. This is insecure. Regular +# expressions as well as globing lines are supported. The file must be readonly +# except for the owner. Use permissive_pki_access to allow the group write access. +#autosign_file: /etc/salt/autosign.conf + +# Works like autosign_file, but instead allows you to specify minion IDs for +# which keys will automatically be rejected. Will override both membership in +# the autosign_file and the auto_accept setting. +#autoreject_file: /etc/salt/autoreject.conf + +# If the autosign_grains_dir is specified, incoming keys from minions with grain +# values matching those defined in files in this directory will be accepted +# automatically. This is insecure. Minions need to be configured to send the grains. +#autosign_grains_dir: /etc/salt/autosign_grains + +# Enable permissive access to the salt keys. This allows you to run the +# master or minion as root, but have a non-root group be given access to +# your pki_dir. To make the access explicit, root must belong to the group +# you've given access to. This is potentially quite insecure. If an autosign_file +# is specified, enabling permissive_pki_access will allow group access to that +# specific file. +#permissive_pki_access: False + +# Allow users on the master access to execute specific commands on minions. +# This setting should be treated with care since it opens up execution +# capabilities to non root users. By default this capability is completely +# disabled. 
+#publisher_acl: +# larry: +# - test.ping +# - network.* +# +# Blacklist any of the following users or modules +# +# This example would blacklist all non sudo users, including root from +# running any commands. It would also blacklist any use of the "cmd" +# module. This is completely disabled by default. +# +# +# Check the list of configured users in client ACL against users on the +# system and throw errors if they do not exist. +#client_acl_verify: True +# +#publisher_acl_blacklist: +# users: +# - root +# - '^(?!sudo_).*$' # all non sudo users +# modules: +# - cmd + +# Enforce publisher_acl & publisher_acl_blacklist when users have sudo +# access to the salt command. +# +#sudo_acl: False + +# The external auth system uses the Salt auth modules to authenticate and +# validate users to access areas of the Salt system. +#external_auth: +# pam: +# fred: +# - test.* +# +# Time (in seconds) for a newly generated token to live. Default: 12 hours +#token_expire: 43200 +# +# Allow eauth users to specify the expiry time of the tokens they generate. +# A boolean applies to all users or a dictionary of whitelisted eauth backends +# and usernames may be given. +# token_expire_user_override: +# pam: +# - fred +# - tom +# ldap: +# - gary +# +#token_expire_user_override: False + +# Set to True to enable keeping the calculated user's auth list in the token +# file. This is disabled by default and the auth list is calculated or requested +# from the eauth driver each time. +# +# Note: `keep_acl_in_token` will be forced to True when using external authentication +# for REST API (`rest` is present under `external_auth`). This is because the REST API +# does not store the password, and can therefore not retroactively fetch the ACL, so +# the ACL must be stored in the token. +#keep_acl_in_token: False + +# Auth subsystem module to use to get authorized access list for a user. By default it's +# the same module used for external authentication. 
+#eauth_acl_module: django + +# Allow minions to push files to the master. This is disabled by default, for +# security purposes. +#file_recv: False + +# Set a hard-limit on the size of the files that can be pushed to the master. +# It will be interpreted as megabytes. Default: 100 +#file_recv_max_size: 100 + +# Signature verification on messages published from the master. +# This causes the master to cryptographically sign all messages published to its event +# bus, and minions then verify that signature before acting on the message. +# +# This is False by default. +# +# Note that to facilitate interoperability with masters and minions that are different +# versions, if sign_pub_messages is True but a message is received by a minion with +# no signature, it will still be accepted, and a warning message will be logged. +# Conversely, if sign_pub_messages is False, but a minion receives a signed +# message it will be accepted, the signature will not be checked, and a warning message +# will be logged. This behavior went away in Salt 2014.1.0 and these two situations +# will cause minion to throw an exception and drop the message. +# sign_pub_messages: False + +# Signature verification on messages published from minions +# This requires that minions cryptographically sign the messages they +# publish to the master. If minions are not signing, then log this information +# at loglevel 'INFO' and drop the message without acting on it. +# require_minion_sign_messages: False + +# The below will drop messages when their signatures do not validate. +# Note that when this option is False but `require_minion_sign_messages` is True +# minions MUST sign their messages but the validity of their signatures +# is ignored. +# These two config options exist so a Salt infrastructure can be moved +# to signing minion messages gradually. +# drop_messages_signature_fail: False + +# Use TLS/SSL encrypted connection between master and minion. 
+# Can be set to a dictionary containing keyword arguments corresponding to Python's +# 'ssl.wrap_socket' method. +# Default is None. +#ssl: +# keyfile: +# certfile: +# ssl_version: PROTOCOL_TLSv1_2 + +##### Salt-SSH Configuration ##### +########################################## +# Define the default salt-ssh roster module to use +#roster: flat + +# Pass in an alternative location for the salt-ssh `flat` roster file +#roster_file: /etc/salt/roster + +# Define locations for `flat` roster files so they can be chosen when using Salt API. +# An administrator can place roster files into these locations. Then when +# calling Salt API, parameter 'roster_file' should contain a relative path to +# these locations. That is, "roster_file=/foo/roster" will be resolved as +# "/etc/salt/roster.d/foo/roster" etc. This feature prevents passing insecure +# custom rosters through the Salt API. +# +#rosters: +# - /etc/salt/roster.d +# - /opt/salt/some/more/rosters + +# The ssh password to log in with. +#ssh_passwd: '' + +#The target system's ssh port number. +#ssh_port: 22 + +# Comma-separated list of ports to scan. +#ssh_scan_ports: 22 + +# Scanning socket timeout for salt-ssh. +#ssh_scan_timeout: 0.01 + +# Boolean to run command via sudo. +#ssh_sudo: False + +# Boolean to run ssh_pre_flight script defined in roster. By default +# the script will only run if the thin_dir does not exist on the targeted +# minion. This forces the script to run regardless of the thin dir existing +# or not. +#ssh_run_pre_flight: True + +# Number of seconds to wait for a response when establishing an SSH connection. +#ssh_timeout: 60 + +# The user to log in as. +#ssh_user: root + +# The log file of the salt-ssh command: +#ssh_log_file: /var/log/salt/ssh + +# Pass in minion option overrides that will be inserted into the SHIM for +# salt-ssh calls. The local minion config is not used for salt-ssh. 
Can be +# overridden on a per-minion basis in the roster (`minion_opts`) +#ssh_minion_opts: +# gpg_keydir: /root/gpg + +# Set this to True to default to using ~/.ssh/id_rsa for salt-ssh +# authentication with minions +#ssh_use_home_key: False + +# Set this to True to default salt-ssh to run with ``-o IdentitiesOnly=yes``. +# This option is intended for situations where the ssh-agent offers many +# different identities and allows ssh to ignore those identities and use the +# only one specified in options. +#ssh_identities_only: False + +# List-only nodegroups for salt-ssh. Each group must be formed as either a +# comma-separated list, or a YAML list. This option is useful to group minions +# into easy-to-target groups when using salt-ssh. These groups can then be +# targeted with the normal -N argument to salt-ssh. +#ssh_list_nodegroups: {} + +# salt-ssh has the ability to update the flat roster file if a minion is not +# found in the roster. Set this to True to enable it. +#ssh_update_roster: False + +##### Master Module Management ##### +########################################## +# Manage how master side modules are loaded. + +# Add any additional locations to look for master runners: +#runner_dirs: [] + +# Add any additional locations to look for master utils: +#utils_dirs: [] + +# Enable Cython for master side modules: +#cython_enable: False + + +##### State System settings ##### +########################################## +# The state system uses a "top" file to tell the minions what environment to +# use and what modules to use. The state_top file is defined relative to the +# root of the base environment as defined in "File Server settings" below. +#state_top: top.sls + +# The master_tops option replaces the external_nodes option by creating +# a plugable system for the generation of external top data. The external_nodes +# option is deprecated by the master_tops option. 
+# +# To gain the capabilities of the classic external_nodes system, use the +# following configuration: +# master_tops: +# ext_nodes: +# +#master_tops: {} + +# The renderer to use on the minions to render the state data +#renderer: jinja|yaml + +# Default Jinja environment options for all templates except sls templates +#jinja_env: +# block_start_string: '{%' +# block_end_string: '%}' +# variable_start_string: '{{' +# variable_end_string: '}}' +# comment_start_string: '{#' +# comment_end_string: '#}' +# line_statement_prefix: +# line_comment_prefix: +# trim_blocks: False +# lstrip_blocks: False +# newline_sequence: '\n' +# keep_trailing_newline: False + +# Jinja environment options for sls templates +#jinja_sls_env: +# block_start_string: '{%' +# block_end_string: '%}' +# variable_start_string: '{{' +# variable_end_string: '}}' +# comment_start_string: '{#' +# comment_end_string: '#}' +# line_statement_prefix: +# line_comment_prefix: +# trim_blocks: False +# lstrip_blocks: False +# newline_sequence: '\n' +# keep_trailing_newline: False + +# The failhard option tells the minions to stop immediately after the first +# failure detected in the state execution, defaults to False +#failhard: False + +# The state_verbose and state_output settings can be used to change the way +# state system data is printed to the display. By default all data is printed. +# The state_verbose setting can be set to True or False, when set to False +# all data that has a result of True and no changes will be suppressed. 
+#state_verbose: True
+
+# The state_output setting controls which results will be output full multi line
+# full, terse - each state will be full/terse
+# mixed - only states with errors will be full
+# changes - states with changes and errors will be full
+# full_id, mixed_id, changes_id and terse_id are also allowed;
+# when set, the state ID will be used as name in the output
+#state_output: full
+
+# The state_output_diff setting changes whether or not the output from
+# successful states is returned. Useful when even the terse output of these
+# states is cluttering the logs. Set it to True to ignore them.
+#state_output_diff: False
+
+# The state_output_profile setting changes whether profile information
+# will be shown for each state run.
+#state_output_profile: True
+
+# The state_output_pct setting changes whether success and failure information
+# as a percent of total actions will be shown for each state run.
+#state_output_pct: False
+
+# The state_compress_ids setting aggregates information about states which have
+# multiple "names" under the same state ID in the highstate output.
+#state_compress_ids: False
+
+# Automatically aggregate all states that have support for mod_aggregate by
+# setting to 'True'. Or pass a list of state module names to automatically
+# aggregate just those types.
+#
+# state_aggregate:
+# - pkg
+#
+#state_aggregate: False
+
+# Send progress events as each function in a state run completes execution
+# by setting to 'True'. Progress events are in the format
+# 'salt/job/<JID>/prog/<MID>/<RUN NUM>'.
+#state_events: False
+
+##### File Server settings #####
+##########################################
+# Salt runs a lightweight file server written in zeromq to deliver files to
+# minions. This file server is built into the master daemon and does not
+# require a dedicated port.
+ +# The file server works on environments passed to the master, each environment +# can have multiple root directories, the subdirectories in the multiple file +# roots cannot match, otherwise the downloaded files will not be able to be +# reliably ensured. A base environment is required to house the top file. +# Example: +# file_roots: +# base: +# - /srv/salt/ +# dev: +# - /srv/salt/dev/services +# - /srv/salt/dev/states +# prod: +# - /srv/salt/prod/services +# - /srv/salt/prod/states +# +#file_roots: +# base: +# - /srv/salt +# + +# The master_roots setting configures a master-only copy of the file_roots dictionary, +# used by the state compiler. +#master_roots: +# base: +# - /srv/salt-master + +# When using multiple environments, each with their own top file, the +# default behaviour is an unordered merge. To prevent top files from +# being merged together and instead to only use the top file from the +# requested environment, set this value to 'same'. +#top_file_merging_strategy: merge + +# To specify the order in which environments are merged, set the ordering +# in the env_order option. Given a conflict, the last matching value will +# win. +#env_order: ['base', 'dev', 'prod'] + +# If top_file_merging_strategy is set to 'same' and an environment does not +# contain a top file, the top file in the environment specified by default_top +# will be used instead. +#default_top: base + +# The hash_type is the hash to use when discovering the hash of a file on +# the master server. The default is sha256, but md5, sha1, sha224, sha384 and +# sha512 are also supported. +# +# WARNING: While md5 and sha1 are also supported, do not use them due to the +# high chance of possible collisions and thus security breach. +# +# Prior to changing this value, the master should be stopped and all Salt +# caches should be cleared. 
+#hash_type: sha256 + +# The buffer size in the file server can be adjusted here: +#file_buffer_size: 1048576 + +# A regular expression (or a list of expressions) that will be matched +# against the file path before syncing the modules and states to the minions. +# This includes files affected by the file.recurse state. +# For example, if you manage your custom modules and states in subversion +# and don't want all the '.svn' folders and content synced to your minions, +# you could set this to '/\.svn($|/)'. By default nothing is ignored. +#file_ignore_regex: +# - '/\.svn($|/)' +# - '/\.git($|/)' + +# A file glob (or list of file globs) that will be matched against the file +# path before syncing the modules and states to the minions. This is similar +# to file_ignore_regex above, but works on globs instead of regex. By default +# nothing is ignored. +# file_ignore_glob: +# - '*.pyc' +# - '*/somefolder/*.bak' +# - '*.swp' + +# File Server Backend +# +# Salt supports a modular fileserver backend system, this system allows +# the salt master to link directly to third party systems to gather and +# manage the files available to minions. Multiple backends can be +# configured and will be searched for the requested file in the order in which +# they are defined here. The default setting only enables the standard backend +# "roots" which uses the "file_roots" option. +#fileserver_backend: +# - roots +# +# To use multiple backends list them in the order they are searched: +#fileserver_backend: +# - git +# - roots +# +# Uncomment the line below if you do not want the file_server to follow +# symlinks when walking the filesystem tree. This is set to True +# by default. Currently this only applies to the default roots +# fileserver_backend. +#fileserver_followsymlinks: False +# +# Uncomment the line below if you do not want symlinks to be +# treated as the files they are pointing to. By default this is set to +# False. 
By uncommenting the line below, any detected symlink while listing +# files on the Master will not be returned to the Minion. +#fileserver_ignoresymlinks: True +# +# The fileserver can fire events off every time the fileserver is updated, +# these are disabled by default, but can be easily turned on by setting this +# flag to True +#fileserver_events: False + +# Git File Server Backend Configuration +# +# Optional parameter used to specify the provider to be used for gitfs. Must be +# either pygit2 or gitpython. If unset, then both will be tried (in that +# order), and the first one with a compatible version installed will be the +# provider that is used. +# +#gitfs_provider: pygit2 + +# Along with gitfs_password, is used to authenticate to HTTPS remotes. +# gitfs_user: '' + +# Along with gitfs_user, is used to authenticate to HTTPS remotes. +# This parameter is not required if the repository does not use authentication. +#gitfs_password: '' + +# By default, Salt will not authenticate to an HTTP (non-HTTPS) remote. +# This parameter enables authentication over HTTP. Enable this at your own risk. +#gitfs_insecure_auth: False + +# Along with gitfs_privkey (and optionally gitfs_passphrase), is used to +# authenticate to SSH remotes. This parameter (or its per-remote counterpart) +# is required for SSH remotes. +#gitfs_pubkey: '' + +# Along with gitfs_pubkey (and optionally gitfs_passphrase), is used to +# authenticate to SSH remotes. This parameter (or its per-remote counterpart) +# is required for SSH remotes. +#gitfs_privkey: '' + +# This parameter is optional, required only when the SSH key being used to +# authenticate is protected by a passphrase. +#gitfs_passphrase: '' + +# When using the git fileserver backend at least one git remote needs to be +# defined. The user running the salt master will need read access to the repo. +# +# The repos will be searched in order to find the file requested by a client +# and the first repo to have the file will return it. 
+# When using the git backend branches and tags are translated into salt +# environments. +# Note: file:// repos will be treated as a remote, so refs you want used must +# exist in that repo as *local* refs. +#gitfs_remotes: +# - git://github.com/saltstack/salt-states.git +# - file:///var/git/saltmaster +# +# The gitfs_ssl_verify option specifies whether to ignore ssl certificate +# errors when contacting the gitfs backend. You might want to set this to +# false if you're using a git backend that uses a self-signed certificate but +# keep in mind that setting this flag to anything other than the default of True +# is a security concern, you may want to try using the ssh transport. +#gitfs_ssl_verify: True +# +# The gitfs_root option gives the ability to serve files from a subdirectory +# within the repository. The path is defined relative to the root of the +# repository and defaults to the repository root. +#gitfs_root: somefolder/otherfolder +# +# The refspecs fetched by gitfs remotes +#gitfs_refspecs: +# - '+refs/heads/*:refs/remotes/origin/*' +# - '+refs/tags/*:refs/tags/*' +# +# +##### Pillar settings ##### +########################################## +# Salt Pillars allow for the building of global data that can be made selectively +# available to different minions based on minion grain filtering. The Salt +# Pillar is laid out in the same fashion as the file server, with environments, +# a top file and sls files. However, pillar data does not need to be in the +# highstate format, and is generally just key/value pairs. +#pillar_roots: +# base: +# - /srv/pillar +# +#ext_pillar: +# - hiera: /etc/hiera.yaml +# - cmd_yaml: cat /etc/salt/yaml + + +# A list of paths to be recursively decrypted during pillar compilation. +# Entries in this list can be formatted either as a simple string, or as a +# key/value pair, with the key being the pillar location, and the value being +# the renderer to use for pillar decryption. 
If the former is used, the +# renderer specified by decrypt_pillar_default will be used. +#decrypt_pillar: +# - 'foo:bar': gpg +# - 'lorem:ipsum:dolor' + +# The delimiter used to distinguish nested data structures in the +# decrypt_pillar option. +#decrypt_pillar_delimiter: ':' + +# The default renderer used for decryption, if one is not specified for a given +# pillar key in decrypt_pillar. +#decrypt_pillar_default: gpg + +# List of renderers which are permitted to be used for pillar decryption. +#decrypt_pillar_renderers: +# - gpg + +# If this is `True` and the ciphertext could not be decrypted, then an error is +# raised. +#gpg_decrypt_must_succeed: False + +# The ext_pillar_first option allows for external pillar sources to populate +# before file system pillar. This allows for targeting file system pillar from +# ext_pillar. +#ext_pillar_first: False + +# The external pillars permitted to be used on-demand using pillar.ext +#on_demand_ext_pillar: +# - libvirt +# - virtkey + +# The pillar_gitfs_ssl_verify option specifies whether to ignore ssl certificate +# errors when contacting the pillar gitfs backend. You might want to set this to +# false if you're using a git backend that uses a self-signed certificate but +# keep in mind that setting this flag to anything other than the default of True +# is a security concern, you may want to try using the ssh transport. +#pillar_gitfs_ssl_verify: True + +# The pillar_opts option adds the master configuration file data to a dict in +# the pillar called "master". This is used to set simple configurations in the +# master config file that can then be used on minions. +#pillar_opts: False + +# The pillar_safe_render_error option prevents the master from passing pillar +# render errors to the minion. This is set on by default because the error could +# contain templating data which would give that minion information it shouldn't +# have, like a password! 
When set true the error message will only show: +# Rendering SLS 'my.sls' failed. Please see master log for details. +#pillar_safe_render_error: True + +# The pillar_source_merging_strategy option allows you to configure merging strategy +# between different sources. It accepts five values: none, recurse, aggregate, overwrite, +# or smart. None will not do any merging at all. Recurse will recursively merge mappings of data. +# Aggregate instructs aggregation of elements between sources that use the #!yamlex renderer. Overwrite +# will overwrite elements according to the order in which they are processed. This is +# the behavior of the 2014.1 branch and earlier. Smart guesses the best strategy based +# on the "renderer" setting and is the default value. +#pillar_source_merging_strategy: smart + +# Recursively merge lists by aggregating them instead of replacing them. +#pillar_merge_lists: False + +# Set this option to True to force the pillarenv to be the same as the effective +# saltenv when running states. If pillarenv is specified this option will be +# ignored. +#pillarenv_from_saltenv: False + +# Set this option to 'True' to force a 'KeyError' to be raised whenever an +# attempt to retrieve a named value from pillar fails. When this option is set +# to 'False', the failed attempt returns an empty string. Default is 'False'. +#pillar_raise_on_missing: False + +# Git External Pillar (git_pillar) Configuration Options +# +# Specify the provider to be used for git_pillar. Must be either pygit2 or +# gitpython. If unset, then both will be tried in that same order, and the +# first one with a compatible version installed will be the provider that +# is used. +#git_pillar_provider: pygit2 + +# If the desired branch matches this value, and the environment is omitted +# from the git_pillar configuration, then the environment for that git_pillar +# remote will be base. 
+#git_pillar_base: master + +# If the branch is omitted from a git_pillar remote, then this branch will +# be used instead +#git_pillar_branch: master + +# Environment to use for git_pillar remotes. This is normally derived from +# the branch/tag (or from a per-remote env parameter), but if set this will +# override the process of deriving the env from the branch/tag name. +#git_pillar_env: '' + +# Path relative to the root of the repository where the git_pillar top file +# and SLS files are located. +#git_pillar_root: '' + +# Specifies whether or not to ignore SSL certificate errors when contacting +# the remote repository. +#git_pillar_ssl_verify: False + +# When set to False, if there is an update/checkout lock for a git_pillar +# remote and the pid written to it is not running on the master, the lock +# file will be automatically cleared and a new lock will be obtained. +#git_pillar_global_lock: True + +# Git External Pillar Authentication Options +# +# Along with git_pillar_password, is used to authenticate to HTTPS remotes. +#git_pillar_user: '' + +# Along with git_pillar_user, is used to authenticate to HTTPS remotes. +# This parameter is not required if the repository does not use authentication. +#git_pillar_password: '' + +# By default, Salt will not authenticate to an HTTP (non-HTTPS) remote. +# This parameter enables authentication over HTTP. +#git_pillar_insecure_auth: False + +# Along with git_pillar_privkey (and optionally git_pillar_passphrase), +# is used to authenticate to SSH remotes. +#git_pillar_pubkey: '' + +# Along with git_pillar_pubkey (and optionally git_pillar_passphrase), +# is used to authenticate to SSH remotes. +#git_pillar_privkey: '' + +# This parameter is optional, required only when the SSH key being used +# to authenticate is protected by a passphrase. 
+#git_pillar_passphrase: '' + +# The refspecs fetched by git_pillar remotes +#git_pillar_refspecs: +# - '+refs/heads/*:refs/remotes/origin/*' +# - '+refs/tags/*:refs/tags/*' + +# A master can cache pillars locally to bypass the expense of having to render them +# for each minion on every request. This feature should only be enabled in cases +# where pillar rendering time is known to be unsatisfactory and any attendant security +# concerns about storing pillars in a master cache have been addressed. +# +# When enabling this feature, be certain to read through the additional ``pillar_cache_*`` +# configuration options to fully understand the tunable parameters and their implications. +# +# Note: setting ``pillar_cache: True`` has no effect on targeting Minions with Pillars. +# See https://docs.saltproject.io/en/latest/topics/targeting/pillar.html +#pillar_cache: False + +# If and only if a master has set ``pillar_cache: True``, the cache TTL controls the amount +# of time, in seconds, before the cache is considered invalid by a master and a fresh +# pillar is recompiled and stored. +# The cache TTL does not prevent pillar cache from being refreshed before its TTL expires. +#pillar_cache_ttl: 3600 + +# If and only if a master has set `pillar_cache: True`, one of several storage providers +# can be utilized. +# +# `disk`: The default storage backend. This caches rendered pillars to the master cache. +# Rendered pillars are serialized and deserialized as msgpack structures for speed. +# Note that pillars are stored UNENCRYPTED. Ensure that the master cache +# has permissions set appropriately. (Same defaults are provided.) +# +# memory: [EXPERIMENTAL] An optional backend for pillar caches which uses a pure-Python +# in-memory data structure for maximal performance. There are several caveats, +# however. First, because each master worker contains its own in-memory cache, +# there is no guarantee of cache consistency between minion requests. 
This +# works best in situations where the pillar rarely if ever changes. Secondly, +# and perhaps more importantly, this means that unencrypted pillars will +# be accessible to any process which can examine the memory of the ``salt-master``! +# This may represent a substantial security risk. +# +#pillar_cache_backend: disk + +# A master can also cache GPG data locally to bypass the expense of having to render them +# for each minion on every request. This feature should only be enabled in cases +# where pillar rendering time is known to be unsatisfactory and any attendant security +# concerns about storing decrypted GPG data in a master cache have been addressed. +# +# When enabling this feature, be certain to read through the additional ``gpg_cache_*`` +# configuration options to fully understand the tunable parameters and their implications. +#gpg_cache: False + +# If and only if a master has set ``gpg_cache: True``, the cache TTL controls the amount +# of time, in seconds, before the cache is considered invalid by a master and a fresh +# pillar is recompiled and stored. +#gpg_cache_ttl: 86400 + +# If and only if a master has set `gpg_cache: True`, one of several storage providers +# can be utilized. Available options are the same as ``pillar_cache_backend``. +#gpg_cache_backend: disk + + +###### Reactor Settings ##### +########################################### +# Define a salt reactor. See https://docs.saltproject.io/en/latest/topics/reactor/ +#reactor: [] + +#Set the TTL for the cache of the reactor configuration. +#reactor_refresh_interval: 60 + +#Configure the number of workers for the runner/wheel in the reactor. +#reactor_worker_threads: 10 + +#Define the queue size for workers in the reactor. +#reactor_worker_hwm: 10000 + + +##### Syndic settings ##### +########################################## +# The Salt syndic is used to pass commands through a master from a higher +# master. Using the syndic is simple. 
If this is a master that will have +# syndic servers(s) below it, then set the "order_masters" setting to True. +# +# If this is a master that will be running a syndic daemon for passthrough, then +# the "syndic_master" setting needs to be set to the location of the master server +# to receive commands from. + +# Set the order_masters setting to True if this master will command lower +# masters' syndic interfaces. +#order_masters: False + +# If this master will be running a salt syndic daemon, syndic_master tells +# this master where to receive commands from. +#syndic_master: masterofmasters + +# This is the 'ret_port' of the MasterOfMaster: +#syndic_master_port: 4506 + +# PID file of the syndic daemon: +#syndic_pidfile: /var/run/salt-syndic.pid + +# The log file of the salt-syndic daemon: +#syndic_log_file: /var/log/salt/syndic + +# The behaviour of the multi-syndic when connection to a master of masters failed. +# Can specify ``random`` (default) or ``ordered``. If set to ``random``, masters +# will be iterated in random order. If ``ordered`` is specified, the configured +# order will be used. +#syndic_failover: random + +# The number of seconds for the salt client to wait for additional syndics to +# check in with their lists of expected minions before giving up. +#syndic_wait: 5 + + +##### Peer Publish settings ##### +########################################## +# Salt minions can send commands to other minions, but only if the minion is +# allowed to. By default "Peer Publication" is disabled, and when enabled it +# is enabled for specific minions and specific commands. This allows secure +# compartmentalization of commands based on individual minions. + +# The configuration uses regular expressions to match minions and then a list +# of regular expressions to match functions. The following will allow the +# minion authenticated as foo.example.com to execute functions from the test +# and pkg modules. 
+#peer: +# foo.example.com: +# - test.* +# - pkg.* +# +# This will allow all minions to execute all commands: +#peer: +# .*: +# - .* +# +# This is not recommended, since it would allow anyone who gets root on any +# single minion to instantly have root on all of the minions! + +# Minions can also be allowed to execute runners from the salt master. +# Since executing a runner from the minion could be considered a security risk, +# it needs to be enabled. This setting functions just like the peer setting +# except that it opens up runners instead of module functions. +# +# All peer runner support is turned off by default and must be enabled before +# using. This will enable all peer runners for all minions: +#peer_run: +# .*: +# - .* +# +# To enable just the manage.up runner for the minion foo.example.com: +#peer_run: +# foo.example.com: +# - manage.up +# +# +##### Mine settings ##### +##################################### +# Restrict mine.get access from minions. By default any minion has a full access +# to get all mine data from master cache. In acl definion below, only pcre matches +# are allowed. +# mine_get: +# .*: +# - .* +# +# The example below enables minion foo.example.com to get 'network.interfaces' mine +# data only, minions web* to get all network.* and disk.* mine data and all other +# minions won't get any mine data. +# mine_get: +# foo.example.com: +# - network.interfaces +# web.*: +# - network.* +# - disk.* + + +##### Logging settings ##### +########################################## +# The location of the master log file +# The master log can be sent to a regular file, local path name, or network +# location. Remote logging works best when configured to use rsyslogd(8) (e.g.: +# ``file:///dev/log``), with rsyslogd(8) configured for network logging. 
The URI +# format is: ://:/ +#log_file: /var/log/salt/master +#log_file: file:///dev/log +#log_file: udp://loghost:10514 + +#log_file: /var/log/salt/master +#key_logfile: /var/log/salt/key + +# The level of messages to send to the console. +# One of 'garbage', 'trace', 'debug', info', 'warning', 'error', 'critical'. +# +# The following log levels are considered INSECURE and may log sensitive data: +# ['garbage', 'trace', 'debug'] +# +#log_level: warning + +# The level of messages to send to the log file. +# One of 'garbage', 'trace', 'debug', 'info', 'warning', 'error', 'critical'. +# If using 'log_granular_levels' this must be set to the highest desired level. +#log_level_logfile: warning + +# The date and time format used in log messages. Allowed date/time formatting +# can be seen here: http://docs.python.org/library/time.html#time.strftime +#log_datefmt: '%H:%M:%S' +#log_datefmt_logfile: '%Y-%m-%d %H:%M:%S' + +# The format of the console logging messages. Allowed formatting options can +# be seen here: http://docs.python.org/library/logging.html#logrecord-attributes +# +# Console log colors are specified by these additional formatters: +# +# %(colorlevel)s +# %(colorname)s +# %(colorprocess)s +# %(colormsg)s +# +# Since it is desirable to include the surrounding brackets, '[' and ']', in +# the coloring of the messages, these color formatters also include padding as +# well. Color LogRecord attributes are only available for console logging. +# +#log_fmt_console: '%(colorlevel)s %(colormsg)s' +#log_fmt_console: '[%(levelname)-8s] %(message)s' +# +#log_fmt_logfile: '%(asctime)s,%(msecs)03d [%(name)-17s][%(levelname)-8s] %(message)s' + +# This can be used to control logging levels more specificically. 
This +# example sets the main salt library at the 'warning' level, but sets +# 'salt.modules' to log at the 'debug' level: +# log_granular_levels: +# 'salt': 'warning' +# 'salt.modules': 'debug' +# +#log_granular_levels: {} + + +##### Node Groups ###### +########################################## +# Node groups allow for logical groupings of minion nodes. A group consists of +# a group name and a compound target. Nodgroups can reference other nodegroups +# with 'N@' classifier. Ensure that you do not have circular references. +# +#nodegroups: +# group1: 'L@foo.domain.com,bar.domain.com,baz.domain.com or bl*.domain.com' +# group2: 'G@os:Debian and foo.domain.com' +# group3: 'G@os:Debian and N@group1' +# group4: +# - 'G@foo:bar' +# - 'or' +# - 'G@foo:baz' + + +##### Range Cluster settings ##### +########################################## +# The range server (and optional port) that serves your cluster information +# https://github.com/ytoolshed/range/wiki/%22yamlfile%22-module-file-spec +# +#range_server: range:80 + + +##### Windows Software Repo settings ##### +########################################### +# Location of the repo on the master: +#winrepo_dir_ng: '/srv/salt/win/repo-ng' +# +# List of git repositories to include with the local repo: +#winrepo_remotes_ng: +# - 'https://github.com/saltstack/salt-winrepo-ng.git' + + +##### Windows Software Repo settings - Pre 2015.8 ##### +######################################################## +# Legacy repo settings for pre-2015.8 Windows minions. 
+# +# Location of the repo on the master: +#winrepo_dir: '/srv/salt/win/repo' +# +# Location of the master's repo cache file: +#winrepo_mastercachefile: '/srv/salt/win/repo/winrepo.p' +# +# List of git repositories to include with the local repo: +#winrepo_remotes: +# - 'https://github.com/saltstack/salt-winrepo.git' + +# The refspecs fetched by winrepo remotes +#winrepo_refspecs: +# - '+refs/heads/*:refs/remotes/origin/*' +# - '+refs/tags/*:refs/tags/*' +# + +##### Returner settings ###### +############################################ +# Which returner(s) will be used for minion's result: +#return: mysql + + +###### Miscellaneous settings ###### +############################################ +# Default match type for filtering events tags: startswith, endswith, find, regex, fnmatch +#event_match_type: startswith + +# Save runner returns to the job cache +#runner_returns: True + +# Permanently include any available Python 3rd party modules into thin and minimal Salt +# when they are generated for Salt-SSH or other purposes. +# The modules should be named by the names they are actually imported inside the Python. +# The value of the parameters can be either one module or a comma separated list of them. +#thin_extra_mods: foo,bar +#min_extra_mods: foo,bar,baz + + +###### Keepalive settings ###### +############################################ +# Warning: Failure to set TCP keepalives on the salt-master can result in +# not detecting the loss of a minion when the connection is lost or when +# its host has been terminated without first closing the socket. +# Salt's Presence System depends on this connection status to know if a minion +# is "present". +# ZeroMQ now includes support for configuring SO_KEEPALIVE if supported by +# the OS. 
If connections between the minion and the master pass through +# a state tracking device such as a firewall or VPN gateway, there is +# the risk that it could tear down the connection between the master and minion +# without informing either party that their connection has been taken away. +# Enabling TCP Keepalives prevents this from happening. + +# Overall state of TCP Keepalives, enable (1 or True), disable (0 or False) +# or leave to the OS defaults (-1), on Linux, typically disabled. Default True, enabled. +#tcp_keepalive: True + +# How long before the first keepalive should be sent in seconds. Default 300 +# to send the first keepalive after 5 minutes, OS default (-1) is typically 7200 seconds +# on Linux see /proc/sys/net/ipv4/tcp_keepalive_time. +#tcp_keepalive_idle: 300 + +# How many lost probes are needed to consider the connection lost. Default -1 +# to use OS defaults, typically 9 on Linux, see /proc/sys/net/ipv4/tcp_keepalive_probes. +#tcp_keepalive_cnt: -1 + +# How often, in seconds, to send keepalives after the first one. Default -1 to +# use OS defaults, typically 75 seconds on Linux, see +# /proc/sys/net/ipv4/tcp_keepalive_intvl. 
+#tcp_keepalive_intvl: -1 + + +##### NetAPI settings ##### +############################################ +# Allow the raw_shell parameter to be used when calling Salt SSH client via API +#netapi_allow_raw_shell: True + +# Set a list of clients to enable in the API +#netapi_enable_clients: [] diff --git a/pkg/common/env-cleanup-rules.yml b/pkg/common/env-cleanup-rules.yml index c04e99fdc0af..097081104733 100644 --- a/pkg/common/env-cleanup-rules.yml +++ b/pkg/common/env-cleanup-rules.yml @@ -49,6 +49,44 @@ ci: # Help files - "**/*.chm" - "**/Scripts/wmitest*" + +pkg: + darwin: + dir_patterns: + - *ci_darwin_dir_patterns + - "**/pkgconfig" + - "**/share" + - "**/artifacts/salt/opt" + - "**/artifacts/salt/etc" + - "**/artifacts/salt/Lib" + file_patterns: + - *ci_darwin_file_patterns + linux: + dir_patterns: + - *ci_linux_dir_patterns + file_patterns: + - *ci_linux_file_patterns + windows: + dir_patterns: + - *ci_windows_dir_patterns + - "**/salt/share" + - "**/site-packages/pywin32_system32" + file_patterns: + - *ci_windows_file_patterns + - "**/Scripts/py.exe" + - "**/Scripts/pyw.exe" + - "**/Scripts/venvlauncher.exe" + - "**/Scripts/venvwlauncher.exe" + - "**/Scripts/wheel*" + - "**/doc" + - "**/readme" + - "**/salt/salt-api*" + - "**/salt/salt-key*" + - "**/salt/salt-run*" + - "**/salt/salt-syndic*" + - "**/salt/salt-unity*" + - "**/salt/spm*" + - "**/salt/wheel*" # Non Windows execution modules - "**/site-packages/salt/modules/aacme.py*" - "**/site-packages/salt/modules/aix.py*" @@ -148,7 +186,6 @@ ci: - "**/site-packages/salt/modules/runit.py*" - "**/site-packages/salt/modules/s6.py*" - "**/site-packages/salt/modules/scsi.py*" - - "**/site-packages/salt/modules/seed.py*" - "**/site-packages/salt/modules/sensors.py*" - "**/site-packages/salt/modules/service.py*" - "**/site-packages/salt/modules/shadow.py*" @@ -232,41 +269,3 @@ ci: - "**/site-packages/salt/states/virt.py.py*" - "**/site-packages/salt/states/zfs.py*" - "**/site-packages/salt/states/zpool.py*" - 
-pkg: - darwin: - dir_patterns: - - *ci_darwin_dir_patterns - - "**/pkgconfig" - - "**/share" - - "**/artifacts/salt/opt" - - "**/artifacts/salt/etc" - - "**/artifacts/salt/Lib" - file_patterns: - - *ci_darwin_file_patterns - linux: - dir_patterns: - - *ci_linux_dir_patterns - file_patterns: - - *ci_linux_file_patterns - windows: - dir_patterns: - - *ci_windows_dir_patterns - - "**/salt/share" - - "**/site-packages/pywin32_system32" - file_patterns: - - *ci_windows_file_patterns - - "**/Scripts/py.exe" - - "**/Scripts/pyw.exe" - - "**/Scripts/venvlauncher.exe" - - "**/Scripts/venvwlauncher.exe" - - "**/Scripts/wheel*" - - "**/doc" - - "**/readme" - - "**/salt/salt-api*" - - "**/salt/salt-key*" - - "**/salt/salt-run*" - - "**/salt/salt-syndic*" - - "**/salt/salt-unity*" - - "**/salt/spm*" - - "**/salt/wheel*" diff --git a/pkg/common/salt-common.logrotate b/pkg/common/logrotate/salt-common similarity index 85% rename from pkg/common/salt-common.logrotate rename to pkg/common/logrotate/salt-common index a0306ff37024..875c17e0cc63 100644 --- a/pkg/common/salt-common.logrotate +++ b/pkg/common/logrotate/salt-common @@ -4,6 +4,7 @@ rotate 7 compress notifempty + create 0640 } /var/log/salt/minion { @@ -12,6 +13,7 @@ rotate 7 compress notifempty + create 0640 } /var/log/salt/key { @@ -20,6 +22,7 @@ rotate 7 compress notifempty + create 0640 } /var/log/salt/api { @@ -28,6 +31,7 @@ rotate 7 compress notifempty + create 0640 } /var/log/salt/syndic { @@ -36,6 +40,7 @@ rotate 7 compress notifempty + create 0640 } /var/log/salt/proxy { @@ -44,4 +49,5 @@ rotate 7 compress notifempty + create 0640 } diff --git a/pkg/common/onedir/_salt_onedir_extras.pth b/pkg/common/onedir/_salt_onedir_extras.pth new file mode 100644 index 000000000000..1e7742532dfa --- /dev/null +++ b/pkg/common/onedir/_salt_onedir_extras.pth @@ -0,0 +1 @@ +import _salt_onedir_extras; _salt_onedir_extras.setup(__file__) diff --git a/pkg/common/onedir/_salt_onedir_extras.py 
b/pkg/common/onedir/_salt_onedir_extras.py new file mode 100644 index 000000000000..366136ba2a98 --- /dev/null +++ b/pkg/common/onedir/_salt_onedir_extras.py @@ -0,0 +1,18 @@ +import pathlib +import sys + + +def setup(pth_file_path): + # Discover the extras-. directory + extras_parent_path = pathlib.Path(pth_file_path).resolve().parent.parent + if not sys.platform.startswith("win"): + extras_parent_path = extras_parent_path.parent + + extras_path = str(extras_parent_path / "extras-{}.{}".format(*sys.version_info)) + + if extras_path in sys.path and sys.path[0] != extras_path: + # The extras directory must come first + sys.path.remove(extras_path) + + if extras_path not in sys.path: + sys.path.insert(0, extras_path) diff --git a/pkg/common/salt-master.upstart.rhel6 b/pkg/common/salt-master.upstart.rhel6 deleted file mode 100644 index 412297ae45e4..000000000000 --- a/pkg/common/salt-master.upstart.rhel6 +++ /dev/null @@ -1,15 +0,0 @@ -description "Salt Master" - -start on runlevel [2345] -stop on runlevel [!2345] -limit nofile 100000 100000 - -script - # Read configuration variable file if it is present - [ -f /etc/default/$UPSTART_JOB ] && . /etc/default/$UPSTART_JOB - - # Activate the virtualenv if defined - [ -f $SALT_USE_VIRTUALENV/bin/activate ] && . $SALT_USE_VIRTUALENV/bin/activate - - exec salt-master -end script diff --git a/pkg/common/salt-minion.upstart.rhel6 b/pkg/common/salt-minion.upstart.rhel6 deleted file mode 100644 index ef5d588f879b..000000000000 --- a/pkg/common/salt-minion.upstart.rhel6 +++ /dev/null @@ -1,21 +0,0 @@ -description "Salt Minion" - -start on runlevel [2345] -stop on runlevel [!2345] - -# The respawn in the minion is known to cause problems -# because if the main minion process dies it has done -# so most likely for a good reason. Uncomment these -# two lines to enable respawn -#respawn -#respawn limit 10 5 - -script - # Read configuration variable file if it is present - [ -f /etc/default/$UPSTART_JOB ] && . 
/etc/default/$UPSTART_JOB - - # Activate the virtualenv if defined - [ -f $SALT_USE_VIRTUALENV/bin/activate ] && . $SALT_USE_VIRTUALENV/bin/activate - - exec salt-minion -end script diff --git a/pkg/common/salt-syndic.upstart.rhel6 b/pkg/common/salt-syndic.upstart.rhel6 deleted file mode 100644 index c9467156b0d4..000000000000 --- a/pkg/common/salt-syndic.upstart.rhel6 +++ /dev/null @@ -1,14 +0,0 @@ -description "salt-syndic" - -start on runlevel [2345] -stop on runlevel [!2345] - -script - # Read configuration variable file if it is present - [ -f /etc/default/$UPSTART_JOB ] && . /etc/default/$UPSTART_JOB - - # Activate the virtualenv if defined - [ -f $SALT_USE_VIRTUALENV/bin/activate ] && . $SALT_USE_VIRTUALENV/bin/activate - - exec salt-syndic -end script diff --git a/pkg/debian/changelog b/pkg/debian/changelog index 1cb607032e12..12f388a11280 100644 --- a/pkg/debian/changelog +++ b/pkg/debian/changelog @@ -1,3 +1,1363 @@ +salt (3006.5) stable; urgency=medium + + + # Removed + + * Tech Debt - support for pysss removed due to functionality addition in Python 3.3 [#65029](https://github.com/saltstack/salt/issues/65029) + + # Fixed + + * Improved error message when state arguments are accidentally passed as a string [#38098](https://github.com/saltstack/salt/issues/38098) + * Allow `pip.install` to create a log file that is passed in if the parent directory is writeable [#44722](https://github.com/saltstack/salt/issues/44722) + * Fixed merging of complex pillar overrides with salt-ssh states [#59802](https://github.com/saltstack/salt/issues/59802) + * Fixed gpg pillar rendering with salt-ssh [#60002](https://github.com/saltstack/salt/issues/60002) + * Made salt-ssh states not re-render pillars unnecessarily [#62230](https://github.com/saltstack/salt/issues/62230) + * Made Salt maintain options in Debian package repo definitions [#64130](https://github.com/saltstack/salt/issues/64130) + * Migrated all [`invoke`](https://www.pyinvoke.org/) tasks to 
[`python-tools-scripts`](https://github.com/s0undt3ch/python-tools-scripts). + + * `tasks/docs.py` *> `tools/precommit/docs.py` + * `tasks/docstrings.py` *> `tools/precommit/docstrings.py` + * `tasks/loader.py` *> `tools/precommit/loader.py` + * `tasks/filemap.py` *> `tools/precommit/filemap.py` [#64374](https://github.com/saltstack/salt/issues/64374) + * Fix salt user login shell path in Debian packages [#64377](https://github.com/saltstack/salt/issues/64377) + * Fill out lsb_distrib_xxxx (best estimate) grains if problems with retrieving lsb_release data [#64473](https://github.com/saltstack/salt/issues/64473) + * Fixed an issue in the ``file.directory`` state where the ``children_only`` keyword + argument was not being respected. [#64497](https://github.com/saltstack/salt/issues/64497) + * Move salt.ufw to correct location /etc/ufw/applications.d/ [#64572](https://github.com/saltstack/salt/issues/64572) + * Fixed salt-ssh stacktrace when retcode is not an integer [#64575](https://github.com/saltstack/salt/issues/64575) + * Fixed SSH shell seldomly fails to report any exit code [#64588](https://github.com/saltstack/salt/issues/64588) + * Fixed some issues in x509_v2 execution module private key functions [#64597](https://github.com/saltstack/salt/issues/64597) + * Fixed grp.getgrall() in utils/user.py causing performance issues [#64888](https://github.com/saltstack/salt/issues/64888) + * Fix user.list_groups omits remote groups via sssd, etc. 
[#64953](https://github.com/saltstack/salt/issues/64953) + * Ensure sync from _grains occurs before attempting pillar compilation in case custom grain used in pillar file [#65027](https://github.com/saltstack/salt/issues/65027) + * Moved gitfs locks to salt working dir to avoid lock wipes [#65086](https://github.com/saltstack/salt/issues/65086) + * Only attempt to create a keys directory when `--gen-keys` is passed to the `salt-key` CLI [#65093](https://github.com/saltstack/salt/issues/65093) + * Fix nonce verification, request server replies do not stomp on eachother. [#65114](https://github.com/saltstack/salt/issues/65114) + * speed up yumpkg list_pkgs by not requiring digest or signature verification on lookup. [#65152](https://github.com/saltstack/salt/issues/65152) + * Fix pkg.latest failing on windows for winrepo packages where the package is already up to date [#65165](https://github.com/saltstack/salt/issues/65165) + * Ensure __kwarg__ is preserved when checking for kwargs. This change affects proxy minions when used with Deltaproxy, which had kwargs popped when targeting multiple minions id. [#65179](https://github.com/saltstack/salt/issues/65179) + * Fixes traceback when state id is an int in a reactor SLS file. [#65210](https://github.com/saltstack/salt/issues/65210) + * Install logrotate config as /etc/logrotate.d/salt-common for Debian packages + Remove broken /etc/logrotate.d/salt directory from 3006.3 if it exists. [#65231](https://github.com/saltstack/salt/issues/65231) + * Use ``sha256`` as the default ``hash_type``. It has been the default since Salt v2016.9 [#65287](https://github.com/saltstack/salt/issues/65287) + * Preserve ownership on log rotation [#65288](https://github.com/saltstack/salt/issues/65288) + * Ensure that the correct value of jid_inclue is passed if the argument is included in the passed keyword arguments. 
[#65302](https://github.com/saltstack/salt/issues/65302) + * Uprade relenv to 0.14.2 + * Update openssl to address CVE-2023-5363. + * Fix bug in openssl setup when openssl binary can't be found. + * Add M1 mac support. [#65316](https://github.com/saltstack/salt/issues/65316) + * Fix regex for filespec adding/deleting fcontext policy in selinux [#65340](https://github.com/saltstack/salt/issues/65340) + * Ensure CLI options take priority over Saltfile options [#65358](https://github.com/saltstack/salt/issues/65358) + * Test mode for state function `saltmod.wheel` no longer set's `result` to `(None,)` [#65372](https://github.com/saltstack/salt/issues/65372) + * Client only process events which tag conforms to an event return. [#65400](https://github.com/saltstack/salt/issues/65400) + * Fixes an issue setting user or machine policy on Windows when the Group Policy + directory is missing [#65411](https://github.com/saltstack/salt/issues/65411) + * Fix regression in file module which was not re-using a file client. [#65450](https://github.com/saltstack/salt/issues/65450) + * pip.installed state will now properly fail when a specified user does not exists [#65458](https://github.com/saltstack/salt/issues/65458) + * Publish channel connect callback method properly closes it's request channel. [#65464](https://github.com/saltstack/salt/issues/65464) + * Ensured the pillar in SSH wrapper modules is the same as the one used in template rendering when overrides are passed [#65483](https://github.com/saltstack/salt/issues/65483) + * Fix file.comment ignore_missing not working with multiline char [#65501](https://github.com/saltstack/salt/issues/65501) + * Warn when an un-closed transport client is being garbage collected. [#65554](https://github.com/saltstack/salt/issues/65554) + * Only generate the HMAC's for ``libssl.so.1.1`` and ``libcrypto.so.1.1`` if those files exist. 
[#65581](https://github.com/saltstack/salt/issues/65581) + * Fixed an issue where Salt Cloud would fail if it could not delete lingering + PAexec binaries [#65584](https://github.com/saltstack/salt/issues/65584) + + # Added + + * Added Salt support for Debian 12 [#64223](https://github.com/saltstack/salt/issues/64223) + * Added Salt support for Amazon Linux 2023 [#64455](https://github.com/saltstack/salt/issues/64455) + + # Security + + * Bump to `cryptography==41.0.4` due to https://github.com/advisories/GHSA-v8gr-m533-ghj9 [#65268](https://github.com/saltstack/salt/issues/65268) + * Bump to `cryptography==41.0.7` due to https://github.com/advisories/GHSA-jfhm-5ghh-2f97 [#65643](https://github.com/saltstack/salt/issues/65643) + + + -- Salt Project Packaging Tue, 12 Dec 2023 17:52:33 +0000 + +salt (3006.4) stable; urgency=medium + + + # Security + + * Fix CVE-2023-34049 by ensuring we do not use a predictable name for the script and correctly check returncode of scp command. + This only impacts salt*ssh users using the pre-flight option. 
[#cve-2023-34049](https://github.com/saltstack/salt/issues/cve-2023-34049) + * Update to `gitpython>=3.1.35` due to https://github.com/advisories/GHSA-wfm5-v35h-vwf4 and https://github.com/advisories/GHSA-cwvm-v4w8-q58c [#65163](https://github.com/saltstack/salt/issues/65163) + * Bump to `cryptography==41.0.4` due to https://github.com/advisories/GHSA-v8gr-m533-ghj9 [#65268](https://github.com/saltstack/salt/issues/65268) + * Upgrade relenv to 0.13.12 to address CVE-2023-4807 [#65316](https://github.com/saltstack/salt/issues/65316) + * Bump to `urllib3==1.26.17` or `urllib3==2.0.6` due to https://github.com/advisories/GHSA-v845-jxx5-vc9f [#65334](https://github.com/saltstack/salt/issues/65334) + * Bump to `gitpython==3.1.37` due to https://github.com/advisories/GHSA-cwvm-v4w8-q58c [#65383](https://github.com/saltstack/salt/issues/65383) + + + -- Salt Project Packaging Mon, 16 Oct 2023 17:22:41 +0000 + +salt (3006.3) stable; urgency=medium + + + # Removed + + * Fedora 36 support was removed because it reached EOL [#64315](https://github.com/saltstack/salt/issues/64315) + * Handle deprecation warnings: + + * Switch to `FullArgSpec` since Py 3.11 no longer has `ArgSpec`, deprecated since Py 3.0 + * Stop using the deprecated `cgi` module + * Stop using the deprecated `pipes` module + * Stop using the deprecated `imp` module [#64553](https://github.com/saltstack/salt/issues/64553) + + # Changed + + * Replace libnacl with PyNaCl [#64372](https://github.com/saltstack/salt/issues/64372) + * Don't hardcode the python version on the Salt Package tests and on the `pkg/debian/salt-cloud.postinst` file [#64553](https://github.com/saltstack/salt/issues/64553) + * Some more deprecated code fixes: + + * Stop using the deprecated `locale.getdefaultlocale()` function + * Stop accessing deprecated attributes + * `pathlib.Path.__enter__()` usage is deprecated and not required, a no*op [#64565](https://github.com/saltstack/salt/issues/64565) + * Bump to `pyyaml==6.0.1` due to 
https://github.com/yaml/pyyaml/issues/601 and address lint issues [#64657](https://github.com/saltstack/salt/issues/64657) + + # Fixed + + * Fix for assume role when used salt-cloud to create aws ec2. [#52501](https://github.com/saltstack/salt/issues/52501) + * fixes aptpkg module by checking for blank comps. [#58667](https://github.com/saltstack/salt/issues/58667) + * `wheel.file_roots.find` is now able to find files in subdirectories of the roots. [#59800](https://github.com/saltstack/salt/issues/59800) + * pkg.latest no longer fails when multiple versions are reported to be installed (e.g. updating the kernel) [#60931](https://github.com/saltstack/salt/issues/60931) + * Do not update the credentials dictionary in `utils/aws.py` while iterating over it, and use the correct delete functionality [#61049](https://github.com/saltstack/salt/issues/61049) + * fixed runner not having a proper exit code when runner modules throw an exception. [#61173](https://github.com/saltstack/salt/issues/61173) + * `pip.list_all_versions` now works with `index_url` and `extra_index_url` [#61610](https://github.com/saltstack/salt/issues/61610) + * speed up file.recurse by using prefix with cp.list_master_dir and remove an un-needed loop. [#61998](https://github.com/saltstack/salt/issues/61998) + * Preserve test=True condition while running sub states. [#62590](https://github.com/saltstack/salt/issues/62590) + * Job returns are only sent to originating master [#62834](https://github.com/saltstack/salt/issues/62834) + * Fixes an issue with failing subsequent state runs with the lgpo state module. + The ``lgpo.get_polcy`` function now returns all boolean settings. [#63296](https://github.com/saltstack/salt/issues/63296) + * Fix SELinux get policy with trailing whitespace [#63336](https://github.com/saltstack/salt/issues/63336) + * Fixes an issue with boolean settings not being reported after being set. The + ``lgpo.get_polcy`` function now returns all boolean settings. 
[#63473](https://github.com/saltstack/salt/issues/63473) + * Ensure body is returned when salt.utils.http returns something other than 200 with tornado backend. [#63557](https://github.com/saltstack/salt/issues/63557) + * Allow long running pillar and file client requests to finish using request_channel_timeout and request_channel_tries minion config. [#63824](https://github.com/saltstack/salt/issues/63824) + * Fix state_queue type checking to allow int values [#64122](https://github.com/saltstack/salt/issues/64122) + * Call global logger when catching pip.list exceptions in states.pip.installed + Rename global logger `log` to `logger` inside pip_state [#64169](https://github.com/saltstack/salt/issues/64169) + * Fixes permissions created by the Debian and RPM packages for the salt user. + + The salt user created by the Debian and RPM packages to run the salt*master process, was previously given ownership of various directories in a way which compromised the benefits of running the salt-master process as a non-root user. + + This fix sets the salt user to only have write access to those files and + directories required for the salt*master process to run. [#64193](https://github.com/saltstack/salt/issues/64193) + * Fix user.present state when groups is unset to ensure the groups are unchanged, as documented. [#64211](https://github.com/saltstack/salt/issues/64211) + * Fixes issue with MasterMinion class loading configuration from `/etc/salt/minion.d/*.conf. + + The MasterMinion class (used for running orchestraions on master and other functionality) was incorrectly loading configuration from `/etc/salt/minion.d/*.conf`, when it should only load configuration from `/etc/salt/master` and `/etc/salt/master.d/*.conf`. 
[#64219](https://github.com/saltstack/salt/issues/64219) + * Fixed issue in mac_user.enable_auto_login that caused the user's keychain to be reset at each boot [#64226](https://github.com/saltstack/salt/issues/64226) + * Fixed KeyError in logs when running a state that fails. [#64231](https://github.com/saltstack/salt/issues/64231) + * Fixed x509_v2 `create_private_key`/`create_crl` unknown kwargs: __pub_fun... [#64232](https://github.com/saltstack/salt/issues/64232) + * remove the hard coded python version in error. [#64237](https://github.com/saltstack/salt/issues/64237) + * `salt-pip` now properly errors out when being called from a non `onedir` environment. [#64249](https://github.com/saltstack/salt/issues/64249) + * Ensure we return an error when adding the key fails in the pkgrepo state for debian hosts. [#64253](https://github.com/saltstack/salt/issues/64253) + * Fixed file client private attribute reference on `SaltMakoTemplateLookup` [#64280](https://github.com/saltstack/salt/issues/64280) + * Fix pkgrepo.absent failures on apt-based systems when repo either a) contains a + trailing slash, or b) there is an arch mismatch. [#64286](https://github.com/saltstack/salt/issues/64286) + * Fix detection of Salt codename by "salt_version" execution module [#64306](https://github.com/saltstack/salt/issues/64306) + * Ensure selinux values are handled lowercase [#64318](https://github.com/saltstack/salt/issues/64318) + * Remove the `clr.AddReference`, it is causing an `Illegal characters in path` exception [#64339](https://github.com/saltstack/salt/issues/64339) + * Update `pkg.group_installed` state to support repo options [#64348](https://github.com/saltstack/salt/issues/64348) + * Fix salt user login shell path in Debian packages [#64377](https://github.com/saltstack/salt/issues/64377) + * Allow for multiple user's keys presented when authenticating, for example: root, salt, etc. 
[#64398](https://github.com/saltstack/salt/issues/64398) + * Fixed an issue with ``lgpo_reg`` where existing entries for the same key in + ``Registry.pol`` were being overwritten in subsequent runs if the value name in + the subesequent run was contained in the existing value name. For example, a + key named ``SetUpdateNotificationLevel`` would be overwritten by a subsequent + run attempting to set ``UpdateNotificationLevel`` [#64401](https://github.com/saltstack/salt/issues/64401) + * Add search for %ProgramData%\Chocolatey\choco.exe to determine if Chocolatey is installed or not [#64427](https://github.com/saltstack/salt/issues/64427) + * Fix regression for user.present on handling groups with dupe GIDs [#64430](https://github.com/saltstack/salt/issues/64430) + * Fix inconsistent use of args in ssh_auth.managed [#64442](https://github.com/saltstack/salt/issues/64442) + * Ensure we raise an error when the name argument is invalid in pkgrepo.managed state for systems using apt. [#64451](https://github.com/saltstack/salt/issues/64451) + * Fix file.symlink will not replace/update existing symlink [#64477](https://github.com/saltstack/salt/issues/64477) + * Fixed salt-ssh state.* commands returning retcode 0 when state/pillar rendering fails [#64514](https://github.com/saltstack/salt/issues/64514) + * Fix pkg.install when using a port in the url. [#64516](https://github.com/saltstack/salt/issues/64516) + * `win_pkg` Fixes an issue runing `pkg.install` with `version=latest` where the + new installer would not be cached if there was already an installer present + with the same name. [#64519](https://github.com/saltstack/salt/issues/64519) + * Added a `test:full` label in the salt repository, which, when selected, will force a full test run. 
[#64539](https://github.com/saltstack/salt/issues/64539) + * Syndic's async_req_channel uses the asynchornous version of request channel [#64552](https://github.com/saltstack/salt/issues/64552) + * Ensure runners properly save information to job cache. [#64570](https://github.com/saltstack/salt/issues/64570) + * Added salt.ufw to salt-master install on Debian and Ubuntu [#64572](https://github.com/saltstack/salt/issues/64572) + * Added support for Chocolatey 2.0.0+ while maintaining support for older versions [#64622](https://github.com/saltstack/salt/issues/64622) + * Updated semanage fcontext to use --modify if context already exists when adding context [#64625](https://github.com/saltstack/salt/issues/64625) + * Preserve request client socket between requests. [#64627](https://github.com/saltstack/salt/issues/64627) + * Show user friendly message when pillars timeout [#64651](https://github.com/saltstack/salt/issues/64651) + * File client timeouts durring jobs show user friendly errors instead of tracbacks [#64653](https://github.com/saltstack/salt/issues/64653) + * SaltClientError does not log a traceback on minions, we expect these to happen so a user friendly log is shown. [#64729](https://github.com/saltstack/salt/issues/64729) + * Look in location salt is running from, this accounts for running from an unpacked onedir file that has not been installed. [#64877](https://github.com/saltstack/salt/issues/64877) + * Preserve credentials on spawning platforms, minions no longer re-authenticate + with every job when using `multiprocessing=True`. [#64914](https://github.com/saltstack/salt/issues/64914) + * Fixed uninstaller to not remove the `salt` directory by default. This allows + the `extras*3.##` folder to persist so salt-pip dependencies are not wiped out + during an upgrade. [#64957](https://github.com/saltstack/salt/issues/64957) + * fix msteams by adding the missing header that Microsoft is now enforcing. 
[#64973](https://github.com/saltstack/salt/issues/64973) + * Fix __env__ and improve cache cleaning see more info at pull #65017. [#65002](https://github.com/saltstack/salt/issues/65002) + * Better error message on inconsistent decoded payload [#65020](https://github.com/saltstack/salt/issues/65020) + * Handle permissions access error when calling `lsb_release` with the salt user [#65024](https://github.com/saltstack/salt/issues/65024) + * Allow schedule state module to update schedule when the minion is offline. [#65033](https://github.com/saltstack/salt/issues/65033) + * Fixed creation of wildcard DNS in SAN in `x509_v2` [#65072](https://github.com/saltstack/salt/issues/65072) + * The macOS installer no longer removes the extras directory [#65073](https://github.com/saltstack/salt/issues/65073) + + # Added + + * Added a script to automate setting up a 2nd minion in a user context on Windows [#64439](https://github.com/saltstack/salt/issues/64439) + * Several fixes to the CI workflow: + + * Don't override the `on` Jinja block on the `ci.yaml` template. This enables reacting to labels getting added/removed + to/from pull requests. + * Switch to using `tools` and re*use the event payload available instead of querying the GH API again to get the pull + request labels + * Concentrate test selection by labels to a single place + * Enable code coverage on pull*requests by setting the `test:coverage` label [#64547](https://github.com/saltstack/salt/issues/64547) + + # Security + + * Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre*existing wheels. 
+ + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA*5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA*x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA*w7pp-m8wf-vj6r [#64595](https://github.com/saltstack/salt/issues/64595) + * Bump to `aiohttp==3.8.5` due to https://github.com/advisories/GHSA-45c4-8wx5-qw6w [#64687](https://github.com/saltstack/salt/issues/64687) + * Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) + * Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python*3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) + * Update to `gitpython>=3.1.32` due to https://github.com/advisories/GHSA-pr76-5cm5-w9cj [#64988](https://github.com/saltstack/salt/issues/64988) + + + -- Salt Project Packaging Wed, 06 Sep 2023 16:51:25 +0000 + +salt (3006.2) stable; urgency=medium + + + # Fixed + + * In scenarios where PythonNet fails to load, Salt will now fall back to WMI for + gathering grains information [#64897](https://github.com/saltstack/salt/issues/64897) + + # Security + + * fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection [#cve-2023-20897](https://github.com/saltstack/salt/issues/cve-2023-20897) + * Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. 
[#cve-2023-20898](https://github.com/saltstack/salt/issues/cve-2023-20898) + * Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA*j8r2-6x86-q33q [#64336](https://github.com/saltstack/salt/issues/64336) + * Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre*existing wheels. + + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA*5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA*x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA*w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 [#64595](https://github.com/saltstack/salt/issues/64595) + * Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) + * Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python*3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) + + + -- Salt Project Packaging Wed, 09 Aug 2023 12:01:52 +0000 + +salt (3006.1) stable; urgency=medium + + + # Fixed + + * Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236) + * Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477) + * add documentation note about reactor state ids. 
[#63589](https://github.com/saltstack/salt/issues/63589) + * Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785) + * Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810) + * Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081) + * Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082) + * Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. + This event is only used when these functions are called via the schedule execution modules. + Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103) + * Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109) + * Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111) + * Fixed several file client uses which were not properly terminating it by switching to using it as a context manager + whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113) + * Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. 
[#64114](https://github.com/saltstack/salt/issues/64114) + * Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117) + * Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118) + * lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126) + * Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141) + * Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150) + * Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158) + * Fixed issue in salt-cloud so that multiple masters specified in the cloud + are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170) + * Make sure the `salt-ssh` CLI calls it's `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184) + * Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186) + * Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. 
[#64192](https://github.com/saltstack/salt/issues/64192) + * Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200) + * Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204) + * ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222) + + + -- Salt Project Packaging Fri, 05 May 2023 17:44:35 +0000 + +salt (3006.0) stable; urgency=medium + + + # Removed + + * Remove and deprecate the __orchestration__ key from salt.runner and salt.wheel return data. To get it back, set features.enable_deprecated_orchestration_flag master configuration option to True. The flag will be completely removed in Salt 3008 Argon. [#59917](https://github.com/saltstack/salt/issues/59917) + * Removed distutils and replaced with setuptools, given distutils is deprecated and removed in Python 3.12 [#60476](https://github.com/saltstack/salt/issues/60476) + * Removed ``runtests`` targets from ``noxfile.py`` [#62239](https://github.com/saltstack/salt/issues/62239) + * Removed the PyObjC dependency. + + This addresses problems with building a one dir build for macOS. + It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir(because it would be tied to the macOS version where the onedir was built). + Since it's currently not being used, it's removed. [#62432](https://github.com/saltstack/salt/issues/62432) + * Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. [#63874](https://github.com/saltstack/salt/issues/63874) + + # Deprecated + + * renamed `keep_jobs`, specifying job cache TTL in hours, to `keep_jobs_seconds`, specifying TTL in seconds. 
+ `keep_jobs` will be removed in the Argon release [#55295](https://github.com/saltstack/salt/issues/55295) + * Removing all references to napalm-base which is no longer supported. [#61542](https://github.com/saltstack/salt/issues/61542) + * The 'ip_bracket' function has been moved from salt/utils/zeromq.py in salt/utils/network.py [#62009](https://github.com/saltstack/salt/issues/62009) + * The `expand_repo_def` function in `salt.modules.aptpkg` is now deprecated. It's only used in `salt.states.pkgrepo` and it has no use of being exposed to the CLI. [#62485](https://github.com/saltstack/salt/issues/62485) + * Deprecated defunct Django returner [#62644](https://github.com/saltstack/salt/issues/62644) + * Deprecate core ESXi and associated states and modules, vcenter and vsphere support in favor of Salt VMware Extensions [#62754](https://github.com/saltstack/salt/issues/62754) + * Removing manufacture grain which has been deprecated. [#62914](https://github.com/saltstack/salt/issues/62914) + * Removing deprecated utils/boto3_elasticsearch.py [#62915](https://github.com/saltstack/salt/issues/62915) + * Removing support for the now deprecated _ext_nodes from salt/master.py. [#62917](https://github.com/saltstack/salt/issues/62917) + * Deprecating the Salt Slack engine in favor of the Salt Slack Bolt Engine. [#63095](https://github.com/saltstack/salt/issues/63095) + * `salt.utils.version.StrictVersion` is now deprecated and it's use should be replaced with `salt.utils.version.Version`. [#63383](https://github.com/saltstack/salt/issues/63383) + + # Changed + + * More intelligent diffing in changes of file.serialize state. [#48609](https://github.com/saltstack/salt/issues/48609) + * Move deprecation of the neutron module to Argon. Please migrate to the neutronng module instead. [#49430](https://github.com/saltstack/salt/issues/49430) + * ``umask`` is now a global state argument, instead of only applying to ``cmd`` + states. 
[#57803](https://github.com/saltstack/salt/issues/57803) + * Update pillar.obfuscate to accept kwargs in addition to args. This is useful when passing in keyword arguments like saltenv that are then passed along to pillar.items. [#58971](https://github.com/saltstack/salt/issues/58971) + * Improve support for listing macOS brew casks [#59439](https://github.com/saltstack/salt/issues/59439) + * Add missing MariaDB Grants to mysql module. + MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. + Also improved exception handling in `grant_add` which did not log the original error message and replaced it with a generic error. [#61409](https://github.com/saltstack/salt/issues/61409) + * Use VENV_PIP_TARGET environment variable as a default target for pip if present. [#62089](https://github.com/saltstack/salt/issues/62089) + * Disabled FQDNs grains on macOS by default [#62168](https://github.com/saltstack/salt/issues/62168) + * Replaced pyroute2.IPDB with pyroute2.NDB, as the former is deprecated [#62218](https://github.com/saltstack/salt/issues/62218) + * Enhance capture of error messages for Zypper calls in zypperpkg module. [#62346](https://github.com/saltstack/salt/issues/62346) + * Removed GPG_1_3_1 check [#62895](https://github.com/saltstack/salt/issues/62895) + * Requisite state chunks now all consistently contain `__id__`, `__sls__` and `name`. [#63012](https://github.com/saltstack/salt/issues/63012) + * netapi_enable_clients option to allow enabling/disabling of clients in salt-api. + By default all clients will now be disabled. Users of salt*api will need + to update their master config to enable the clients that they use. Not adding + the netapi_enable_clients option with required clients to the master config will + disable salt*api. [#63050](https://github.com/saltstack/salt/issues/63050) + * Stop relying on `salt/_version.py` to write Salt's version. 
Instead use `salt/_version.txt` which only contains the version string. [#63383](https://github.com/saltstack/salt/issues/63383) + * Set enable_fqdns_grains to be False by default. [#63595](https://github.com/saltstack/salt/issues/63595) + * Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file. [#63710](https://github.com/saltstack/salt/issues/63710) + * Upgraded to `relenv==0.9.0` [#63883](https://github.com/saltstack/salt/issues/63883) + + # Fixed + + * Add kwargs to handle extra parameters for http.query [#36138](https://github.com/saltstack/salt/issues/36138) + * Fix mounted bind mounts getting active mount options added [#39292](https://github.com/saltstack/salt/issues/39292) + * Fix `sysctl.present` converts spaces to tabs. [#40054](https://github.com/saltstack/salt/issues/40054) + * Fixes state pkg.purged to purge removed packages on Debian family systems [#42306](https://github.com/saltstack/salt/issues/42306) + * Fix fun_args missing from syndic returns [#45823](https://github.com/saltstack/salt/issues/45823) + * Fix mount.mounted with 'mount: False' reports unmounted file system as unchanged when running with test=True [#47201](https://github.com/saltstack/salt/issues/47201) + * Issue #49310: Allow users to touch a file with Unix date of birth [#49310](https://github.com/saltstack/salt/issues/49310) + * Do not raise an exception in pkg.info_installed on nonzero return code [#51620](https://github.com/saltstack/salt/issues/51620) + * Passes the value of the force parameter from file.copy to its call to file.remove so that files with the read-only attribute are handled. [#51739](https://github.com/saltstack/salt/issues/51739) + * Fixed x509.certificate_managed creates new certificate every run in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. 
[#52167](https://github.com/saltstack/salt/issues/52167) + * Don't check for cached pillar errors on state.apply [#52354](https://github.com/saltstack/salt/issues/52354), [#57180](https://github.com/saltstack/salt/issues/57180), [#59339](https://github.com/saltstack/salt/issues/59339) + * Swapping out args and kwargs for arg and kwarg respectively in the Slack engine when the command passed is a runner. [#52400](https://github.com/saltstack/salt/issues/52400) + * Ensure when we're adding chunks to the rules when running aggregation with the iptables state module we use a copy of the chunk otherwise we end up with a recursive mess. [#53353](https://github.com/saltstack/salt/issues/53353) + * When user_create or user_remove fail, return False instead of returning the error. [#53377](https://github.com/saltstack/salt/issues/53377) + * Include sync_roster when sync_all is called. [#53914](https://github.com/saltstack/salt/issues/53914) + * Avoid warning noise in lograte.get [#53988](https://github.com/saltstack/salt/issues/53988) + * Fixed listing revoked keys with gpg.list_keys [#54347](https://github.com/saltstack/salt/issues/54347) + * Fix mount.mounted does not handle blanks properly [#54508](https://github.com/saltstack/salt/issues/54508) + * Fixed grain num_cpus get wrong CPUs count in case of inconsistent CPU numbering. [#54682](https://github.com/saltstack/salt/issues/54682) + * Fix spelling error for python_shell argument in dpkg_lower module [#54907](https://github.com/saltstack/salt/issues/54907) + * Cleaned up bytes response data before sending to non-bytes compatible returners (postgres, mysql) [#55226](https://github.com/saltstack/salt/issues/55226) + * Fixed malformed state return when testing file.managed with unavailable source file [#55269](https://github.com/saltstack/salt/issues/55269) + * Included stdout in error message for Zypper calls in zypperpkg module. 
[#56016](https://github.com/saltstack/salt/issues/56016) + * Fixed pillar.filter_by with salt-ssh [#56093](https://github.com/saltstack/salt/issues/56093) + * Fix boto_route53 issue with (multiple) VPCs. [#57139](https://github.com/saltstack/salt/issues/57139) + * Remove log from mine runner which was not used. [#57463](https://github.com/saltstack/salt/issues/57463) + * Fixed x509.read_certificate error when reading a Microsoft CA issued certificate in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#57535](https://github.com/saltstack/salt/issues/57535) + * Updating Slack engine to use slack_bolt library. [#57842](https://github.com/saltstack/salt/issues/57842) + * Fixed warning about replace=True with x509.certificate_managed in the new cryptography x509 module. [#58165](https://github.com/saltstack/salt/issues/58165) + * Fix salt.modules.pip:is_installed doesn't handle locally installed packages [#58202](https://github.com/saltstack/salt/issues/58202) + * Add missing MariaDB Grants to mysql module. MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. [#58297](https://github.com/saltstack/salt/issues/58297) + * linux_shadow: Fix cases where malformed shadow entries cause `user.present` + states to fail. [#58423](https://github.com/saltstack/salt/issues/58423) + * Fixed salt.utils.compat.cmp to work with dictionaries [#58729](https://github.com/saltstack/salt/issues/58729) + * Fixed formatting for terse output mode [#58953](https://github.com/saltstack/salt/issues/58953) + * Fixed RecursiveDictDiffer with added nested dicts [#59017](https://github.com/saltstack/salt/issues/59017) + * Fixed x509.certificate_managed has DoS effect on master in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. 
[#59169](https://github.com/saltstack/salt/issues/59169) + * Fixed saltnado websockets disconnecting immediately [#59183](https://github.com/saltstack/salt/issues/59183) + * Fixed x509.certificate_managed rolls certificates every now and then in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59315](https://github.com/saltstack/salt/issues/59315) + * Fix postgres_privileges.present not idempotent for functions [#59585](https://github.com/saltstack/salt/issues/59585) + * Fixed influxdb_continuous_query.present state to provide the client args to the underlying module on create. [#59766](https://github.com/saltstack/salt/issues/59766) + * Warn when using insecure (http:// based) key_urls for apt-based systems in pkgrepo.managed, and add a kwarg that determines the validity of such a url. [#59786](https://github.com/saltstack/salt/issues/59786) + * add load balancing policy default option and ensure the module can be executed with arguments from CLI [#59909](https://github.com/saltstack/salt/issues/59909) + * Fix salt-ssh when using imports with extra-filerefs. [#60003](https://github.com/saltstack/salt/issues/60003) + * Fixed cache directory corruption startup error [#60170](https://github.com/saltstack/salt/issues/60170) + * Update docs remove dry_run in docstring of file.blockreplace state. [#60227](https://github.com/saltstack/salt/issues/60227) + * Adds Parrot to OS_Family_Map in grains. [#60249](https://github.com/saltstack/salt/issues/60249) + * Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions [#60365](https://github.com/saltstack/salt/issues/60365) + * Use return code in iptables --check to verify rule exists. 
[#60467](https://github.com/saltstack/salt/issues/60467) + * Fix regression pip.installed does not pass env_vars when calling pip.list [#60557](https://github.com/saltstack/salt/issues/60557) + * Fix xfs module when additional output included in mkfs.xfs command. [#60853](https://github.com/saltstack/salt/issues/60853) + * Fixed parsing new format of terraform states in roster.terraform [#60915](https://github.com/saltstack/salt/issues/60915) + * Fixed recognizing installed ARMv7 rpm packages in compatible architectures. [#60994](https://github.com/saltstack/salt/issues/60994) + * Fixing changes dict in pkg state to be consistent when installing and test=True. [#60995](https://github.com/saltstack/salt/issues/60995) + * Fix cron.present duplicating entries when changing timespec to special. [#60997](https://github.com/saltstack/salt/issues/60997) + * Made salt-ssh respect --wipe again [#61083](https://github.com/saltstack/salt/issues/61083) + * state.orchestrate_single only passes a pillar if it is set to the state + function. This allows it to be used with state functions that don't accept a + pillar keyword argument. [#61092](https://github.com/saltstack/salt/issues/61092) + * Fix ipset state when the comment kwarg is set. [#61122](https://github.com/saltstack/salt/issues/61122) + * Fix issue with archive.unzip where the password was not being encoded for the extract function [#61422](https://github.com/saltstack/salt/issues/61422) + * Some Linux distributions (like AlmaLinux, Astra Linux, Debian, Mendel, Linux + Mint, Pop!_OS, Rocky Linux) report different `oscodename`, `osfullname`, + `osfinger` grains if lsb-release is installed or not. They have been changed to + only derive these OS grains from `/etc/os-release`. [#61618](https://github.com/saltstack/salt/issues/61618) + * Pop!_OS uses the full version (YY.MM) in the osfinger grain now, not just the year. This allows differentiating for example between 20.04 and 20.10.
[#61619](https://github.com/saltstack/salt/issues/61619) + * Fix ssh config roster to correctly parse the ssh config files that contain spaces. [#61650](https://github.com/saltstack/salt/issues/61650) + * Fix SoftLayer configuration not raising an exception when a domain is missing [#61727](https://github.com/saltstack/salt/issues/61727) + * Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry [#61789](https://github.com/saltstack/salt/issues/61789) + * Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function. [#61805](https://github.com/saltstack/salt/issues/61805) + * Fixed malformed state return when merge-serializing to an improperly formatted file [#61814](https://github.com/saltstack/salt/issues/61814) + * Made cmdmod._run[_all]_quiet work during minion startup on MacOS with runas specified (which fixed mac_service) [#61816](https://github.com/saltstack/salt/issues/61816) + * When deleting the vault cache, also delete from the session cache [#61821](https://github.com/saltstack/salt/issues/61821) + * Ignore errors on reading license info with dpkg_lowpkg to prevent tracebacks on getting package information. 
[#61827](https://github.com/saltstack/salt/issues/61827) + * win_lgpo: Display conflicting policy names when more than one policy is found [#61859](https://github.com/saltstack/salt/issues/61859) + * win_lgpo: Fixed intermittent KeyError when getting policy setting using lgpo.get_policy [#61860](https://github.com/saltstack/salt/issues/61860) + * Fixed listing minions on OpenBSD [#61966](https://github.com/saltstack/salt/issues/61966) + * Make Salt to return an error on "pkg" modules and states when targeting duplicated package names [#62019](https://github.com/saltstack/salt/issues/62019) + * Fix return of REST-returned permissions when auth_list is set [#62022](https://github.com/saltstack/salt/issues/62022) + * Normalize package names once on using pkg.installed/removed with yum to make it possible to install packages with the name containing a part similar to a name of architecture. [#62029](https://github.com/saltstack/salt/issues/62029) + * Fix inconsitency regarding name and pkgs parameters between zypperpkg.upgrade() and yumpkg.upgrade() [#62030](https://github.com/saltstack/salt/issues/62030) + * Fix attr=all handling in pkg.list_pkgs() (yum/zypper). [#62032](https://github.com/saltstack/salt/issues/62032) + * Fixed the humanname being ignored in pkgrepo.managed on openSUSE Leap [#62053](https://github.com/saltstack/salt/issues/62053) + * Fixed issue with some LGPO policies having whitespace at the beginning or end of the element alias [#62058](https://github.com/saltstack/salt/issues/62058) + * Fix ordering of args to libcloud_storage.download_object module [#62074](https://github.com/saltstack/salt/issues/62074) + * Ignore extend declarations in sls files that are excluded. [#62082](https://github.com/saltstack/salt/issues/62082) + * Remove leftover usage of impacket [#62101](https://github.com/saltstack/salt/issues/62101) + * Pass executable path from _get_path_exec() is used when calling the program. + The $HOME env is no longer modified globally. 
+ Only trailing newlines are stripped from the fetched secret. + Pass process arguments are handled in a secure way. [#62120](https://github.com/saltstack/salt/issues/62120) + * Ignore some command return codes in openbsdrcctl_service to prevent spurious errors [#62131](https://github.com/saltstack/salt/issues/62131) + * Fixed extra period in filename output in tls module. Instead of "server.crt." it will now be "server.crt". [#62139](https://github.com/saltstack/salt/issues/62139) + * Make sure lingering PAexec-*.exe files in the Windows directory are cleaned up [#62152](https://github.com/saltstack/salt/issues/62152) + * Restored Salt's DeprecationWarnings [#62185](https://github.com/saltstack/salt/issues/62185) + * Fixed issue with forward slashes on Windows with file.recurse and clean=True [#62197](https://github.com/saltstack/salt/issues/62197) + * Recognize OSMC as Debian-based [#62198](https://github.com/saltstack/salt/issues/62198) + * Fixed Zypper module failing on RPM lock file being temporarily unavailable. [#62204](https://github.com/saltstack/salt/issues/62204) + * Improved error handling and diagnostics in the proxmox salt-cloud driver [#62211](https://github.com/saltstack/salt/issues/62211) + * Added EndeavourOS to the Arch os_family. [#62220](https://github.com/saltstack/salt/issues/62220) + * Fix salt-ssh not detecting `platform-python` as a valid interpreter on EL8 [#62235](https://github.com/saltstack/salt/issues/62235) + * Fix pkg.version_cmp on openEuler and a few other os flavors. [#62248](https://github.com/saltstack/salt/issues/62248) + * Fix localhost detection in glusterfs.peers [#62273](https://github.com/saltstack/salt/issues/62273) + * Fix Salt Package Manager (SPM) exception when calling spm create_repo . 
[#62281](https://github.com/saltstack/salt/issues/62281) + * Fix matcher slowness due to loader invocation [#62283](https://github.com/saltstack/salt/issues/62283) + * Fixes the Puppet module for non-aio Puppet packages for example running the Puppet module on FreeBSD. [#62323](https://github.com/saltstack/salt/issues/62323) + * Issue 62334: Displays a debug log message instead of an error log message when the publisher fails to connect [#62334](https://github.com/saltstack/salt/issues/62334) + * Fix pyobjects renderer access to opts and sls [#62336](https://github.com/saltstack/salt/issues/62336) + * Fix use of random shuffle and sample functions as Jinja filters [#62372](https://github.com/saltstack/salt/issues/62372) + * Fix groups with duplicate GIDs are not returned by get_group_list [#62377](https://github.com/saltstack/salt/issues/62377) + * Fix the "zpool.present" state when enabling zpool features that are already active. [#62390](https://github.com/saltstack/salt/issues/62390) + * Fix ability to execute remote file client methods in saltcheck [#62398](https://github.com/saltstack/salt/issues/62398) + * Update all platforms to use pycparser 2.21 or greater for Py 3.9 or higher, fixes fips fault with openssl v3.x [#62400](https://github.com/saltstack/salt/issues/62400) + * Due to changes in the Netmiko library for the exception paths, need to check the version of Netmiko python library and then import the exceptions from different locations depending on the result. [#62405](https://github.com/saltstack/salt/issues/62405) + * When using preq on a state, then prereq state will first be run with test=True to determine if there are changes. When there are changes, the state with the prereq option will be run prior to the prereq state. If this state fails then the prereq state will not run and the state output uses the test=True run. However, the proposed changes are included for the prereq state are included from the test=True run. 
We should pull those out as there weren't actually changes since the prereq state did not run. [#62408](https://github.com/saltstack/salt/issues/62408) + * Added directory mode for file.copy with makedirs [#62426](https://github.com/saltstack/salt/issues/62426) + * Provide better error handling in the various napalm proxy minion functions when the device is not accessible. [#62435](https://github.com/saltstack/salt/issues/62435) + * When handling aggregation, change the order to ensure that the requisites are aggregated first and then the state functions are aggregated. Caching whether aggregate functions are available for particular states so we don't need to attempt to load them everytime. [#62439](https://github.com/saltstack/salt/issues/62439) + * The patch allows to boostrap kubernetes clusters in the version above 1.13 via salt module [#62451](https://github.com/saltstack/salt/issues/62451) + * sysctl.persist now updates the in-memory value on FreeBSD even if the on-disk value was already correct. [#62461](https://github.com/saltstack/salt/issues/62461) + * Fixed parsing CDROM apt sources [#62474](https://github.com/saltstack/salt/issues/62474) + * Update sanitizing masking for Salt SSH to include additional password like strings. [#62483](https://github.com/saltstack/salt/issues/62483) + * Fix user/group checking on file state functions in the test mode. [#62499](https://github.com/saltstack/salt/issues/62499) + * Fix user.present to allow removing groups using optional_groups parameter and enforcing idempotent group membership. [#62502](https://github.com/saltstack/salt/issues/62502) + * Fix possible tracebacks if there is a package with '------' or '======' in the description is installed on the Debian based minion. 
[#62519](https://github.com/saltstack/salt/issues/62519) + * Fixed the omitted "pool" parameter when cloning a VM with the proxmox salt-cloud driver [#62521](https://github.com/saltstack/salt/issues/62521) + * Fix rendering of pyobjects states in saltcheck [#62523](https://github.com/saltstack/salt/issues/62523) + * Fixes pillar where a corrupted CacheDisk file forces the pillar to be rebuilt [#62527](https://github.com/saltstack/salt/issues/62527) + * Use str() method instead of repo_line for when python3-apt is installed or not in aptpkg.py. [#62546](https://github.com/saltstack/salt/issues/62546) + * Remove the connection_timeout from netmiko_connection_args before netmiko_connection_args is added to __context__["netmiko_device"]["args"] which is passed along to the Netmiko library. [#62547](https://github.com/saltstack/salt/issues/62547) + * Fix order specific mount.mounted options for persist [#62556](https://github.com/saltstack/salt/issues/62556) + * Fixed salt-cloud cloning a proxmox VM with a specified new vmid. [#62558](https://github.com/saltstack/salt/issues/62558) + * Fix runas with cmd module when using the onedir bundled packages [#62565](https://github.com/saltstack/salt/issues/62565) + * Update setproctitle version for all platforms [#62576](https://github.com/saltstack/salt/issues/62576) + * Fixed missing parameters when cloning a VM with the proxmox salt-cloud driver [#62580](https://github.com/saltstack/salt/issues/62580) + * Handle PermissionError when importing crypt when FIPS is enabled. [#62587](https://github.com/saltstack/salt/issues/62587) + * Correctly reraise exceptions in states.http [#62595](https://github.com/saltstack/salt/issues/62595) + * Fixed syndic eauth. Now jobs will be published when a valid eauth user is targeting allowed minions/functions. [#62618](https://github.com/saltstack/salt/issues/62618) + * updated rest_cherry/app to properly detect arg sent as a string as curl will do when only one arg is supplied. 
[#62624](https://github.com/saltstack/salt/issues/62624) + * Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline [#62633](https://github.com/saltstack/salt/issues/62633) + * Fixed vault ext pillar return data for KV v2 [#62651](https://github.com/saltstack/salt/issues/62651) + * Fix saltcheck _get_top_states doesn't pass saltenv to state.show_top [#62654](https://github.com/saltstack/salt/issues/62654) + * Fix groupadd.* functions hard code relative command name [#62657](https://github.com/saltstack/salt/issues/62657) + * Fixed pdbedit.create trying to use a bytes-like hash as string. [#62670](https://github.com/saltstack/salt/issues/62670) + * Fix dependency on legacy boto module in boto3 modules [#62672](https://github.com/saltstack/salt/issues/62672) + * Modified "_get_flags" function so that it returns regex flags instead of integers [#62676](https://github.com/saltstack/salt/issues/62676) + * Change startup ReqServer log messages from error to info level. [#62728](https://github.com/saltstack/salt/issues/62728) + * Fix kmod.* functions hard code relative command name [#62772](https://github.com/saltstack/salt/issues/62772) + * Remove mako as a dependency in Windows and macOS. [#62785](https://github.com/saltstack/salt/issues/62785) + * Fix mac_brew_pkg to work with null taps [#62793](https://github.com/saltstack/salt/issues/62793) + * Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" items is being treated as a schedule item. [#62795](https://github.com/saltstack/salt/issues/62795) + * Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode [#62817](https://github.com/saltstack/salt/issues/62817) + * Include UID and GID checks in modules.file.check_perms as well as comparing + ownership by username and group name.
[#62818](https://github.com/saltstack/salt/issues/62818) + * Fix presence events on TCP transport by removing a client's presence when minion disconnects from publish channel correctly [#62826](https://github.com/saltstack/salt/issues/62826) + * Remove Azure deprecation messages from functions that always run w/ salt-cloud [#62845](https://github.com/saltstack/salt/issues/62845) + * Use select instead of iterating over entrypoints as a dictionary for importlib_metadata>=5.0.0 [#62854](https://github.com/saltstack/salt/issues/62854) + * Fixed master job scheduler using when [#62858](https://github.com/saltstack/salt/issues/62858) + * LGPO: Added support for missing domain controller policies: VulnerableChannelAllowList and LdapEnforceChannelBinding [#62873](https://github.com/saltstack/salt/issues/62873) + * Fix unnecessarily complex gce metadata grains code to use Google's metadata service more effectively. [#62878](https://github.com/saltstack/salt/issues/62878) + * Fixed dockermod version_info function for docker-py 6.0.0+ [#62882](https://github.com/saltstack/salt/issues/62882) + * Moving setting the LOAD_BALANCING_POLICY_MAP dictionary into the try except block that determines if the cassandra_cql module should be made available. [#62886](https://github.com/saltstack/salt/issues/62886) + * Updating various MongoDB module functions to work with latest version of pymongo. [#62900](https://github.com/saltstack/salt/issues/62900) + * Restored channel for Syndic minions to send job returns to the Salt master. [#62933](https://github.com/saltstack/salt/issues/62933) + * removed _resolve_deps as it required a library that is not generally available, and switched to apt-get for everything as that can auto resolve dependencies. 
[#62934](https://github.com/saltstack/salt/issues/62934) + * Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang [#62937](https://github.com/saltstack/salt/issues/62937) + * Allow root user to modify crontab lines for non-root users (except AIX and Solaris). Align crontab line changes with the file ones and also with listing crontab. [#62940](https://github.com/saltstack/salt/issues/62940) + * Fix systemd_service.* functions hard code relative command name [#62942](https://github.com/saltstack/salt/issues/62942) + * Fix file.symlink backupname operation can copy remote contents to local disk [#62953](https://github.com/saltstack/salt/issues/62953) + * Issue #62968: Fix issue where cloud deployments were putting the keys in the wrong location on Windows hosts [#62968](https://github.com/saltstack/salt/issues/62968) + * Fixed gpg_passphrase issue with gpg decrypt/encrypt functions [#62977](https://github.com/saltstack/salt/issues/62977) + * Fix file.tidied FileNotFoundError [#62986](https://github.com/saltstack/salt/issues/62986) + * Fixed bug where module.wait states were detected as running legacy module.run syntax [#62988](https://github.com/saltstack/salt/issues/62988) + * Fixed issue with win_wua module where it wouldn't load if the CryptSvc was set to Manual start [#62993](https://github.com/saltstack/salt/issues/62993) + * The `__opts__` dunder dictionary is now added to the loader's `pack` if not + already present, which makes it accessible via the + `salt.loader.context.NamedLoaderContext` class. 
[#63013](https://github.com/saltstack/salt/issues/63013) + * Issue #63024: Fix issue where grains and config data were being placed in the wrong location on Windows hosts [#63024](https://github.com/saltstack/salt/issues/63024) + * Fix btrfs.subvolume_snapshot command failing [#63025](https://github.com/saltstack/salt/issues/63025) + * Fix file.retention_schedule always reports changes [#63033](https://github.com/saltstack/salt/issues/63033) + * Fix mongo authentication for mongo ext_pillar and mongo returner + + This fix also includes the ability to use the mongo connection string for mongo ext_pillar [#63058](https://github.com/saltstack/salt/issues/63058) + * Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. [#63103](https://github.com/saltstack/salt/issues/63103) + * TCP transport documentation now contains proper master/minion-side filtering information [#63120](https://github.com/saltstack/salt/issues/63120) + * Fixed gpg.verify does not respect gnupghome [#63145](https://github.com/saltstack/salt/issues/63145) + * User responsible for the runner is now correctly reported in the events on the event bus for the runner. [#63148](https://github.com/saltstack/salt/issues/63148) + * Made pillar cache pass extra minion data as well [#63208](https://github.com/saltstack/salt/issues/63208) + * Fix serious performance issues with the file.tidied module [#63231](https://github.com/saltstack/salt/issues/63231) + * Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. [#63317](https://github.com/saltstack/salt/issues/63317) + * Import StrictVersion and LooseVersion from setuptools.distutils.version or setuptools._distutils.version, if the first is not available [#63350](https://github.com/saltstack/salt/issues/63350) + * ``service.status`` on Windows no longer throws a CommandExecutionError if + the service is not found on the system. It now returns "Not Found" instead. 
[#63577](https://github.com/saltstack/salt/issues/63577) + * When the shell is passed as powershell or pwsh, only wrap the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended. [#63590](https://github.com/saltstack/salt/issues/63590) + * LGPO: Added support for "Relax minimum password length limits" [#63596](https://github.com/saltstack/salt/issues/63596) + * Fixed the ability to set a scheduled task to auto delete if not scheduled to run again (``delete_after``) [#63650](https://github.com/saltstack/salt/issues/63650) + * When a job is disabled only increase its _next_fire_time value if the job would have run at the current time, eg. the current _next_fire_time == now. [#63699](https://github.com/saltstack/salt/issues/63699) + * Have salt.template.compile_template_str clean up its temp files. [#63724](https://github.com/saltstack/salt/issues/63724) + * Check file is not empty before attempting to read pillar disk cache file [#63729](https://github.com/saltstack/salt/issues/63729) + * Fixed an issue with generating fingerprints for public keys with different line endings [#63742](https://github.com/saltstack/salt/issues/63742) + * Add `fileserver_interval` and `maintenance_interval` master configuration options. These options control how often to restart the FileServerUpdate and Maintenance processes. Some file server and pillar configurations are known to cause memory leaks over time. A notable example of this is configurations that use pygit2. Salt can not guarantee dependency libraries like pygit2 won't leak memory. Restarting any long running processes that use pygit2 guarantees we can keep the master's memory usage in check. [#63747](https://github.com/saltstack/salt/issues/63747) + * mac_xattr.list and mac_xattr.read will replace undecodable bytes to avoid raising CommandExecutionError. 
[#63779](https://github.com/saltstack/salt/issues/63779) + * Change default GPG keyserver from pgp.mit.edu to keys.openpgp.org. [#63806](https://github.com/saltstack/salt/issues/63806) + * fix cherrypy 400 error output to be less generic. [#63835](https://github.com/saltstack/salt/issues/63835) + * Ensure kwargs is passed along to _call_apt when passed into install function. [#63847](https://github.com/saltstack/salt/issues/63847) + * remove eval and update logging to be more informative on bad config [#63879](https://github.com/saltstack/salt/issues/63879) + * add linux_distribution to util to stop dep warning [#63904](https://github.com/saltstack/salt/issues/63904) + * Fix ValueError when trying to close fileclient. Remove usage of __del__ and close the fileclient properly. [#63920](https://github.com/saltstack/salt/issues/63920) + * Handle the situation when a sub proxy minion does not init properly, eg. an exception happens, and the sub proxy object is not available. [#63923](https://github.com/saltstack/salt/issues/63923) + * Clarifying documentation for extension_modules configuration option. [#63929](https://github.com/saltstack/salt/issues/63929) + * Windows pkg module now properly handles versions containing strings [#63935](https://github.com/saltstack/salt/issues/63935) + * Handle the scenario when the check_cmd requisite is used with a state function when the state has a local check_cmd function but that function isn't used by that function. [#63948](https://github.com/saltstack/salt/issues/63948) + * Issue #63981: Allow users to pass verify_ssl to pkg.install/pkg.installed on Windows [#63981](https://github.com/saltstack/salt/issues/63981) + * Hardened permissions on workers.ipc and master_event_pub.ipc. 
[#64063](https://github.com/saltstack/salt/issues/64063) + + # Added + + * Introduce a `LIB_STATE_DIR` syspaths variable which defaults to `CONFIG_DIR`, + but can be individually customized during installation by specifying + `--salt-lib-state-dir` during installation. Change the default `pki_dir` to + `<LIB_STATE_DIR>/pki/master` (for the master) and `<LIB_STATE_DIR>/pki/minion` + (for the minion). [#3396](https://github.com/saltstack/salt/issues/3396) + * Allow users to enable 'queue=True' for all state runs via config file [#31468](https://github.com/saltstack/salt/issues/31468) + * Added pillar templating to vault policies [#43287](https://github.com/saltstack/salt/issues/43287) + * Add support for NVMeF as a transport protocol for hosts in a Pure Storage FlashArray [#51088](https://github.com/saltstack/salt/issues/51088) + * A new salt-ssh roster that generates a roster by parsing a known_hosts file. [#54679](https://github.com/saltstack/salt/issues/54679) + * Added Windows Event Viewer support [#54713](https://github.com/saltstack/salt/issues/54713) + * Added the win_lgpo_reg state and execution modules which will allow registry based group policy to be set directly in the Registry.pol file [#56013](https://github.com/saltstack/salt/issues/56013) + * Added resource tagging functions to boto_dynamodb execution module [#57500](https://github.com/saltstack/salt/issues/57500) + * Added `openvswitch_db` state module and functions `bridge_to_parent`, + `bridge_to_vlan`, `db_get`, and `db_set` to the `openvswitch` execution module. + Also added optional `parent` and `vlan` parameters to the + `openvswitch_bridge.present` state module function and the + `openvswitch.bridge_create` execution module function. [#58986](https://github.com/saltstack/salt/issues/58986) + * State module to manage SysFS attributes [#60154](https://github.com/saltstack/salt/issues/60154) + * Added ability for `salt.wait_for_event` to handle `event_id`s that have a list value. 
[#60430](https://github.com/saltstack/salt/issues/60430) + * Added support for Linux ppc64le core grains (cpu_model, virtual, productname, manufacturer, serialnumber) and arm core grains (serialnumber, productname) [#60518](https://github.com/saltstack/salt/issues/60518) + * Added autostart option to virt.defined and virt.running states, along with virt.update execution modules. [#60700](https://github.com/saltstack/salt/issues/60700) + * Added .0 back to our versioning scheme for future versions (e.g. 3006.0) [#60722](https://github.com/saltstack/salt/issues/60722) + * Initial work to allow parallel startup of proxy minions when used as sub proxies with Deltaproxy. [#61153](https://github.com/saltstack/salt/issues/61153) + * Added node label support for GCE [#61245](https://github.com/saltstack/salt/issues/61245) + * Support the --priority flag when adding sources to Chocolatey. [#61319](https://github.com/saltstack/salt/issues/61319) + * Add namespace option to ext_pillar.http_json [#61335](https://github.com/saltstack/salt/issues/61335) + * Added a filter function to ps module to get a list of processes on a minion according to their state. [#61420](https://github.com/saltstack/salt/issues/61420) + * Add postgres.timeout option to postgres module for limiting postgres query times [#61433](https://github.com/saltstack/salt/issues/61433) + * Added new optional vault option, ``config_location``. This can be either ``master`` or ``local`` and defines where vault will look for connection details, either requesting them from the master or using the local config. [#61857](https://github.com/saltstack/salt/issues/61857) + * Add ipwrap() jinja filter to wrap IPv6 addresses with brackets. [#61931](https://github.com/saltstack/salt/issues/61931) + * 'tcp' transport is now available in ipv6-only network [#62009](https://github.com/saltstack/salt/issues/62009) + * Add `diff_attr` parameter to pkg.upgrade() (zypper/yum). 
[#62031](https://github.com/saltstack/salt/issues/62031) + * Config option pass_variable_prefix allows to distinguish variables that contain paths to pass secrets. + Config option pass_strict_fetch allows to error out when a secret cannot be fetched from pass. + Config option pass_dir allows setting the PASSWORD_STORE_DIR env for pass. + Config option pass_gnupghome allows setting the $GNUPGHOME env for pass. [#62120](https://github.com/saltstack/salt/issues/62120) + * Add file.pruned state and expanded file.rmdir exec module functionality [#62178](https://github.com/saltstack/salt/issues/62178) + * Added "dig.PTR" function to resolve PTR records for IPs, as well as tests and documentation [#62275](https://github.com/saltstack/salt/issues/62275) + * Added the ability to remove a KB using the DISM state/execution modules [#62366](https://github.com/saltstack/salt/issues/62366) + * Add " python" subcommand to allow execution or arbitrary scripts via bundled Python runtime [#62381](https://github.com/saltstack/salt/issues/62381) + * Add ability to provide conditions which convert normal state actions to no-op when true [#62446](https://github.com/saltstack/salt/issues/62446) + * Added debug log messages displaying the command being run when installing packages on Windows [#62480](https://github.com/saltstack/salt/issues/62480) + * Add biosvendor grain [#62496](https://github.com/saltstack/salt/issues/62496) + * Add ifelse Jinja function as found in CFEngine [#62508](https://github.com/saltstack/salt/issues/62508) + * Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. 
[#62539](https://github.com/saltstack/salt/issues/62539) + * Adds __env__substitution to ext_pillar.stack; followup of #61531, improved exception handling for stacked template (jinja) template rendering and yaml parsing in ext_pillar.stack [#62578](https://github.com/saltstack/salt/issues/62578) + * Increase file.tidied flexibility with regard to age and size [#62678](https://github.com/saltstack/salt/issues/62678) + * Added "connected_devices" feature to netbox pillar module. It contains extra information about devices connected to the minion [#62761](https://github.com/saltstack/salt/issues/62761) + * Add atomic file operation for symlink changes [#62768](https://github.com/saltstack/salt/issues/62768) + * Add password/account locking/unlocking in user.present state on supported operating systems [#62856](https://github.com/saltstack/salt/issues/62856) + * Added onchange configuration for script engine [#62867](https://github.com/saltstack/salt/issues/62867) + * Added output and bare functionality to export_key gpg module function [#62978](https://github.com/saltstack/salt/issues/62978) + * Add keyvalue serializer for environment files [#62983](https://github.com/saltstack/salt/issues/62983) + * Add ability to ignore symlinks in file.tidied [#63042](https://github.com/saltstack/salt/issues/63042) + * salt-cloud support IMDSv2 tokens when using 'use-instance-role-credentials' [#63067](https://github.com/saltstack/salt/issues/63067) + * Fix running fast tests twice and add git labels to suite. 
[#63081](https://github.com/saltstack/salt/issues/63081) + * Add ability for file.symlink to not set ownership on existing links [#63093](https://github.com/saltstack/salt/issues/63093) + * Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation [#63095](https://github.com/saltstack/salt/issues/63095) + * Add functions that will return the underlying block device, mount point, and filesystem type for a given path [#63098](https://github.com/saltstack/salt/issues/63098) + * Add ethtool execution and state module functions for pause [#63128](https://github.com/saltstack/salt/issues/63128) + * Add boardname grain [#63131](https://github.com/saltstack/salt/issues/63131) + * Added management of ECDSA/EdDSA private keys with x509 modules in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63248](https://github.com/saltstack/salt/issues/63248) + * Added x509 modules support for different output formats in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63249](https://github.com/saltstack/salt/issues/63249) + * Added deprecation_warning test state for ensuring that deprecation warnings are correctly emitted. [#63315](https://github.com/saltstack/salt/issues/63315) + * Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id. + This allows users to enable state_events on a per use basis rather than having to + enable them globally for all state runs. [#63316](https://github.com/saltstack/salt/issues/63316) + * Allow max queue size setting for state runs to prevent performance problems from queue growth [#63356](https://github.com/saltstack/salt/issues/63356) + * Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) + * Include the version of `relenv` in the versions report. 
[#63827](https://github.com/saltstack/salt/issues/63827) + * Added debug log messages displaying the command being run when removing packages on Windows [#63866](https://github.com/saltstack/salt/issues/63866) + * Adding the ability to exclude arguments from a state that end up passed to cmd.retcode when requisites such as onlyif or unless are used. [#63956](https://github.com/saltstack/salt/issues/63956) + * Add --next-release argument to salt/version.py, which prints the next upcoming release. [#64023](https://github.com/saltstack/salt/issues/64023) + + # Security + + * Upgrade Requirements Due to Security Issues. + + * Upgrade to `cryptography>=39.0.1` due to: + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. + * Update to `markdown-it-py==2.2.0` due to: + * https://github.com/advisories/GHSA-jrwr-5x3p-hvc3 + * https://github.com/advisories/GHSA-vrjv-mxr7-vjf8 [#63882](https://github.com/saltstack/salt/issues/63882) + + + -- Salt Project Packaging Tue, 18 Apr 2023 20:56:10 +0000 + +salt (1:3006.0rc3) stable; urgency=medium + + + # Removed + + * Remove and deprecate the __orchestration__ key from salt.runner and salt.wheel return data. To get it back, set features.enable_deprecated_orchestration_flag master configuration option to True. The flag will be completely removed in Salt 3008 Argon. [#59917](https://github.com/saltstack/salt/issues/59917) + * Removed distutils and replaced with setuptools, given distutils is deprecated and removed in Python 3.12 [#60476](https://github.com/saltstack/salt/issues/60476) + * Removed ``runtests`` targets from ``noxfile.py`` [#62239](https://github.com/saltstack/salt/issues/62239) + * Removed the PyObjC dependency. + + This addresses problems with building a one dir build for macOS. 
+ It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir(because it would be tied to the macOS version where the onedir was built). + Since it's currently not being used, it's removed. [#62432](https://github.com/saltstack/salt/issues/62432) + * Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. [#63874](https://github.com/saltstack/salt/issues/63874) + + # Deprecated + + * renamed `keep_jobs`, specifying job cache TTL in hours, to `keep_jobs_seconds`, specifying TTL in seconds. + `keep_jobs` will be removed in the Argon release [#55295](https://github.com/saltstack/salt/issues/55295) + * Removing all references to napalm-base which is no longer supported. [#61542](https://github.com/saltstack/salt/issues/61542) + * The 'ip_bracket' function has been moved from salt/utils/zeromq.py in salt/utils/network.py [#62009](https://github.com/saltstack/salt/issues/62009) + * The `expand_repo_def` function in `salt.modules.aptpkg` is now deprecated. It's only used in `salt.states.pkgrepo` and it has no use of being exposed to the CLI. [#62485](https://github.com/saltstack/salt/issues/62485) + * Deprecated defunct Django returner [#62644](https://github.com/saltstack/salt/issues/62644) + * Deprecate core ESXi and associated states and modules, vcenter and vsphere support in favor of Salt VMware Extensions [#62754](https://github.com/saltstack/salt/issues/62754) + * Removing manufacture grain which has been deprecated. [#62914](https://github.com/saltstack/salt/issues/62914) + * Removing deprecated utils/boto3_elasticsearch.py [#62915](https://github.com/saltstack/salt/issues/62915) + * Removing support for the now deprecated _ext_nodes from salt/master.py. [#62917](https://github.com/saltstack/salt/issues/62917) + * Deprecating the Salt Slack engine in favor of the Salt Slack Bolt Engine. 
[#63095](https://github.com/saltstack/salt/issues/63095) + * `salt.utils.version.StrictVersion` is now deprecated and it's use should be replaced with `salt.utils.version.Version`. [#63383](https://github.com/saltstack/salt/issues/63383) + + # Changed + + * More intelligent diffing in changes of file.serialize state. [#48609](https://github.com/saltstack/salt/issues/48609) + * Move deprecation of the neutron module to Argon. Please migrate to the neutronng module instead. [#49430](https://github.com/saltstack/salt/issues/49430) + * ``umask`` is now a global state argument, instead of only applying to ``cmd`` + states. [#57803](https://github.com/saltstack/salt/issues/57803) + * Update pillar.obfuscate to accept kwargs in addition to args. This is useful when passing in keyword arguments like saltenv that are then passed along to pillar.items. [#58971](https://github.com/saltstack/salt/issues/58971) + * Improve support for listing macOS brew casks [#59439](https://github.com/saltstack/salt/issues/59439) + * Add missing MariaDB Grants to mysql module. + MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. + Also improved exception handling in `grant_add` which did not log the original error message and replaced it with a generic error. [#61409](https://github.com/saltstack/salt/issues/61409) + * Use VENV_PIP_TARGET environment variable as a default target for pip if present. [#62089](https://github.com/saltstack/salt/issues/62089) + * Disabled FQDNs grains on macOS by default [#62168](https://github.com/saltstack/salt/issues/62168) + * Replaced pyroute2.IPDB with pyroute2.NDB, as the former is deprecated [#62218](https://github.com/saltstack/salt/issues/62218) + * Enhance capture of error messages for Zypper calls in zypperpkg module. 
[#62346](https://github.com/saltstack/salt/issues/62346) + * Removed GPG_1_3_1 check [#62895](https://github.com/saltstack/salt/issues/62895) + * Requisite state chunks now all consistently contain `__id__`, `__sls__` and `name`. [#63012](https://github.com/saltstack/salt/issues/63012) + * netapi_enable_clients option to allow enabling/disabling of clients in salt-api. + By default all clients will now be disabled. Users of salt-api will need + to update their master config to enable the clients that they use. Not adding + the netapi_enable_clients option with required clients to the master config will + disable salt-api. [#63050](https://github.com/saltstack/salt/issues/63050) + * Stop relying on `salt/_version.py` to write Salt's version. Instead use `salt/_version.txt` which only contains the version string. [#63383](https://github.com/saltstack/salt/issues/63383) + * Set enable_fqdns_grains to be False by default. [#63595](https://github.com/saltstack/salt/issues/63595) + * Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file. [#63710](https://github.com/saltstack/salt/issues/63710) + * Upgraded to `relenv==0.9.0` [#63883](https://github.com/saltstack/salt/issues/63883) + + # Fixed + + * Add kwargs to handle extra parameters for http.query [#36138](https://github.com/saltstack/salt/issues/36138) + * Fix mounted bind mounts getting active mount options added [#39292](https://github.com/saltstack/salt/issues/39292) + * Fix `sysctl.present` converts spaces to tabs. 
[#40054](https://github.com/saltstack/salt/issues/40054) + * Fixes state pkg.purged to purge removed packages on Debian family systems [#42306](https://github.com/saltstack/salt/issues/42306) + * Fix fun_args missing from syndic returns [#45823](https://github.com/saltstack/salt/issues/45823) + * Fix mount.mounted with 'mount: False' reports unmounted file system as unchanged when running with test=True [#47201](https://github.com/saltstack/salt/issues/47201) + * Issue #49310: Allow users to touch a file with Unix date of birth [#49310](https://github.com/saltstack/salt/issues/49310) + * Do not raise an exception in pkg.info_installed on nonzero return code [#51620](https://github.com/saltstack/salt/issues/51620) + * Passes the value of the force parameter from file.copy to its call to file.remove so that files with the read-only attribute are handled. [#51739](https://github.com/saltstack/salt/issues/51739) + * Fixed x509.certificate_managed creates new certificate every run in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#52167](https://github.com/saltstack/salt/issues/52167) + * Don't check for cached pillar errors on state.apply [#52354](https://github.com/saltstack/salt/issues/52354), [#57180](https://github.com/saltstack/salt/issues/57180), [#59339](https://github.com/saltstack/salt/issues/59339) + * Swapping out args and kwargs for arg and kwarg respectively in the Slack engine when the command passed is a runner. [#52400](https://github.com/saltstack/salt/issues/52400) + * Ensure when we're adding chunks to the rules when running aggregation with the iptables state module we use a copy of the chunk otherwise we end up with a recursive mess. [#53353](https://github.com/saltstack/salt/issues/53353) + * When user_create or user_remove fail, return False instead of returning the error. [#53377](https://github.com/saltstack/salt/issues/53377) + * Include sync_roster when sync_all is called. 
[#53914](https://github.com/saltstack/salt/issues/53914) + * Avoid warning noise in lograte.get [#53988](https://github.com/saltstack/salt/issues/53988) + * Fixed listing revoked keys with gpg.list_keys [#54347](https://github.com/saltstack/salt/issues/54347) + * Fix mount.mounted does not handle blanks properly [#54508](https://github.com/saltstack/salt/issues/54508) + * Fixed grain num_cpus get wrong CPUs count in case of inconsistent CPU numbering. [#54682](https://github.com/saltstack/salt/issues/54682) + * Fix spelling error for python_shell argument in dpkg_lower module [#54907](https://github.com/saltstack/salt/issues/54907) + * Cleaned up bytes response data before sending to non-bytes compatible returners (postgres, mysql) [#55226](https://github.com/saltstack/salt/issues/55226) + * Fixed malformed state return when testing file.managed with unavailable source file [#55269](https://github.com/saltstack/salt/issues/55269) + * Included stdout in error message for Zypper calls in zypperpkg module. [#56016](https://github.com/saltstack/salt/issues/56016) + * Fixed pillar.filter_by with salt-ssh [#56093](https://github.com/saltstack/salt/issues/56093) + * Fix boto_route53 issue with (multiple) VPCs. [#57139](https://github.com/saltstack/salt/issues/57139) + * Remove log from mine runner which was not used. [#57463](https://github.com/saltstack/salt/issues/57463) + * Fixed x509.read_certificate error when reading a Microsoft CA issued certificate in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#57535](https://github.com/saltstack/salt/issues/57535) + * Updating Slack engine to use slack_bolt library. [#57842](https://github.com/saltstack/salt/issues/57842) + * Fixed warning about replace=True with x509.certificate_managed in the new cryptography x509 module. 
[#58165](https://github.com/saltstack/salt/issues/58165) + * Fix salt.modules.pip:is_installed doesn't handle locally installed packages [#58202](https://github.com/saltstack/salt/issues/58202) + * Add missing MariaDB Grants to mysql module. MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. [#58297](https://github.com/saltstack/salt/issues/58297) + * linux_shadow: Fix cases where malformed shadow entries cause `user.present` + states to fail. [#58423](https://github.com/saltstack/salt/issues/58423) + * Fixed salt.utils.compat.cmp to work with dictionaries [#58729](https://github.com/saltstack/salt/issues/58729) + * Fixed formatting for terse output mode [#58953](https://github.com/saltstack/salt/issues/58953) + * Fixed RecursiveDictDiffer with added nested dicts [#59017](https://github.com/saltstack/salt/issues/59017) + * Fixed x509.certificate_managed has DoS effect on master in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59169](https://github.com/saltstack/salt/issues/59169) + * Fixed saltnado websockets disconnecting immediately [#59183](https://github.com/saltstack/salt/issues/59183) + * Fixed x509.certificate_managed rolls certificates every now and then in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59315](https://github.com/saltstack/salt/issues/59315) + * Fix postgres_privileges.present not idempotent for functions [#59585](https://github.com/saltstack/salt/issues/59585) + * Fixed influxdb_continuous_query.present state to provide the client args to the underlying module on create. [#59766](https://github.com/saltstack/salt/issues/59766) + * Warn when using insecure (http:// based) key_urls for apt-based systems in pkgrepo.managed, and add a kwarg that determines the validity of such a url. 
[#59786](https://github.com/saltstack/salt/issues/59786) + * add load balancing policy default option and ensure the module can be executed with arguments from CLI [#59909](https://github.com/saltstack/salt/issues/59909) + * Fix salt-ssh when using imports with extra-filerefs. [#60003](https://github.com/saltstack/salt/issues/60003) + * Fixed cache directory corruption startup error [#60170](https://github.com/saltstack/salt/issues/60170) + * Update docs remove dry_run in docstring of file.blockreplace state. [#60227](https://github.com/saltstack/salt/issues/60227) + * Adds Parrot to OS_Family_Map in grains. [#60249](https://github.com/saltstack/salt/issues/60249) + * Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions [#60365](https://github.com/saltstack/salt/issues/60365) + * Use return code in iptables --check to verify rule exists. [#60467](https://github.com/saltstack/salt/issues/60467) + * Fix regression pip.installed does not pass env_vars when calling pip.list [#60557](https://github.com/saltstack/salt/issues/60557) + * Fix xfs module when additional output included in mkfs.xfs command. [#60853](https://github.com/saltstack/salt/issues/60853) + * Fixed parsing new format of terraform states in roster.terraform [#60915](https://github.com/saltstack/salt/issues/60915) + * Fixed recognizing installed ARMv7 rpm packages in compatible architectures. [#60994](https://github.com/saltstack/salt/issues/60994) + * Fixing changes dict in pkg state to be consistent when installing and test=True. [#60995](https://github.com/saltstack/salt/issues/60995) + * Fix cron.present duplicating entries when changing timespec to special. [#60997](https://github.com/saltstack/salt/issues/60997) + * Made salt-ssh respect --wipe again [#61083](https://github.com/saltstack/salt/issues/61083) + * state.orchestrate_single only passes a pillar if it is set to the state + function. 
This allows it to be used with state functions that don't accept a + pillar keyword argument. [#61092](https://github.com/saltstack/salt/issues/61092) + * Fix ipset state when the comment kwarg is set. [#61122](https://github.com/saltstack/salt/issues/61122) + * Fix issue with archive.unzip where the password was not being encoded for the extract function [#61422](https://github.com/saltstack/salt/issues/61422) + * Some Linux distributions (like AlmaLinux, Astra Linux, Debian, Mendel, Linux + Mint, Pop!_OS, Rocky Linux) report different `oscodename`, `osfullname`, + `osfinger` grains if lsb-release is installed or not. They have been changed to + only derive these OS grains from `/etc/os-release`. [#61618](https://github.com/saltstack/salt/issues/61618) + * Pop!_OS uses the full version (YY.MM) in the osfinger grain now, not just the year. This allows differentiating for example between 20.04 and 20.10. [#61619](https://github.com/saltstack/salt/issues/61619) + * Fix ssh config roster to correctly parse the ssh config files that contain spaces. [#61650](https://github.com/saltstack/salt/issues/61650) + * Fix SoftLayer configuration not raising an exception when a domain is missing [#61727](https://github.com/saltstack/salt/issues/61727) + * Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry [#61789](https://github.com/saltstack/salt/issues/61789) + * Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function. 
[#61805](https://github.com/saltstack/salt/issues/61805) + * Fixed malformed state return when merge-serializing to an improperly formatted file [#61814](https://github.com/saltstack/salt/issues/61814) + * Made cmdmod._run[_all]_quiet work during minion startup on MacOS with runas specified (which fixed mac_service) [#61816](https://github.com/saltstack/salt/issues/61816) + * When deleting the vault cache, also delete from the session cache [#61821](https://github.com/saltstack/salt/issues/61821) + * Ignore errors on reading license info with dpkg_lowpkg to prevent tracebacks on getting package information. [#61827](https://github.com/saltstack/salt/issues/61827) + * win_lgpo: Display conflicting policy names when more than one policy is found [#61859](https://github.com/saltstack/salt/issues/61859) + * win_lgpo: Fixed intermittent KeyError when getting policy setting using lgpo.get_policy [#61860](https://github.com/saltstack/salt/issues/61860) + * Fixed listing minions on OpenBSD [#61966](https://github.com/saltstack/salt/issues/61966) + * Make Salt return an error on "pkg" modules and states when targeting duplicated package names [#62019](https://github.com/saltstack/salt/issues/62019) + * Fix return of REST-returned permissions when auth_list is set [#62022](https://github.com/saltstack/salt/issues/62022) + * Normalize package names once on using pkg.installed/removed with yum to make it possible to install packages with the name containing a part similar to a name of architecture. [#62029](https://github.com/saltstack/salt/issues/62029) + * Fix inconsistency regarding name and pkgs parameters between zypperpkg.upgrade() and yumpkg.upgrade() [#62030](https://github.com/saltstack/salt/issues/62030) + * Fix attr=all handling in pkg.list_pkgs() (yum/zypper). 
[#62032](https://github.com/saltstack/salt/issues/62032) + * Fixed the humanname being ignored in pkgrepo.managed on openSUSE Leap [#62053](https://github.com/saltstack/salt/issues/62053) + * Fixed issue with some LGPO policies having whitespace at the beginning or end of the element alias [#62058](https://github.com/saltstack/salt/issues/62058) + * Fix ordering of args to libcloud_storage.download_object module [#62074](https://github.com/saltstack/salt/issues/62074) + * Ignore extend declarations in sls files that are excluded. [#62082](https://github.com/saltstack/salt/issues/62082) + * Remove leftover usage of impacket [#62101](https://github.com/saltstack/salt/issues/62101) + * Pass executable path from _get_path_exec() is used when calling the program. + The $HOME env is no longer modified globally. + Only trailing newlines are stripped from the fetched secret. + Pass process arguments are handled in a secure way. [#62120](https://github.com/saltstack/salt/issues/62120) + * Ignore some command return codes in openbsdrcctl_service to prevent spurious errors [#62131](https://github.com/saltstack/salt/issues/62131) + * Fixed extra period in filename output in tls module. Instead of "server.crt." it will now be "server.crt". [#62139](https://github.com/saltstack/salt/issues/62139) + * Make sure lingering PAexec-*.exe files in the Windows directory are cleaned up [#62152](https://github.com/saltstack/salt/issues/62152) + * Restored Salt's DeprecationWarnings [#62185](https://github.com/saltstack/salt/issues/62185) + * Fixed issue with forward slashes on Windows with file.recurse and clean=True [#62197](https://github.com/saltstack/salt/issues/62197) + * Recognize OSMC as Debian-based [#62198](https://github.com/saltstack/salt/issues/62198) + * Fixed Zypper module failing on RPM lock file being temporarily unavailable. 
[#62204](https://github.com/saltstack/salt/issues/62204) + * Improved error handling and diagnostics in the proxmox salt-cloud driver [#62211](https://github.com/saltstack/salt/issues/62211) + * Added EndeavourOS to the Arch os_family. [#62220](https://github.com/saltstack/salt/issues/62220) + * Fix salt-ssh not detecting `platform-python` as a valid interpreter on EL8 [#62235](https://github.com/saltstack/salt/issues/62235) + * Fix pkg.version_cmp on openEuler and a few other os flavors. [#62248](https://github.com/saltstack/salt/issues/62248) + * Fix localhost detection in glusterfs.peers [#62273](https://github.com/saltstack/salt/issues/62273) + * Fix Salt Package Manager (SPM) exception when calling spm create_repo . [#62281](https://github.com/saltstack/salt/issues/62281) + * Fix matcher slowness due to loader invocation [#62283](https://github.com/saltstack/salt/issues/62283) + * Fixes the Puppet module for non-aio Puppet packages for example running the Puppet module on FreeBSD. [#62323](https://github.com/saltstack/salt/issues/62323) + * Issue 62334: Displays a debug log message instead of an error log message when the publisher fails to connect [#62334](https://github.com/saltstack/salt/issues/62334) + * Fix pyobjects renderer access to opts and sls [#62336](https://github.com/saltstack/salt/issues/62336) + * Fix use of random shuffle and sample functions as Jinja filters [#62372](https://github.com/saltstack/salt/issues/62372) + * Fix groups with duplicate GIDs are not returned by get_group_list [#62377](https://github.com/saltstack/salt/issues/62377) + * Fix the "zpool.present" state when enabling zpool features that are already active. 
[#62390](https://github.com/saltstack/salt/issues/62390) + * Fix ability to execute remote file client methods in saltcheck [#62398](https://github.com/saltstack/salt/issues/62398) + * Update all platforms to use pycparser 2.21 or greater for Py 3.9 or higher, fixes fips fault with openssl v3.x [#62400](https://github.com/saltstack/salt/issues/62400) + * Due to changes in the Netmiko library for the exception paths, need to check the version of Netmiko python library and then import the exceptions from different locations depending on the result. [#62405](https://github.com/saltstack/salt/issues/62405) + * When using prereq on a state, the prereq state will first be run with test=True to determine if there are changes. When there are changes, the state with the prereq option will be run prior to the prereq state. If this state fails then the prereq state will not run and the state output uses the test=True run. However, the proposed changes for the prereq state are included from the test=True run. We should pull those out as there weren't actually changes since the prereq state did not run. [#62408](https://github.com/saltstack/salt/issues/62408) + * Added directory mode for file.copy with makedirs [#62426](https://github.com/saltstack/salt/issues/62426) + * Provide better error handling in the various napalm proxy minion functions when the device is not accessible. [#62435](https://github.com/saltstack/salt/issues/62435) + * When handling aggregation, change the order to ensure that the requisites are aggregated first and then the state functions are aggregated. Caching whether aggregate functions are available for particular states so we don't need to attempt to load them every time. 
[#62439](https://github.com/saltstack/salt/issues/62439) + * The patch allows bootstrapping kubernetes clusters in the version above 1.13 via salt module [#62451](https://github.com/saltstack/salt/issues/62451) + * sysctl.persist now updates the in-memory value on FreeBSD even if the on-disk value was already correct. [#62461](https://github.com/saltstack/salt/issues/62461) + * Fixed parsing CDROM apt sources [#62474](https://github.com/saltstack/salt/issues/62474) + * Update sanitizing masking for Salt SSH to include additional password like strings. [#62483](https://github.com/saltstack/salt/issues/62483) + * Fix user/group checking on file state functions in the test mode. [#62499](https://github.com/saltstack/salt/issues/62499) + * Fix user.present to allow removing groups using optional_groups parameter and enforcing idempotent group membership. [#62502](https://github.com/saltstack/salt/issues/62502) + * Fix possible tracebacks if a package with '------' or '======' in the description is installed on the Debian based minion. [#62519](https://github.com/saltstack/salt/issues/62519) + * Fixed the omitted "pool" parameter when cloning a VM with the proxmox salt-cloud driver [#62521](https://github.com/saltstack/salt/issues/62521) + * Fix rendering of pyobjects states in saltcheck [#62523](https://github.com/saltstack/salt/issues/62523) + * Fixes pillar where a corrupted CacheDisk file forces the pillar to be rebuilt [#62527](https://github.com/saltstack/salt/issues/62527) + * Use str() method instead of repo_line for when python3-apt is installed or not in aptpkg.py. [#62546](https://github.com/saltstack/salt/issues/62546) + * Remove the connection_timeout from netmiko_connection_args before netmiko_connection_args is added to __context__["netmiko_device"]["args"] which is passed along to the Netmiko library. 
[#62547](https://github.com/saltstack/salt/issues/62547) + * Fix order specific mount.mounted options for persist [#62556](https://github.com/saltstack/salt/issues/62556) + * Fixed salt-cloud cloning a proxmox VM with a specified new vmid. [#62558](https://github.com/saltstack/salt/issues/62558) + * Fix runas with cmd module when using the onedir bundled packages [#62565](https://github.com/saltstack/salt/issues/62565) + * Update setproctitle version for all platforms [#62576](https://github.com/saltstack/salt/issues/62576) + * Fixed missing parameters when cloning a VM with the proxmox salt-cloud driver [#62580](https://github.com/saltstack/salt/issues/62580) + * Handle PermissionError when importing crypt when FIPS is enabled. [#62587](https://github.com/saltstack/salt/issues/62587) + * Correctly reraise exceptions in states.http [#62595](https://github.com/saltstack/salt/issues/62595) + * Fixed syndic eauth. Now jobs will be published when a valid eauth user is targeting allowed minions/functions. [#62618](https://github.com/saltstack/salt/issues/62618) + * updated rest_cherry/app to properly detect arg sent as a string as curl will do when only one arg is supplied. [#62624](https://github.com/saltstack/salt/issues/62624) + * Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline [#62633](https://github.com/saltstack/salt/issues/62633) + * Fixed vault ext pillar return data for KV v2 [#62651](https://github.com/saltstack/salt/issues/62651) + * Fix saltcheck _get_top_states doesn't pass saltenv to state.show_top [#62654](https://github.com/saltstack/salt/issues/62654) + * Fix groupadd.* functions hard code relative command name [#62657](https://github.com/saltstack/salt/issues/62657) + * Fixed pdbedit.create trying to use a bytes-like hash as string. 
[#62670](https://github.com/saltstack/salt/issues/62670) + * Fix dependency on legacy boto module in boto3 modules [#62672](https://github.com/saltstack/salt/issues/62672) + * Modified "_get_flags" function so that it returns regex flags instead of integers [#62676](https://github.com/saltstack/salt/issues/62676) + * Change startup ReqServer log messages from error to info level. [#62728](https://github.com/saltstack/salt/issues/62728) + * Fix kmod.* functions hard code relative command name [#62772](https://github.com/saltstack/salt/issues/62772) + * Fix mac_brew_pkg to work with null taps [#62793](https://github.com/saltstack/salt/issues/62793) + * Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" items is being treated as a schedule item. [#62795](https://github.com/saltstack/salt/issues/62795) + * Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode [#62817](https://github.com/saltstack/salt/issues/62817) + * Include UID and GID checks in modules.file.check_perms as well as comparing + ownership by username and group name. 
[#62818](https://github.com/saltstack/salt/issues/62818) + * Fix presence events on TCP transport by removing a client's presence when minion disconnects from publish channel correctly [#62826](https://github.com/saltstack/salt/issues/62826) + * Remove Azure deprecation messages from functions that always run w/ salt-cloud [#62845](https://github.com/saltstack/salt/issues/62845) + * Use select instead of iterating over entrypoints as a dictionary for importlib_metadata>=5.0.0 [#62854](https://github.com/saltstack/salt/issues/62854) + * Fixed master job scheduler using when [#62858](https://github.com/saltstack/salt/issues/62858) + * LGPO: Added support for missing domain controller policies: VulnerableChannelAllowList and LdapEnforceChannelBinding [#62873](https://github.com/saltstack/salt/issues/62873) + * Fix unnecessarily complex gce metadata grains code to use googles metadata service more effectively. [#62878](https://github.com/saltstack/salt/issues/62878) + * Fixed dockermod version_info function for docker-py 6.0.0+ [#62882](https://github.com/saltstack/salt/issues/62882) + * Moving setting the LOAD_BALANCING_POLICY_MAP dictionary into the try except block that determines if the cassandra_cql module should be made available. [#62886](https://github.com/saltstack/salt/issues/62886) + * Updating various MongoDB module functions to work with latest version of pymongo. [#62900](https://github.com/saltstack/salt/issues/62900) + * Restored channel for Syndic minions to send job returns to the Salt master. [#62933](https://github.com/saltstack/salt/issues/62933) + * Removed _resolve_deps as it required a library that is not generally available, and switched to apt-get for everything as that can auto resolve dependencies. 
[#62934](https://github.com/saltstack/salt/issues/62934) + * Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang [#62937](https://github.com/saltstack/salt/issues/62937) + * Allow root user to modify crontab lines for non-root users (except AIX and Solaris). Align crontab line changes with the file ones and also with listing crontab. [#62940](https://github.com/saltstack/salt/issues/62940) + * Fix systemd_service.* functions hard code relative command name [#62942](https://github.com/saltstack/salt/issues/62942) + * Fix file.symlink backupname operation can copy remote contents to local disk [#62953](https://github.com/saltstack/salt/issues/62953) + * Issue #62968: Fix issue where cloud deployments were putting the keys in the wrong location on Windows hosts [#62968](https://github.com/saltstack/salt/issues/62968) + * Fixed gpg_passphrase issue with gpg decrypt/encrypt functions [#62977](https://github.com/saltstack/salt/issues/62977) + * Fix file.tidied FileNotFoundError [#62986](https://github.com/saltstack/salt/issues/62986) + * Fixed bug where module.wait states were detected as running legacy module.run syntax [#62988](https://github.com/saltstack/salt/issues/62988) + * Fixed issue with win_wua module where it wouldn't load if the CryptSvc was set to Manual start [#62993](https://github.com/saltstack/salt/issues/62993) + * The `__opts__` dunder dictionary is now added to the loader's `pack` if not + already present, which makes it accessible via the + `salt.loader.context.NamedLoaderContext` class. 
[#63013](https://github.com/saltstack/salt/issues/63013) + * Issue #63024: Fix issue where grains and config data were being placed in the wrong location on Windows hosts [#63024](https://github.com/saltstack/salt/issues/63024) + * Fix btrfs.subvolume_snapshot command failing [#63025](https://github.com/saltstack/salt/issues/63025) + * Fix file.retention_schedule always reports changes [#63033](https://github.com/saltstack/salt/issues/63033) + * Fix mongo authentication for mongo ext_pillar and mongo returner + + This fix also includes the ability to use the mongo connection string for mongo ext_pillar [#63058](https://github.com/saltstack/salt/issues/63058) + * Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. [#63103](https://github.com/saltstack/salt/issues/63103) + * TCP transport documentation now contains proper master/minion-side filtering information [#63120](https://github.com/saltstack/salt/issues/63120) + * Fixed gpg.verify does not respect gnupghome [#63145](https://github.com/saltstack/salt/issues/63145) + * Made pillar cache pass extra minion data as well [#63208](https://github.com/saltstack/salt/issues/63208) + * Fix serious performance issues with the file.tidied module [#63231](https://github.com/saltstack/salt/issues/63231) + * Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. [#63317](https://github.com/saltstack/salt/issues/63317) + * Import StrictVersion and LooseVersion from setuptools.distutils.version or setuptools._distutils.version, if first not available [#63350](https://github.com/saltstack/salt/issues/63350) + * When the shell is passed as powershell or pwsh, only wrap the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended.
[#63590](https://github.com/saltstack/salt/issues/63590) + * LGPO: Added support for "Relax minimum password length limits" [#63596](https://github.com/saltstack/salt/issues/63596) + * Fixed the ability to set a scheduled task to auto delete if not scheduled to run again (``delete_after``) [#63650](https://github.com/saltstack/salt/issues/63650) + * When a job is disabled only increase its _next_fire_time value if the job would have run at the current time, e.g. the current _next_fire_time == now. [#63699](https://github.com/saltstack/salt/issues/63699) + * have salt.template.compile_template_str clean up its temp files. [#63724](https://github.com/saltstack/salt/issues/63724) + * Check file is not empty before attempting to read pillar disk cache file [#63729](https://github.com/saltstack/salt/issues/63729) + * Fixed an issue with generating fingerprints for public keys with different line endings [#63742](https://github.com/saltstack/salt/issues/63742) + * Change default GPG keyserver from pgp.mit.edu to keys.openpgp.org. [#63806](https://github.com/saltstack/salt/issues/63806) + * fix cherrypy 400 error output to be less generic. [#63835](https://github.com/saltstack/salt/issues/63835) + * Ensure kwargs is passed along to _call_apt when passed into install function. [#63847](https://github.com/saltstack/salt/issues/63847) + * remove eval and update logging to be more informative on bad config [#63879](https://github.com/saltstack/salt/issues/63879) + * add linux_distribution to util to stop dep warning [#63904](https://github.com/saltstack/salt/issues/63904) + * Handle the situation when a sub proxy minion does not init properly, e.g. an exception happens, and the sub proxy object is not available. [#63923](https://github.com/saltstack/salt/issues/63923) + * Clarifying documentation for extension_modules configuration option.
[#63929](https://github.com/saltstack/salt/issues/63929) + * Windows pkg module now properly handles versions containing strings [#63935](https://github.com/saltstack/salt/issues/63935) + * Handle the scenario when the check_cmd requisite is used with a state function when the state has a local check_cmd function but that function isn't used by that function. [#63948](https://github.com/saltstack/salt/issues/63948) + * Issue #63981: Allow users to pass verify_ssl to pkg.install/pkg.installed on Windows [#63981](https://github.com/saltstack/salt/issues/63981) + + # Added + + * Introduce a `LIB_STATE_DIR` syspaths variable which defaults to `CONFIG_DIR`, + but can be individually customized during installation by specifying + `--salt-lib-state-dir` during installation. Change the default `pki_dir` to + `<LIB_STATE_DIR>/pki/master` (for the master) and `<LIB_STATE_DIR>/pki/minion` + (for the minion). [#3396](https://github.com/saltstack/salt/issues/3396) + * Allow users to enable 'queue=True' for all state runs via config file [#31468](https://github.com/saltstack/salt/issues/31468) + * Added pillar templating to vault policies [#43287](https://github.com/saltstack/salt/issues/43287) + * Add support for NVMeF as a transport protocol for hosts in a Pure Storage FlashArray [#51088](https://github.com/saltstack/salt/issues/51088) + * A new salt-ssh roster that generates a roster by parsing a known_hosts file.
[#54679](https://github.com/saltstack/salt/issues/54679) + * Added Windows Event Viewer support [#54713](https://github.com/saltstack/salt/issues/54713) + * Added the win_lgpo_reg state and execution modules which will allow registry based group policy to be set directly in the Registry.pol file [#56013](https://github.com/saltstack/salt/issues/56013) + * Added resource tagging functions to boto_dynamodb execution module [#57500](https://github.com/saltstack/salt/issues/57500) + * Added `openvswitch_db` state module and functions `bridge_to_parent`, + `bridge_to_vlan`, `db_get`, and `db_set` to the `openvswitch` execution module. + Also added optional `parent` and `vlan` parameters to the + `openvswitch_bridge.present` state module function and the + `openvswitch.bridge_create` execution module function. [#58986](https://github.com/saltstack/salt/issues/58986) + * State module to manage SysFS attributes [#60154](https://github.com/saltstack/salt/issues/60154) + * Added ability for `salt.wait_for_event` to handle `event_id`s that have a list value. [#60430](https://github.com/saltstack/salt/issues/60430) + * Added support for Linux ppc64le core grains (cpu_model, virtual, productname, manufacturer, serialnumber) and arm core grains (serialnumber, productname) [#60518](https://github.com/saltstack/salt/issues/60518) + * Added autostart option to virt.defined and virt.running states, along with virt.update execution modules. [#60700](https://github.com/saltstack/salt/issues/60700) + * Added .0 back to our versioning scheme for future versions (e.g. 3006.0) [#60722](https://github.com/saltstack/salt/issues/60722) + * Initial work to allow parallel startup of proxy minions when used as sub proxies with Deltaproxy. [#61153](https://github.com/saltstack/salt/issues/61153) + * Added node label support for GCE [#61245](https://github.com/saltstack/salt/issues/61245) + * Support the --priority flag when adding sources to Chocolatey.
[#61319](https://github.com/saltstack/salt/issues/61319) + * Add namespace option to ext_pillar.http_json [#61335](https://github.com/saltstack/salt/issues/61335) + * Added a filter function to ps module to get a list of processes on a minion according to their state. [#61420](https://github.com/saltstack/salt/issues/61420) + * Add postgres.timeout option to postgres module for limiting postgres query times [#61433](https://github.com/saltstack/salt/issues/61433) + * Added new optional vault option, ``config_location``. This can be either ``master`` or ``local`` and defines where vault will look for connection details, either requesting them from the master or using the local config. [#61857](https://github.com/saltstack/salt/issues/61857) + * Add ipwrap() jinja filter to wrap IPv6 addresses with brackets. [#61931](https://github.com/saltstack/salt/issues/61931) + * 'tcp' transport is now available in ipv6-only network [#62009](https://github.com/saltstack/salt/issues/62009) + * Add `diff_attr` parameter to pkg.upgrade() (zypper/yum). [#62031](https://github.com/saltstack/salt/issues/62031) + * Config option pass_variable_prefix allows to distinguish variables that contain paths to pass secrets. + Config option pass_strict_fetch allows to error out when a secret cannot be fetched from pass. + Config option pass_dir allows setting the PASSWORD_STORE_DIR env for pass. + Config option pass_gnupghome allows setting the $GNUPGHOME env for pass. 
[#62120](https://github.com/saltstack/salt/issues/62120) + * Add file.pruned state and expanded file.rmdir exec module functionality [#62178](https://github.com/saltstack/salt/issues/62178) + * Added "dig.PTR" function to resolve PTR records for IPs, as well as tests and documentation [#62275](https://github.com/saltstack/salt/issues/62275) + * Added the ability to remove a KB using the DISM state/execution modules [#62366](https://github.com/saltstack/salt/issues/62366) + * Add "<tiamat> python" subcommand to allow execution of arbitrary scripts via bundled Python runtime [#62381](https://github.com/saltstack/salt/issues/62381) + * Add ability to provide conditions which convert normal state actions to no-op when true [#62446](https://github.com/saltstack/salt/issues/62446) + * Added debug log messages displaying the command being run when installing packages on Windows [#62480](https://github.com/saltstack/salt/issues/62480) + * Add biosvendor grain [#62496](https://github.com/saltstack/salt/issues/62496) + * Add ifelse Jinja function as found in CFEngine [#62508](https://github.com/saltstack/salt/issues/62508) + * Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. [#62539](https://github.com/saltstack/salt/issues/62539) + * Adds __env__ substitution to ext_pillar.stack; followup of #61531, improved exception handling for stacked template (jinja) template rendering and yaml parsing in ext_pillar.stack [#62578](https://github.com/saltstack/salt/issues/62578) + * Increase file.tidied flexibility with regard to age and size [#62678](https://github.com/saltstack/salt/issues/62678) + * Added "connected_devices" feature to netbox pillar module.
It contains extra information about devices connected to the minion [#62761](https://github.com/saltstack/salt/issues/62761) + * Add atomic file operation for symlink changes [#62768](https://github.com/saltstack/salt/issues/62768) + * Add password/account locking/unlocking in user.present state on supported operating systems [#62856](https://github.com/saltstack/salt/issues/62856) + * Added onchange configuration for script engine [#62867](https://github.com/saltstack/salt/issues/62867) + * Added output and bare functionality to export_key gpg module function [#62978](https://github.com/saltstack/salt/issues/62978) + * Add keyvalue serializer for environment files [#62983](https://github.com/saltstack/salt/issues/62983) + * Add ability to ignore symlinks in file.tidied [#63042](https://github.com/saltstack/salt/issues/63042) + * salt-cloud supports IMDSv2 tokens when using 'use-instance-role-credentials' [#63067](https://github.com/saltstack/salt/issues/63067) + * Add ability for file.symlink to not set ownership on existing links [#63093](https://github.com/saltstack/salt/issues/63093) + * Restore the previous slack engine and deprecate it, rename the replacement slack engine to slack_bolt until deprecation [#63095](https://github.com/saltstack/salt/issues/63095) + * Add functions that will return the underlying block device, mount point, and filesystem type for a given path [#63098](https://github.com/saltstack/salt/issues/63098) + * Add ethtool execution and state module functions for pause [#63128](https://github.com/saltstack/salt/issues/63128) + * Add boardname grain [#63131](https://github.com/saltstack/salt/issues/63131) + * Added management of ECDSA/EdDSA private keys with x509 modules in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63248](https://github.com/saltstack/salt/issues/63248) + * Added x509 modules support for different output formats in the new cryptography x509 module.
Please migrate to the new cryptography x509 module for this improvement. [#63249](https://github.com/saltstack/salt/issues/63249) + * Added deprecation_warning test state for ensuring that deprecation warnings are correctly emitted. [#63315](https://github.com/saltstack/salt/issues/63315) + * Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id. + This allows users to enable state_events on a per use basis rather than having to + enable them globally for all state runs. [#63316](https://github.com/saltstack/salt/issues/63316) + * Allow max queue size setting for state runs to prevent performance problems from queue growth [#63356](https://github.com/saltstack/salt/issues/63356) + * Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) + * Include the version of `relenv` in the versions report. [#63827](https://github.com/saltstack/salt/issues/63827) + * Added debug log messages displaying the command being run when removing packages on Windows [#63866](https://github.com/saltstack/salt/issues/63866) + + # Security + + * Upgrade Requirements Due to Security Issues. + + * Upgrade to `cryptography>=39.0.1` due to: + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. + * Update to `markdown-it-py==2.2.0` due to: + * https://github.com/advisories/GHSA-jrwr-5x3p-hvc3 + * https://github.com/advisories/GHSA-vrjv-mxr7-vjf8 [#63882](https://github.com/saltstack/salt/issues/63882) + + + -- Salt Project Packaging Wed, 29 Mar 2023 19:31:17 +0000 + +salt (1:3006.0rc2) stable; urgency=medium + + + # Removed + + * Remove and deprecate the __orchestration__ key from salt.runner and salt.wheel return data. To get it back, set features.enable_deprecated_orchestration_flag master configuration option to True. The flag will be completely removed in Salt 3008 Argon.
[#59917](https://github.com/saltstack/salt/issues/59917) + * Removed distutils and replaced with setuptools, given distutils is deprecated and removed in Python 3.12 [#60476](https://github.com/saltstack/salt/issues/60476) + * Removed ``runtests`` targets from ``noxfile.py`` [#62239](https://github.com/saltstack/salt/issues/62239) + * Removed the PyObjC dependency. + + This addresses problems with building a one dir build for macOS. + It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir (because it would be tied to the macOS version where the onedir was built). + Since it's currently not being used, it's removed. [#62432](https://github.com/saltstack/salt/issues/62432) + * Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. [#63874](https://github.com/saltstack/salt/issues/63874) + + # Deprecated + + * renamed `keep_jobs`, specifying job cache TTL in hours, to `keep_jobs_seconds`, specifying TTL in seconds. + `keep_jobs` will be removed in the Argon release [#55295](https://github.com/saltstack/salt/issues/55295) + * Removing all references to napalm-base which is no longer supported. [#61542](https://github.com/saltstack/salt/issues/61542) + * The 'ip_bracket' function has been moved from salt/utils/zeromq.py to salt/utils/network.py [#62009](https://github.com/saltstack/salt/issues/62009) + * The `expand_repo_def` function in `salt.modules.aptpkg` is now deprecated. It's only used in `salt.states.pkgrepo` and it has no use of being exposed to the CLI.
[#62485](https://github.com/saltstack/salt/issues/62485) + * Deprecated defunct Django returner [#62644](https://github.com/saltstack/salt/issues/62644) + * Deprecate core ESXi and associated states and modules, vcenter and vsphere support in favor of Salt VMware Extensions [#62754](https://github.com/saltstack/salt/issues/62754) + * Removing manufacture grain which has been deprecated. [#62914](https://github.com/saltstack/salt/issues/62914) + * Removing deprecated utils/boto3_elasticsearch.py [#62915](https://github.com/saltstack/salt/issues/62915) + * Removing support for the now deprecated _ext_nodes from salt/master.py. [#62917](https://github.com/saltstack/salt/issues/62917) + * Deprecating the Salt Slack engine in favor of the Salt Slack Bolt Engine. [#63095](https://github.com/saltstack/salt/issues/63095) + * `salt.utils.version.StrictVersion` is now deprecated and it's use should be replaced with `salt.utils.version.Version`. [#63383](https://github.com/saltstack/salt/issues/63383) + + # Changed + + * More intelligent diffing in changes of file.serialize state. [#48609](https://github.com/saltstack/salt/issues/48609) + * Move deprecation of the neutron module to Argon. Please migrate to the neutronng module instead. [#49430](https://github.com/saltstack/salt/issues/49430) + * ``umask`` is now a global state argument, instead of only applying to ``cmd`` + states. [#57803](https://github.com/saltstack/salt/issues/57803) + * Update pillar.obfuscate to accept kwargs in addition to args. This is useful when passing in keyword arguments like saltenv that are then passed along to pillar.items. [#58971](https://github.com/saltstack/salt/issues/58971) + * Improve support for listing macOS brew casks [#59439](https://github.com/saltstack/salt/issues/59439) + * Add missing MariaDB Grants to mysql module. + MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. 
+ Also improved exception handling in `grant_add` which did not log the original error message and replaced it with a generic error. [#61409](https://github.com/saltstack/salt/issues/61409) + * Use VENV_PIP_TARGET environment variable as a default target for pip if present. [#62089](https://github.com/saltstack/salt/issues/62089) + * Disabled FQDNs grains on macOS by default [#62168](https://github.com/saltstack/salt/issues/62168) + * Replaced pyroute2.IPDB with pyroute2.NDB, as the former is deprecated [#62218](https://github.com/saltstack/salt/issues/62218) + * Enhance capture of error messages for Zypper calls in zypperpkg module. [#62346](https://github.com/saltstack/salt/issues/62346) + * Removed GPG_1_3_1 check [#62895](https://github.com/saltstack/salt/issues/62895) + * Requisite state chunks now all consistently contain `__id__`, `__sls__` and `name`. [#63012](https://github.com/saltstack/salt/issues/63012) + * netapi_enable_clients option to allow enabling/disabling of clients in salt-api. + By default all clients will now be disabled. Users of salt-api will need + to update their master config to enable the clients that they use. Not adding + the netapi_enable_clients option with required clients to the master config will + disable salt-api. [#63050](https://github.com/saltstack/salt/issues/63050) + * Stop relying on `salt/_version.py` to write Salt's version. Instead use `salt/_version.txt` which only contains the version string. [#63383](https://github.com/saltstack/salt/issues/63383) + * Set enable_fqdns_grains to be False by default. [#63595](https://github.com/saltstack/salt/issues/63595) + * Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file.
[#63710](https://github.com/saltstack/salt/issues/63710) + + # Fixed + + * Add kwargs to handle extra parameters for http.query [#36138](https://github.com/saltstack/salt/issues/36138) + * Fix mounted bind mounts getting active mount options added [#39292](https://github.com/saltstack/salt/issues/39292) + * Fix `sysctl.present` converts spaces to tabs. [#40054](https://github.com/saltstack/salt/issues/40054) + * Fixes state pkg.purged to purge removed packages on Debian family systems [#42306](https://github.com/saltstack/salt/issues/42306) + * Fix fun_args missing from syndic returns [#45823](https://github.com/saltstack/salt/issues/45823) + * Fix mount.mounted with 'mount: False' reports unmounted file system as unchanged when running with test=True [#47201](https://github.com/saltstack/salt/issues/47201) + * Issue #49310: Allow users to touch a file with Unix date of birth [#49310](https://github.com/saltstack/salt/issues/49310) + * Do not raise an exception in pkg.info_installed on nonzero return code [#51620](https://github.com/saltstack/salt/issues/51620) + * Passes the value of the force parameter from file.copy to its call to file.remove so that files with the read-only attribute are handled. [#51739](https://github.com/saltstack/salt/issues/51739) + * Fixed x509.certificate_managed creates new certificate every run in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#52167](https://github.com/saltstack/salt/issues/52167) + * Don't check for cached pillar errors on state.apply [#52354](https://github.com/saltstack/salt/issues/52354), [#57180](https://github.com/saltstack/salt/issues/57180), [#59339](https://github.com/saltstack/salt/issues/59339) + * Swapping out args and kwargs for arg and kwarg respectively in the Slack engine when the command passed is a runner. 
[#52400](https://github.com/saltstack/salt/issues/52400) + * Ensure when we're adding chunks to the rules when running aggregation with the iptables state module we use a copy of the chunk otherwise we end up with a recursive mess. [#53353](https://github.com/saltstack/salt/issues/53353) + * When user_create or user_remove fail, return False instead of returning the error. [#53377](https://github.com/saltstack/salt/issues/53377) + * Include sync_roster when sync_all is called. [#53914](https://github.com/saltstack/salt/issues/53914) + * Avoid warning noise in lograte.get [#53988](https://github.com/saltstack/salt/issues/53988) + * Fixed listing revoked keys with gpg.list_keys [#54347](https://github.com/saltstack/salt/issues/54347) + * Fix mount.mounted does not handle blanks properly [#54508](https://github.com/saltstack/salt/issues/54508) + * Fixed grain num_cpus get wrong CPUs count in case of inconsistent CPU numbering. [#54682](https://github.com/saltstack/salt/issues/54682) + * Fix spelling error for python_shell argument in dpkg_lower module [#54907](https://github.com/saltstack/salt/issues/54907) + * Cleaned up bytes response data before sending to non-bytes compatible returners (postgres, mysql) [#55226](https://github.com/saltstack/salt/issues/55226) + * Fixed malformed state return when testing file.managed with unavailable source file [#55269](https://github.com/saltstack/salt/issues/55269) + * Included stdout in error message for Zypper calls in zypperpkg module. [#56016](https://github.com/saltstack/salt/issues/56016) + * Fixed pillar.filter_by with salt-ssh [#56093](https://github.com/saltstack/salt/issues/56093) + * Fix boto_route53 issue with (multiple) VPCs. [#57139](https://github.com/saltstack/salt/issues/57139) + * Remove log from mine runner which was not used. [#57463](https://github.com/saltstack/salt/issues/57463) + * Fixed x509.read_certificate error when reading a Microsoft CA issued certificate in the new cryptography x509 module. 
Please migrate to the new cryptography x509 module for this improvement. [#57535](https://github.com/saltstack/salt/issues/57535) + * Updating Slack engine to use slack_bolt library. [#57842](https://github.com/saltstack/salt/issues/57842) + * Fixed warning about replace=True with x509.certificate_managed in the new cryptography x509 module. [#58165](https://github.com/saltstack/salt/issues/58165) + * Fix salt.modules.pip:is_installed doesn't handle locally installed packages [#58202](https://github.com/saltstack/salt/issues/58202) + * Add missing MariaDB Grants to mysql module. MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. [#58297](https://github.com/saltstack/salt/issues/58297) + * linux_shadow: Fix cases where malformed shadow entries cause `user.present` + states to fail. [#58423](https://github.com/saltstack/salt/issues/58423) + * Fixed salt.utils.compat.cmp to work with dictionaries [#58729](https://github.com/saltstack/salt/issues/58729) + * Fixed formatting for terse output mode [#58953](https://github.com/saltstack/salt/issues/58953) + * Fixed RecursiveDictDiffer with added nested dicts [#59017](https://github.com/saltstack/salt/issues/59017) + * Fixed x509.certificate_managed has DoS effect on master in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59169](https://github.com/saltstack/salt/issues/59169) + * Fixed saltnado websockets disconnecting immediately [#59183](https://github.com/saltstack/salt/issues/59183) + * Fixed x509.certificate_managed rolls certificates every now and then in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. 
[#59315](https://github.com/saltstack/salt/issues/59315) + * Fix postgres_privileges.present not idempotent for functions [#59585](https://github.com/saltstack/salt/issues/59585) + * Fixed influxdb_continuous_query.present state to provide the client args to the underlying module on create. [#59766](https://github.com/saltstack/salt/issues/59766) + * Warn when using insecure (http:// based) key_urls for apt-based systems in pkgrepo.managed, and add a kwarg that determines the validity of such a url. [#59786](https://github.com/saltstack/salt/issues/59786) + * add load balancing policy default option and ensure the module can be executed with arguments from CLI [#59909](https://github.com/saltstack/salt/issues/59909) + * Fix salt-ssh when using imports with extra-filerefs. [#60003](https://github.com/saltstack/salt/issues/60003) + * Fixed cache directory corruption startup error [#60170](https://github.com/saltstack/salt/issues/60170) + * Update docs remove dry_run in docstring of file.blockreplace state. [#60227](https://github.com/saltstack/salt/issues/60227) + * Adds Parrot to OS_Family_Map in grains. [#60249](https://github.com/saltstack/salt/issues/60249) + * Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions [#60365](https://github.com/saltstack/salt/issues/60365) + * Use return code in iptables --check to verify rule exists. [#60467](https://github.com/saltstack/salt/issues/60467) + * Fix regression pip.installed does not pass env_vars when calling pip.list [#60557](https://github.com/saltstack/salt/issues/60557) + * Fix xfs module when additional output included in mkfs.xfs command. [#60853](https://github.com/saltstack/salt/issues/60853) + * Fixed parsing new format of terraform states in roster.terraform [#60915](https://github.com/saltstack/salt/issues/60915) + * Fixed recognizing installed ARMv7 rpm packages in compatible architectures. 
[#60994](https://github.com/saltstack/salt/issues/60994) + * Fixing changes dict in pkg state to be consistent when installing and test=True. [#60995](https://github.com/saltstack/salt/issues/60995) + * Fix cron.present duplicating entries when changing timespec to special. [#60997](https://github.com/saltstack/salt/issues/60997) + * Made salt-ssh respect --wipe again [#61083](https://github.com/saltstack/salt/issues/61083) + * state.orchestrate_single only passes a pillar if it is set to the state + function. This allows it to be used with state functions that don't accept a + pillar keyword argument. [#61092](https://github.com/saltstack/salt/issues/61092) + * Fix ipset state when the comment kwarg is set. [#61122](https://github.com/saltstack/salt/issues/61122) + * Fix issue with archive.unzip where the password was not being encoded for the extract function [#61422](https://github.com/saltstack/salt/issues/61422) + * Some Linux distributions (like AlmaLinux, Astra Linux, Debian, Mendel, Linux + Mint, Pop!_OS, Rocky Linux) report different `oscodename`, `osfullname`, + `osfinger` grains if lsb-release is installed or not. They have been changed to + only derive these OS grains from `/etc/os-release`. [#61618](https://github.com/saltstack/salt/issues/61618) + * Pop!_OS uses the full version (YY.MM) in the osfinger grain now, not just the year. This allows differentiating for example between 20.04 and 20.10. [#61619](https://github.com/saltstack/salt/issues/61619) + * Fix ssh config roster to correctly parse the ssh config files that contain spaces.
[#61650](https://github.com/saltstack/salt/issues/61650) + * Fix SoftLayer configuration not raising an exception when a domain is missing [#61727](https://github.com/saltstack/salt/issues/61727) + * Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry [#61789](https://github.com/saltstack/salt/issues/61789) + * Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function. [#61805](https://github.com/saltstack/salt/issues/61805) + * Fixed malformed state return when merge-serializing to an improperly formatted file [#61814](https://github.com/saltstack/salt/issues/61814) + * Made cmdmod._run[_all]_quiet work during minion startup on MacOS with runas specified (which fixed mac_service) [#61816](https://github.com/saltstack/salt/issues/61816) + * When deleting the vault cache, also delete from the session cache [#61821](https://github.com/saltstack/salt/issues/61821) + * Ignore errors on reading license info with dpkg_lowpkg to prevent tracebacks on getting package information. 
[#61827](https://github.com/saltstack/salt/issues/61827) + * win_lgpo: Display conflicting policy names when more than one policy is found [#61859](https://github.com/saltstack/salt/issues/61859) + * win_lgpo: Fixed intermittent KeyError when getting policy setting using lgpo.get_policy [#61860](https://github.com/saltstack/salt/issues/61860) + * Fixed listing minions on OpenBSD [#61966](https://github.com/saltstack/salt/issues/61966) + * Make Salt return an error on "pkg" modules and states when targeting duplicated package names [#62019](https://github.com/saltstack/salt/issues/62019) + * Fix return of REST-returned permissions when auth_list is set [#62022](https://github.com/saltstack/salt/issues/62022) + * Normalize package names once on using pkg.installed/removed with yum to make it possible to install packages with the name containing a part similar to a name of architecture. [#62029](https://github.com/saltstack/salt/issues/62029) + * Fix inconsistency regarding name and pkgs parameters between zypperpkg.upgrade() and yumpkg.upgrade() [#62030](https://github.com/saltstack/salt/issues/62030) + * Fix attr=all handling in pkg.list_pkgs() (yum/zypper). [#62032](https://github.com/saltstack/salt/issues/62032) + * Fixed the humanname being ignored in pkgrepo.managed on openSUSE Leap [#62053](https://github.com/saltstack/salt/issues/62053) + * Fixed issue with some LGPO policies having whitespace at the beginning or end of the element alias [#62058](https://github.com/saltstack/salt/issues/62058) + * Fix ordering of args to libcloud_storage.download_object module [#62074](https://github.com/saltstack/salt/issues/62074) + * Ignore extend declarations in sls files that are excluded. [#62082](https://github.com/saltstack/salt/issues/62082) + * Remove leftover usage of impacket [#62101](https://github.com/saltstack/salt/issues/62101) + * Pass executable path from _get_path_exec() is used when calling the program. + The $HOME env is no longer modified globally.
+ Only trailing newlines are stripped from the fetched secret. + Pass process arguments are handled in a secure way. [#62120](https://github.com/saltstack/salt/issues/62120) + * Ignore some command return codes in openbsdrcctl_service to prevent spurious errors [#62131](https://github.com/saltstack/salt/issues/62131) + * Fixed extra period in filename output in tls module. Instead of "server.crt." it will now be "server.crt". [#62139](https://github.com/saltstack/salt/issues/62139) + * Make sure lingering PAexec-*.exe files in the Windows directory are cleaned up [#62152](https://github.com/saltstack/salt/issues/62152) + * Restored Salt's DeprecationWarnings [#62185](https://github.com/saltstack/salt/issues/62185) + * Fixed issue with forward slashes on Windows with file.recurse and clean=True [#62197](https://github.com/saltstack/salt/issues/62197) + * Recognize OSMC as Debian-based [#62198](https://github.com/saltstack/salt/issues/62198) + * Fixed Zypper module failing on RPM lock file being temporarily unavailable. [#62204](https://github.com/saltstack/salt/issues/62204) + * Improved error handling and diagnostics in the proxmox salt-cloud driver [#62211](https://github.com/saltstack/salt/issues/62211) + * Added EndeavourOS to the Arch os_family. [#62220](https://github.com/saltstack/salt/issues/62220) + * Fix salt-ssh not detecting `platform-python` as a valid interpreter on EL8 [#62235](https://github.com/saltstack/salt/issues/62235) + * Fix pkg.version_cmp on openEuler and a few other os flavors. [#62248](https://github.com/saltstack/salt/issues/62248) + * Fix localhost detection in glusterfs.peers [#62273](https://github.com/saltstack/salt/issues/62273) + * Fix Salt Package Manager (SPM) exception when calling spm create_repo . 
[#62281](https://github.com/saltstack/salt/issues/62281) + * Fix matcher slowness due to loader invocation [#62283](https://github.com/saltstack/salt/issues/62283) + * Fixes the Puppet module for non-aio Puppet packages for example running the Puppet module on FreeBSD. [#62323](https://github.com/saltstack/salt/issues/62323) + * Issue 62334: Displays a debug log message instead of an error log message when the publisher fails to connect [#62334](https://github.com/saltstack/salt/issues/62334) + * Fix pyobjects renderer access to opts and sls [#62336](https://github.com/saltstack/salt/issues/62336) + * Fix use of random shuffle and sample functions as Jinja filters [#62372](https://github.com/saltstack/salt/issues/62372) + * Fix groups with duplicate GIDs are not returned by get_group_list [#62377](https://github.com/saltstack/salt/issues/62377) + * Fix the "zpool.present" state when enabling zpool features that are already active. [#62390](https://github.com/saltstack/salt/issues/62390) + * Fix ability to execute remote file client methods in saltcheck [#62398](https://github.com/saltstack/salt/issues/62398) + * Update all platforms to use pycparser 2.21 or greater for Py 3.9 or higher, fixes fips fault with openssl v3.x [#62400](https://github.com/saltstack/salt/issues/62400) + * Due to changes in the Netmiko library for the exception paths, need to check the version of Netmiko python library and then import the exceptions from different locations depending on the result. [#62405](https://github.com/saltstack/salt/issues/62405) + * When using preq on a state, then prereq state will first be run with test=True to determine if there are changes. When there are changes, the state with the prereq option will be run prior to the prereq state. If this state fails then the prereq state will not run and the state output uses the test=True run. However, the proposed changes are included for the prereq state are included from the test=True run. 
We should pull those out as there weren't actually changes since the prereq state did not run. [#62408](https://github.com/saltstack/salt/issues/62408) + * Added directory mode for file.copy with makedirs [#62426](https://github.com/saltstack/salt/issues/62426) + * Provide better error handling in the various napalm proxy minion functions when the device is not accessible. [#62435](https://github.com/saltstack/salt/issues/62435) + * When handling aggregation, change the order to ensure that the requisites are aggregated first and then the state functions are aggregated. Caching whether aggregate functions are available for particular states so we don't need to attempt to load them every time. [#62439](https://github.com/saltstack/salt/issues/62439) + * The patch allows bootstrapping Kubernetes clusters at versions above 1.13 via the salt module [#62451](https://github.com/saltstack/salt/issues/62451) + * sysctl.persist now updates the in-memory value on FreeBSD even if the on-disk value was already correct. [#62461](https://github.com/saltstack/salt/issues/62461) + * Fixed parsing CDROM apt sources [#62474](https://github.com/saltstack/salt/issues/62474) + * Update sanitizing masking for Salt SSH to include additional password like strings. [#62483](https://github.com/saltstack/salt/issues/62483) + * Fix user/group checking on file state functions in the test mode. [#62499](https://github.com/saltstack/salt/issues/62499) + * Fix user.present to allow removing groups using optional_groups parameter and enforcing idempotent group membership. [#62502](https://github.com/saltstack/salt/issues/62502) + * Fix possible tracebacks if a package with '------' or '======' in the description is installed on the Debian based minion. 
[#62519](https://github.com/saltstack/salt/issues/62519) + * Fixed the omitted "pool" parameter when cloning a VM with the proxmox salt-cloud driver [#62521](https://github.com/saltstack/salt/issues/62521) + * Fix rendering of pyobjects states in saltcheck [#62523](https://github.com/saltstack/salt/issues/62523) + * Fixes pillar where a corrupted CacheDisk file forces the pillar to be rebuilt [#62527](https://github.com/saltstack/salt/issues/62527) + * Use str() method instead of repo_line for when python3-apt is installed or not in aptpkg.py. [#62546](https://github.com/saltstack/salt/issues/62546) + * Remove the connection_timeout from netmiko_connection_args before netmiko_connection_args is added to __context__["netmiko_device"]["args"] which is passed along to the Netmiko library. [#62547](https://github.com/saltstack/salt/issues/62547) + * Fix order specific mount.mounted options for persist [#62556](https://github.com/saltstack/salt/issues/62556) + * Fixed salt-cloud cloning a proxmox VM with a specified new vmid. [#62558](https://github.com/saltstack/salt/issues/62558) + * Fix runas with cmd module when using the onedir bundled packages [#62565](https://github.com/saltstack/salt/issues/62565) + * Update setproctitle version for all platforms [#62576](https://github.com/saltstack/salt/issues/62576) + * Fixed missing parameters when cloning a VM with the proxmox salt-cloud driver [#62580](https://github.com/saltstack/salt/issues/62580) + * Handle PermissionError when importing crypt when FIPS is enabled. [#62587](https://github.com/saltstack/salt/issues/62587) + * Correctly reraise exceptions in states.http [#62595](https://github.com/saltstack/salt/issues/62595) + * Fixed syndic eauth. Now jobs will be published when a valid eauth user is targeting allowed minions/functions. [#62618](https://github.com/saltstack/salt/issues/62618) + * updated rest_cherry/app to properly detect arg sent as a string as curl will do when only one arg is supplied. 
[#62624](https://github.com/saltstack/salt/issues/62624) + * Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline [#62633](https://github.com/saltstack/salt/issues/62633) + * Fixed vault ext pillar return data for KV v2 [#62651](https://github.com/saltstack/salt/issues/62651) + * Fix saltcheck _get_top_states doesn't pass saltenv to state.show_top [#62654](https://github.com/saltstack/salt/issues/62654) + * Fix groupadd.* functions hard code relative command name [#62657](https://github.com/saltstack/salt/issues/62657) + * Fixed pdbedit.create trying to use a bytes-like hash as string. [#62670](https://github.com/saltstack/salt/issues/62670) + * Fix dependency on legacy boto module in boto3 modules [#62672](https://github.com/saltstack/salt/issues/62672) + * Modified "_get_flags" function so that it returns regex flags instead of integers [#62676](https://github.com/saltstack/salt/issues/62676) + * Change startup ReqServer log messages from error to info level. [#62728](https://github.com/saltstack/salt/issues/62728) + * Fix kmod.* functions hard code relative command name [#62772](https://github.com/saltstack/salt/issues/62772) + * Fix mac_brew_pkg to work with null taps [#62793](https://github.com/saltstack/salt/issues/62793) + * Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" item is being treated as a schedule item. [#62795](https://github.com/saltstack/salt/issues/62795) + * Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode [#62817](https://github.com/saltstack/salt/issues/62817) + * Include UID and GID checks in modules.file.check_perms as well as comparing + ownership by username and group name. 
[#62818](https://github.com/saltstack/salt/issues/62818) + * Fix presence events on TCP transport by removing a client's presence when minion disconnects from publish channel correctly [#62826](https://github.com/saltstack/salt/issues/62826) + * Remove Azure deprecation messages from functions that always run w/ salt-cloud [#62845](https://github.com/saltstack/salt/issues/62845) + * Use select instead of iterating over entrypoints as a dictionary for importlib_metadata>=5.0.0 [#62854](https://github.com/saltstack/salt/issues/62854) + * Fixed master job scheduler using when [#62858](https://github.com/saltstack/salt/issues/62858) + * LGPO: Added support for missing domain controller policies: VulnerableChannelAllowList and LdapEnforceChannelBinding [#62873](https://github.com/saltstack/salt/issues/62873) + * Fix unnecessarily complex gce metadata grains code to use Google's metadata service more effectively. [#62878](https://github.com/saltstack/salt/issues/62878) + * Fixed dockermod version_info function for docker-py 6.0.0+ [#62882](https://github.com/saltstack/salt/issues/62882) + * Moving setting the LOAD_BALANCING_POLICY_MAP dictionary into the try except block that determines if the cassandra_cql module should be made available. [#62886](https://github.com/saltstack/salt/issues/62886) + * Updating various MongoDB module functions to work with latest version of pymongo. [#62900](https://github.com/saltstack/salt/issues/62900) + * Restored channel for Syndic minions to send job returns to the Salt master. [#62933](https://github.com/saltstack/salt/issues/62933) + * Removed _resolve_deps as it required a library that is not generally available, and switched to apt-get for everything as that can auto resolve dependencies. 
[#62934](https://github.com/saltstack/salt/issues/62934) + * Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang [#62937](https://github.com/saltstack/salt/issues/62937) + * Allow root user to modify crontab lines for non-root users (except AIX and Solaris). Align crontab line changes with the file ones and also with listing crontab. [#62940](https://github.com/saltstack/salt/issues/62940) + * Fix systemd_service.* functions hard code relative command name [#62942](https://github.com/saltstack/salt/issues/62942) + * Fix file.symlink backupname operation can copy remote contents to local disk [#62953](https://github.com/saltstack/salt/issues/62953) + * Issue #62968: Fix issue where cloud deployments were putting the keys in the wrong location on Windows hosts [#62968](https://github.com/saltstack/salt/issues/62968) + * Fixed gpg_passphrase issue with gpg decrypt/encrypt functions [#62977](https://github.com/saltstack/salt/issues/62977) + * Fix file.tidied FileNotFoundError [#62986](https://github.com/saltstack/salt/issues/62986) + * Fixed bug where module.wait states were detected as running legacy module.run syntax [#62988](https://github.com/saltstack/salt/issues/62988) + * Fixed issue with win_wua module where it wouldn't load if the CryptSvc was set to Manual start [#62993](https://github.com/saltstack/salt/issues/62993) + * The `__opts__` dunder dictionary is now added to the loader's `pack` if not + already present, which makes it accessible via the + `salt.loader.context.NamedLoaderContext` class. 
[#63013](https://github.com/saltstack/salt/issues/63013) + * Issue #63024: Fix issue where grains and config data were being place in the wrong location on Windows hosts [#63024](https://github.com/saltstack/salt/issues/63024) + * Fix btrfs.subvolume_snapshot command failing [#63025](https://github.com/saltstack/salt/issues/63025) + * Fix file.retention_schedule always reports changes [#63033](https://github.com/saltstack/salt/issues/63033) + * Fix mongo authentication for mongo ext_pillar and mongo returner + + This fix also include the ability to use the mongo connection string for mongo ext_pillar [#63058](https://github.com/saltstack/salt/issues/63058) + * Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. [#63103](https://github.com/saltstack/salt/issues/63103) + * TCP transport documentation now contains proper master/minion-side filtering information [#63120](https://github.com/saltstack/salt/issues/63120) + * Fixed gpg.verify does not respect gnupghome [#63145](https://github.com/saltstack/salt/issues/63145) + * Made pillar cache pass extra minion data as well [#63208](https://github.com/saltstack/salt/issues/63208) + * Fix serious performance issues with the file.tidied module [#63231](https://github.com/saltstack/salt/issues/63231) + * Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. [#63317](https://github.com/saltstack/salt/issues/63317) + * Import StrictVersion and LooseVersion from setuptools.distutils.verison or setuptools._distutils.version, if first not available [#63350](https://github.com/saltstack/salt/issues/63350) + * When the shell is passed as powershell or pwsh, only wrapper the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended. 
[#63590](https://github.com/saltstack/salt/issues/63590) + * LGPO: Added support for "Relax minimum password length limits" [#63596](https://github.com/saltstack/salt/issues/63596) + * When a job is disabled only increase its _next_fire_time value if the job would have run at the current time, e.g. the current _next_fire_time == now. [#63699](https://github.com/saltstack/salt/issues/63699) + * Check file is not empty before attempting to read pillar disk cache file [#63729](https://github.com/saltstack/salt/issues/63729) + * fix cherrypy 400 error output to be less generic. [#63835](https://github.com/saltstack/salt/issues/63835) + * remove eval and update logging to be more informative on bad config [#63879](https://github.com/saltstack/salt/issues/63879) + + # Added + + * Introduce a `LIB_STATE_DIR` syspaths variable which defaults to `CONFIG_DIR`, + but can be individually customized during installation by specifying + `*-salt-lib-state-dir`. Change the default `pki_dir` to + `/pki/master` (for the master) and `/pki/minion` + (for the minion). [#3396](https://github.com/saltstack/salt/issues/3396) + * Allow users to enable 'queue=True' for all state runs via config file [#31468](https://github.com/saltstack/salt/issues/31468) + * Added pillar templating to vault policies [#43287](https://github.com/saltstack/salt/issues/43287) + * Add support for NVMeF as a transport protocol for hosts in a Pure Storage FlashArray [#51088](https://github.com/saltstack/salt/issues/51088) + * A new salt-ssh roster that generates a roster by parsing a known_hosts file. 
[#54679](https://github.com/saltstack/salt/issues/54679) + * Added Windows Event Viewer support [#54713](https://github.com/saltstack/salt/issues/54713) + * Added the win_lgpo_reg state and execution modules which will allow registry based group policy to be set directly in the Registry.pol file [#56013](https://github.com/saltstack/salt/issues/56013) + * Added resource tagging functions to boto_dynamodb execution module [#57500](https://github.com/saltstack/salt/issues/57500) + * Added `openvswitch_db` state module and functions `bridge_to_parent`, + `bridge_to_vlan`, `db_get`, and `db_set` to the `openvswitch` execution module. + Also added optional `parent` and `vlan` parameters to the + `openvswitch_bridge.present` state module function and the + `openvswitch.bridge_create` execution module function. [#58986](https://github.com/saltstack/salt/issues/58986) + * State module to manage SysFS attributes [#60154](https://github.com/saltstack/salt/issues/60154) + * Added ability for `salt.wait_for_event` to handle `event_id`s that have a list value. [#60430](https://github.com/saltstack/salt/issues/60430) + * Added support for Linux ppc64le core grains (cpu_model, virtual, productname, manufacturer, serialnumber) and arm core grains (serialnumber, productname) [#60518](https://github.com/saltstack/salt/issues/60518) + * Added autostart option to virt.defined and virt.running states, along with virt.update execution modules. [#60700](https://github.com/saltstack/salt/issues/60700) + * Added .0 back to our versioning scheme for future versions (e.g. 3006.0) [#60722](https://github.com/saltstack/salt/issues/60722) + * Initial work to allow parallel startup of proxy minions when used as sub proxies with Deltaproxy. [#61153](https://github.com/saltstack/salt/issues/61153) + * Added node label support for GCE [#61245](https://github.com/saltstack/salt/issues/61245) + * Support the --priority flag when adding sources to Chocolatey. 
[#61319](https://github.com/saltstack/salt/issues/61319) + * Add namespace option to ext_pillar.http_json [#61335](https://github.com/saltstack/salt/issues/61335) + * Added a filter function to ps module to get a list of processes on a minion according to their state. [#61420](https://github.com/saltstack/salt/issues/61420) + * Add postgres.timeout option to postgres module for limiting postgres query times [#61433](https://github.com/saltstack/salt/issues/61433) + * Added new optional vault option, ``config_location``. This can be either ``master`` or ``local`` and defines where vault will look for connection details, either requesting them from the master or using the local config. [#61857](https://github.com/saltstack/salt/issues/61857) + * Add ipwrap() jinja filter to wrap IPv6 addresses with brackets. [#61931](https://github.com/saltstack/salt/issues/61931) + * 'tcp' transport is now available in ipv6-only network [#62009](https://github.com/saltstack/salt/issues/62009) + * Add `diff_attr` parameter to pkg.upgrade() (zypper/yum). [#62031](https://github.com/saltstack/salt/issues/62031) + * Config option pass_variable_prefix allows to distinguish variables that contain paths to pass secrets. + Config option pass_strict_fetch allows to error out when a secret cannot be fetched from pass. + Config option pass_dir allows setting the PASSWORD_STORE_DIR env for pass. + Config option pass_gnupghome allows setting the $GNUPGHOME env for pass. 
[#62120](https://github.com/saltstack/salt/issues/62120) + * Add file.pruned state and expanded file.rmdir exec module functionality [#62178](https://github.com/saltstack/salt/issues/62178) + * Added "dig.PTR" function to resolve PTR records for IPs, as well as tests and documentation [#62275](https://github.com/saltstack/salt/issues/62275) + * Added the ability to remove a KB using the DISM state/execution modules [#62366](https://github.com/saltstack/salt/issues/62366) + * Add " python" subcommand to allow execution of arbitrary scripts via bundled Python runtime [#62381](https://github.com/saltstack/salt/issues/62381) + * Add ability to provide conditions which convert normal state actions to no-op when true [#62446](https://github.com/saltstack/salt/issues/62446) + * Added debug log messages displaying the command being run when installing packages on Windows [#62480](https://github.com/saltstack/salt/issues/62480) + * Add biosvendor grain [#62496](https://github.com/saltstack/salt/issues/62496) + * Add ifelse Jinja function as found in CFEngine [#62508](https://github.com/saltstack/salt/issues/62508) + * Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. [#62539](https://github.com/saltstack/salt/issues/62539) + * Adds __env__substitution to ext_pillar.stack; followup of #61531, improved exception handling for stacked template (jinja) template rendering and yaml parsing in ext_pillar.stack [#62578](https://github.com/saltstack/salt/issues/62578) + * Increase file.tidied flexibility with regard to age and size [#62678](https://github.com/saltstack/salt/issues/62678) + * Added "connected_devices" feature to netbox pillar module. 
It contains extra information about devices connected to the minion [#62761](https://github.com/saltstack/salt/issues/62761) + * Add atomic file operation for symlink changes [#62768](https://github.com/saltstack/salt/issues/62768) + * Add password/account locking/unlocking in user.present state on supported operating systems [#62856](https://github.com/saltstack/salt/issues/62856) + * Added onchange configuration for script engine [#62867](https://github.com/saltstack/salt/issues/62867) + * Added output and bare functionality to export_key gpg module function [#62978](https://github.com/saltstack/salt/issues/62978) + * Add keyvalue serializer for environment files [#62983](https://github.com/saltstack/salt/issues/62983) + * Add ability to ignore symlinks in file.tidied [#63042](https://github.com/saltstack/salt/issues/63042) + * salt-cloud support IMDSv2 tokens when using 'use-instance-role-credentials' [#63067](https://github.com/saltstack/salt/issues/63067) + * Add ability for file.symlink to not set ownership on existing links [#63093](https://github.com/saltstack/salt/issues/63093) + * Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation [#63095](https://github.com/saltstack/salt/issues/63095) + * Add functions that will return the underlying block device, mount point, and filesystem type for a given path [#63098](https://github.com/saltstack/salt/issues/63098) + * Add ethtool execution and state module functions for pause [#63128](https://github.com/saltstack/salt/issues/63128) + * Add boardname grain [#63131](https://github.com/saltstack/salt/issues/63131) + * Added management of ECDSA/EdDSA private keys with x509 modules in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63248](https://github.com/saltstack/salt/issues/63248) + * Added x509 modules support for different output formats in the new cryptography x509 module. 
Please migrate to the new cryptography x509 module for this improvement. [#63249](https://github.com/saltstack/salt/issues/63249) + * Added deprecation_warning test state for ensuring that deprecation warnings are correctly emitted. [#63315](https://github.com/saltstack/salt/issues/63315) + * Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id. + This allows users to enable state_events on a per use basis rather than having to + enable them globally for all state runs. [#63316](https://github.com/saltstack/salt/issues/63316) + * Allow max queue size setting for state runs to prevent performance problems from queue growth [#63356](https://github.com/saltstack/salt/issues/63356) + * Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) + * Include the version of `relenv` in the versions report. [#63827](https://github.com/saltstack/salt/issues/63827) + * Added debug log messages displaying the command being run when removing packages on Windows [#63866](https://github.com/saltstack/salt/issues/63866) + + + -- Salt Project Packaging Sun, 19 Mar 2023 12:34:47 +0000 + salt (1:3006.0rc1) stable; urgency=medium diff --git a/pkg/debian/control b/pkg/debian/control index c83031018599..c08d99d5e23a 100644 --- a/pkg/debian/control +++ b/pkg/debian/control @@ -14,9 +14,18 @@ Vcs-Browser: https://github.com/saltstack/salt.git Vcs-Git: git://github.com/saltstack/salt.git +Package: salt-dbg +Architecture: amd64 arm64 +Section: debug +Priority: extra +Homepage: http://saltproject.io/ +Description: Salt debug symbols + + Package: salt-common Architecture: amd64 arm64 Depends: ${misc:Depends} +Breaks: salt-minion (<= 3006.4) Suggests: ifupdown Recommends: lsb-release Description: shared libraries that salt requires for all packages @@ -42,8 +51,8 @@ Description: shared libraries that salt requires for all packages Package: salt-master Architecture: amd64 arm64 -Replaces: salt-common (<= 
3005.1+ds-4) -Breaks: salt-common (<= 3005.1+ds-4) +Replaces: salt-common (<= 3006.4) +Breaks: salt-common (<= 3006.4) Depends: salt-common (= ${source:Version}), ${misc:Depends} Description: remote manager to administer servers via salt @@ -68,6 +77,8 @@ Description: remote manager to administer servers via salt Package: salt-minion Architecture: amd64 arm64 +Replaces: salt-common (<= 3006.4) +Breaks: salt-common (<= 3006.4) Depends: bsdmainutils, dctrl-tools, salt-common (= ${source:Version}), @@ -120,6 +131,7 @@ Description: master-of-masters for salt, the distributed remote execution system Package: salt-ssh Architecture: amd64 arm64 +Breaks: salt-common (<= 3006.4) Depends: salt-common (= ${source:Version}), openssh-client, ${misc:Depends} @@ -148,6 +160,7 @@ Description: remote manager to administer servers via Salt SSH Package: salt-cloud Architecture: amd64 arm64 +Breaks: salt-common (<= 3006.4) Depends: salt-common (= ${source:Version}), ${misc:Depends} Description: public cloud VM management system diff --git a/pkg/debian/rules b/pkg/debian/rules index 98a99d97a176..3d39abe7ab18 100755 --- a/pkg/debian/rules +++ b/pkg/debian/rules @@ -1,6 +1,8 @@ #!/usr/bin/make -f DH_VERBOSE = 1 +.PHONY: override_dh_strip + %: dh $@ @@ -15,26 +17,23 @@ override_dh_auto_clean: ifeq ("${SALT_ONEDIR_ARCHIVE}", "") override_dh_auto_build: + export FETCH_RELENV_VERSION=$${SALT_RELENV_VERSION} mkdir -p build/onedir python3 -m venv --clear --copies build/onedir/venv - build/onedir/venv/bin/python3 -m pip install relenv - build/onedir/venv/bin/relenv fetch - build/onedir/venv/bin/relenv toolchain fetch - build/onedir/venv/bin/relenv create build/onedir/salt - build/onedir/salt/bin/python3 -m pip install "pip>=22.3.1,<23.0" "setuptools>=65.6.3,<66" "wheel" - export PY=$$(build/onedir/salt/bin/python3 -c 'import sys; sys.stdout.write("{}.{}".format(*sys.version_info)); sys.stdout.flush()') \ - && build/onedir/salt/bin/python3 -m pip install -r 
requirements/static/pkg/py$${PY}/linux.txt + build/onedir/venv/bin/python3 -m pip install relenv==$${SALT_RELENV_VERSION} + export FETCH_RELENV_VERSION=$${SALT_RELENV_VERSION} + export PY=$$(build/onedir/venv/bin/python3 -c 'import sys; sys.stdout.write("{}.{}".format(*sys.version_info)); sys.stdout.flush()') \ + && build/onedir/venv/bin/python3 -m pip install -r requirements/static/ci/py$${PY}/tools.txt + build/onedir/venv/bin/relenv fetch --arch=$${SALT_PACKAGE_ARCH} --python=$${SALT_PYTHON_VERSION} + build/onedir/venv/bin/relenv toolchain fetch --arch=$${SALT_PACKAGE_ARCH} + build/onedir/venv/bin/tools pkg build onedir-dependencies --arch $${SALT_PACKAGE_ARCH} --relenv-version=$${SALT_RELENV_VERSION} --python-version $${SALT_PYTHON_VERSION} --package-name build/onedir/salt --platform linux # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the /bin directory find build/onedir/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \; - export USE_STATIC_REQUIREMENTS=1 \ - && export RELENV_PIP_DIR=1 \ - && build/onedir/salt/bin/python3 -m pip install --no-warn-script-location . - build/onedir/salt/bin/python3 -m venv --clear --copies build/onedir/tools - export PY=$$(build/onedir/tools/bin/python3 -c 'import sys; sys.stdout.write("{}.{}".format(*sys.version_info)); sys.stdout.flush()') \ - && build/onedir/tools/bin/python3 -m pip install -r requirements/static/ci/py$${PY}/tools.txt - build/onedir/tools/bin/tools pkg pre-archive-cleanup --pkg build/onedir/salt + build/onedir/venv/bin/tools pkg build salt-onedir . 
--package-name build/onedir/salt --platform linux + build/onedir/venv/bin/tools pkg pre-archive-cleanup --pkg build/onedir/salt + else override_dh_auto_build: # The relenv onedir is being provided, all setup up until Salt is installed @@ -44,6 +43,7 @@ override_dh_auto_build: # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the /bin directory find build/onedir/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \; + endif # dh_auto_install tries to invoke distutils causing failures. @@ -53,4 +53,12 @@ override_dh_auto_install: override_dh_install: mkdir -p debian/salt-common/opt/saltstack cp -R build/onedir/salt debian/salt-common/opt/saltstack/ + + # Generate master config + mkdir -p debian/salt-master/etc/salt + sed 's/#user: root/user: salt/g' conf/master > debian/salt-master/etc/salt/master + dh_install + +override_dh_strip: + dh_strip --dbg-package=salt-dbg diff --git a/pkg/debian/salt-api.postinst b/pkg/debian/salt-api.postinst new file mode 100644 index 000000000000..9345d72bf2aa --- /dev/null +++ b/pkg/debian/salt-api.postinst @@ -0,0 +1,10 @@ +case "$1" in + configure) + if [ ! 
-e "/var/log/salt/api" ]; then + touch /var/log/salt/api + chmod 640 /var/log/salt/api + fi + chown salt:salt /var/log/salt/api + if command -v systemctl; then systemctl enable salt-api; fi + ;; +esac diff --git a/pkg/debian/salt-cloud.install b/pkg/debian/salt-cloud.install new file mode 100644 index 000000000000..b00b83321e1b --- /dev/null +++ b/pkg/debian/salt-cloud.install @@ -0,0 +1 @@ +conf/cloud /etc/salt diff --git a/pkg/debian/salt-cloud.postinst b/pkg/debian/salt-cloud.postinst new file mode 100644 index 000000000000..a92551161da0 --- /dev/null +++ b/pkg/debian/salt-cloud.postinst @@ -0,0 +1,6 @@ +case "$1" in + configure) + PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush;") + chown -R salt:salt /etc/salt/cloud.deploy.d /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy + ;; +esac diff --git a/pkg/debian/salt-common.conffiles b/pkg/debian/salt-common.conffiles new file mode 100644 index 000000000000..595731d1d020 --- /dev/null +++ b/pkg/debian/salt-common.conffiles @@ -0,0 +1 @@ +/etc/logrotate.d/salt-common diff --git a/pkg/debian/salt-common.dirs b/pkg/debian/salt-common.dirs index 1379962cb343..381ec1f48ce0 100644 --- a/pkg/debian/salt-common.dirs +++ b/pkg/debian/salt-common.dirs @@ -1,4 +1,7 @@ /var/cache/salt /var/log/salt +/var/run/salt /usr/share/fish/vendor_completions.d -/opt/saltstack/salt/pypath +/opt/saltstack/salt +/etc/salt +/etc/logrotate.d diff --git a/pkg/debian/salt-common.install b/pkg/debian/salt-common.install index 4b612bd3aa6c..4f8dac552ece 100644 --- a/pkg/debian/salt-common.install +++ b/pkg/debian/salt-common.install @@ -1,7 +1,4 @@ -pkg/common/salt-proxy@.service /lib/systemd/system -conf/roster /etc/salt -conf/cloud /etc/salt -conf/proxy /etc/salt +pkg/common/logrotate/salt-common /etc/logrotate.d pkg/common/fish-completions/salt-cp.fish /usr/share/fish/vendor_completions.d pkg/common/fish-completions/salt-call.fish 
/usr/share/fish/vendor_completions.d pkg/common/fish-completions/salt-syndic.fish /usr/share/fish/vendor_completions.d diff --git a/pkg/debian/salt-common.postinst b/pkg/debian/salt-common.postinst new file mode 100644 index 000000000000..c5a8d969b450 --- /dev/null +++ b/pkg/debian/salt-common.postinst @@ -0,0 +1,4 @@ +#!/bin/sh +set -e + +/opt/saltstack/salt/bin/python3 -m compileall -qq /opt/saltstack/salt/lib diff --git a/pkg/debian/salt-common.preinst b/pkg/debian/salt-common.preinst new file mode 100644 index 000000000000..0e45d2399f68 --- /dev/null +++ b/pkg/debian/salt-common.preinst @@ -0,0 +1,39 @@ +case "$1" in + install|upgrade) + [ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt + [ -z "$SALT_USER" ] && SALT_USER=salt + [ -z "$SALT_NAME" ] && SALT_NAME="Salt" + [ -z "$SALT_GROUP" ] && SALT_GROUP=salt + [ -z "$SALT_SHELL" ] && SALT_SHELL=/usr/sbin/nologin + + # create user to avoid running server as root + # 1. create group if not existing + if ! getent group | grep -q "^$SALT_GROUP:" ; then + echo -n "Adding group $SALT_GROUP.." + addgroup --quiet --system $SALT_GROUP 2>/dev/null ||true + echo "..done" + fi + # 2. create homedir if not existing + test -d $SALT_HOME || mkdir -p $SALT_HOME + # 3. create user if not existing + if ! getent passwd | grep -q "^$SALT_USER:"; then + echo -n "Adding system user $SALT_USER.." + useradd --system \ + --no-create-home \ + -s $SALT_SHELL \ + -g $SALT_GROUP \ + $SALT_USER 2>/dev/null || true + echo "..done" + fi + # 4. 
adjust passwd entry + usermod -c "$SALT_NAME" \ + -d $SALT_HOME \ + -s $SALT_SHELL \ + -g $SALT_GROUP \ + $SALT_USER + + # Remove incorrectly installed logrotate config - issue 65231 + test -d /etc/logrotate.d/salt && rm -r /etc/logrotate.d/salt || /bin/true + + ;; +esac diff --git a/pkg/debian/salt-common.prerm b/pkg/debian/salt-common.prerm new file mode 100644 index 000000000000..236c2bd3d12d --- /dev/null +++ b/pkg/debian/salt-common.prerm @@ -0,0 +1,5 @@ +#!/bin/sh +set -e + +dpkg -L salt-common | perl -ne 's,/([^/]*)\.py$,/__pycache__/\1.*, or next; unlink $_ or die $! foreach glob($_)' +find /opt/saltstack/salt -type d -name __pycache__ -empty -print0 | xargs --null --no-run-if-empty rmdir diff --git a/pkg/debian/salt-master.dirs b/pkg/debian/salt-master.dirs index 2caa6c7a4427..aba501b4379e 100644 --- a/pkg/debian/salt-master.dirs +++ b/pkg/debian/salt-master.dirs @@ -1 +1,15 @@ /etc/salt/master.d +/etc/salt/pki/master/minions +/etc/salt/pki/master/minions_autosign +/etc/salt/pki/master/minions_denied +/etc/salt/pki/master/minions_pre +/etc/salt/pki/master/minions_rejected +/var/cache/salt/master +/var/cache/salt/minion +/var/cache/salt/master/jobs +/var/cache/salt/master/proc +/var/cache/salt/master/queues +/var/cache/salt/master/roots +/var/cache/salt/master/syndics +/var/cache/salt/master/tokens +/var/run/salt/master diff --git a/pkg/debian/salt-master.install b/pkg/debian/salt-master.install index 1dc8a04ef55e..809b5141b1db 100644 --- a/pkg/debian/salt-master.install +++ b/pkg/debian/salt-master.install @@ -1,2 +1,2 @@ -conf/master /etc/salt pkg/common/salt-master.service /lib/systemd/system +pkg/common/salt.ufw /etc/ufw/applications.d diff --git a/pkg/debian/salt-master.postinst b/pkg/debian/salt-master.postinst new file mode 100644 index 000000000000..4f7686d8ed9c --- /dev/null +++ b/pkg/debian/salt-master.postinst @@ -0,0 +1,14 @@ +case "$1" in + configure) + if [ ! 
-e "/var/log/salt/master" ]; then + touch /var/log/salt/master + chmod 640 /var/log/salt/master + fi + if [ ! -e "/var/log/salt/key" ]; then + touch /var/log/salt/key + chmod 640 /var/log/salt/key + fi + chown -R salt:salt /etc/salt/pki/master /etc/salt/master.d /var/log/salt/master /var/log/salt/key /var/cache/salt/master /var/run/salt/master + if command -v systemctl; then systemctl enable salt-master; fi + ;; +esac diff --git a/pkg/debian/salt-master.preinst b/pkg/debian/salt-master.preinst new file mode 100644 index 000000000000..f205423079c0 --- /dev/null +++ b/pkg/debian/salt-master.preinst @@ -0,0 +1,18 @@ +case "$1" in + install|upgrade) + [ -z "$SALT_HOME" ] && SALT_HOME=/opt/saltstack/salt + [ -z "$SALT_USER" ] && SALT_USER=salt + [ -z "$SALT_NAME" ] && SALT_NAME="Salt" + [ -z "$SALT_GROUP" ] && SALT_GROUP=salt + PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush();") + + # Reset permissions to fix previous installs + find ${SALT_HOME} /etc/salt /var/log/salt /var/cache/salt /var/run/salt \ + \! 
\( -path /etc/salt/cloud.deploy.d\* -o -path /var/log/salt/cloud -o -path /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy\* \) -a \ + \( -user ${SALT_USER} -o -group ${SALT_GROUP} \) -exec chown root:root \{\} \; + + # remove incorrectly installed ufw salt-master directory - issue 57712 + test -d /etc/ufw/applications.d/salt-master && rm -rf /etc/ufw/applications.d/salt-master || /bin/true + + ;; +esac diff --git a/pkg/debian/salt-minion.install b/pkg/debian/salt-minion.install index 4fc4633bda82..d7a23a423bdb 100644 --- a/pkg/debian/salt-minion.install +++ b/pkg/debian/salt-minion.install @@ -1,2 +1,4 @@ conf/minion /etc/salt +conf/proxy /etc/salt pkg/common/salt-minion.service /lib/systemd/system +pkg/common/salt-proxy@.service /lib/systemd/system diff --git a/pkg/debian/salt-ssh.install b/pkg/debian/salt-ssh.install new file mode 100644 index 000000000000..b2e2e88243b3 --- /dev/null +++ b/pkg/debian/salt-ssh.install @@ -0,0 +1 @@ +conf/roster /etc/salt diff --git a/pkg/macos/build.sh b/pkg/macos/build.sh index 5cc0d0e5493b..bf00952908ff 100755 --- a/pkg/macos/build.sh +++ b/pkg/macos/build.sh @@ -119,13 +119,34 @@ _usage() { echo "usage: ${0}" echo " [-h|--help] [-v|--version]" echo "" - echo " -h, --help this message" - echo " -v, --version version of Salt display in the package" + echo " -h, --help Display this message" + echo " -v, --version Version of Salt to display in the package" + echo " -p, --python-version Version of python to install using relenv." 
+ echo " The python version is tied to the relenv" + echo " version" + echo " -r, --relenv-version Version of relenv to install" echo "" echo " Build a Salt package:" echo " example: $0 3006.1-1" } +function _parse_yaml { + local prefix=$2 + local s='[[:space:]]*' w='[a-zA-Z0-9_]*' fs=$(echo @|tr @ '\034') + sed -ne "s|^\($s\):|\1|" \ + -e "s|^\($s\)\($w\)$s:$s[\"']\(.*\)[\"']$s\$|\1$fs\2$fs\3|p" \ + -e "s|^\($s\)\($w\)$s:$s\(.*\)$s\$|\1$fs\2$fs\3|p" $1 | + awk -F$fs '{ + indent = length($1)/2; + vname[indent] = $2; + for (i in vname) {if (i > indent) {delete vname[i]}} + if (length($3) > 0) { + vn=""; for (i=0; i indent) {delete vname[i]}} + if (length($3) > 0) { + vn=""; for (i=0; i/dev/null 2>&1 -if [ -n "$(pip show relenv)" ]; then +if [ -n "${RELENV_VERSION}" ]; then + pip install relenv==${RELENV_VERSION} +else + pip install relenv +fi +if [ -n "$(relenv --version)" ]; then _success else _failure fi +export RELENV_FETCH_VERSION=$(relenv --version) #------------------------------------------------------------------------------- # Building Python with Relenv #------------------------------------------------------------------------------- if [ $BUILD -gt 0 ]; then echo "- Building python (relenv):" - relenv build --clean + relenv build --clean --python=$PY_VERSION else # We want to suppress the output here so it looks nice # To see the output, remove the output redirection _msg "Fetching python (relenv)" - relenv fetch >/dev/null 2>&1 - if [ -f "$RELENV_DIR/build/x86_64-macos.tar.xz" ]; then - _success - else - _failure - fi + relenv fetch --python=$PY_VERSION && _success || _failure fi _msg "Extracting python environment" -relenv create "$BUILD_DIR/opt/salt" +relenv create --python=$PY_VERSION "$BUILD_DIR/opt/salt" if [ -f "$BLD_PY_BIN" ]; then _success else @@ -250,6 +257,7 @@ fi #------------------------------------------------------------------------------- # Removing Unneeded Libraries from Python 
#------------------------------------------------------------------------------- +PY_VERSION_MINOR=$($BLD_PY_BIN -c 'import sys; sys.stdout.write("{}.{}".format(*sys.version_info))') REMOVE=( "idlelib" "test" @@ -257,16 +265,10 @@ REMOVE=( "turtledemo" ) for i in "${REMOVE[@]}"; do - TEST_DIR="$BUILD_DIR/opt/salt/lib/python3.*/$i" - DIR=$(compgen -G "$TEST_DIR") - if [ -n "$DIR" ]; then + TEST_DIR="$BUILD_DIR/opt/salt/lib/python${PY_VERSION_MINOR}/$i" + if [ -d "$TEST_DIR" ]; then _msg "Removing $i directory" - rm -rf "$DIR" - if ! compgen -G "$TEST_DIR" > /dev/null; then - _success - else - _failure - fi + rm -rf "$TEST_DIR" && _success || _failure fi done diff --git a/pkg/macos/notarize.sh b/pkg/macos/notarize.sh index 1b0315931213..87c9cf2adeb7 100755 --- a/pkg/macos/notarize.sh +++ b/pkg/macos/notarize.sh @@ -6,17 +6,19 @@ # Date: December 2020 # # Description: This notarizes the macOS Installer Package (.pkg). It uses the -# `altool` xcode utility which is only available in the full -# Xcode package. It is not available in Command Line Tools. +# `notarytool` xcode utility which became available in Xcode 13. +# Xcode 13 requires macOS Big Sur 11.3 or higher. However, the +# notarytool binary can be extracted and run on macOS Catalina +# 10.15.7 and higher. It is not available in Command Line Tools. # # This script will upload a copy of the package to Apple and wait # for the notarization to return. This can take several minutes. # -# If this command is run with sudo, you need to pass the `-E` -# option to make sure the environment variables pass through to the -# sudo environment. For example: +# This script requires the presence of some environment variables. +# If running this script with sudo, be sure to pass the `-E` +# option. 
# -# sudo -E ./notarize.sh +# sudo -E ./notarize.sh salt-3006.2-signed.pkg # # Requirements: # - Full Xcode Installation @@ -31,24 +33,26 @@ # The package that will be notarized (must be signed) # # Example: -# The following will notarize the 'salt-3006.1-1-signed.pkg' file +# The following will notarize the 'salt-3006.2-signed.pkg' file: # -# ./notarize.sh salt-3006.1-1-signed.pkg +# ./notarize.sh salt-3006.2-signed.pkg # # Environment Setup: # # Define Environment Variables: -# Create two environment variables for the Apple account and the -# app-specific password associated with that account. To generate the -# app-specific password see: https://support.apple.com/en-us/HT204397 +# Create three environment variables for the apple account, apple team +# ID, and the app-specific password associated with that account. To +# generate the app-specific password see: +# https://support.apple.com/en-us/HT204397 # # export APPLE_ACCT="username@domain.com" +# export APPLE_TEAM_ID="AB283DVDS5" # export APP_SPEC_PWD="abcd-efgh-ijkl-mnop" # ################################################################################ #------------------------------------------------------------------------------- -# Variables +# Check input parameters #------------------------------------------------------------------------------- if [ "$1" == "" ]; then echo "Must supply a package to notarize" @@ -57,28 +61,9 @@ else PACKAGE=$1 fi -BUNDLE_ID="com.saltstack.salt" -CMD_OUTPUT=$(mktemp -t cmd.log) - #------------------------------------------------------------------------------- # Functions #------------------------------------------------------------------------------- -# _usage -# -# Prints out help text -_usage() { - echo "" - echo "Script to notarize the Salt package:" - echo "" - echo "usage: ${0}" - echo " [-h|--help]" - echo "" - echo " -h, --help this message" - echo "" - echo " To notarize the Salt package:" - echo " example: $0 salt-3006.1-1-signed.pkg" -} - # _msg # # Prints the 
message with a dash... no new line @@ -99,38 +84,32 @@ _success() { _failure() { printf "\e[31m%s\e[0m\n" "Failure" echo "output >>>>>>" - cat "$CMD_OUTPUT" 1>&2 + cat "$NOTARIZE_LOG" 1>&2 echo "<<<<<< output" exit 1 } #------------------------------------------------------------------------------- -# Get Parameters -#------------------------------------------------------------------------------- -while true; do - if [[ -z "$1" ]]; then break; fi - case "$1" in - -h | --help ) - _usage - exit 0 - ;; - -*) - echo "Invalid Option: $1" - echo "" - _usage - exit 1 - ;; - * ) - shift - ;; - esac -done +# Environment Variables +#------------------------------------------------------------------------------- +_msg "Setting Variables" +NOTARIZE_LOG=$(mktemp -t notarize-app.log) +NOTARY_TOOL=$(xcrun --find notarytool) +_success + +#------------------------------------------------------------------------------- +# Check for notarytool +#------------------------------------------------------------------------------- +if [ ! 
-f "$NOTARY_TOOL" ]; then + echo "This script requires the NotaryTool binary" + exit 1 +fi #------------------------------------------------------------------------------- # Delete temporary files on exit #------------------------------------------------------------------------------- function finish { - rm "$CMD_OUTPUT" + rm "$NOTARIZE_LOG" } trap finish EXIT @@ -139,87 +118,34 @@ trap finish EXIT #------------------------------------------------------------------------------- printf "=%.0s" {1..80}; printf "\n" echo "Notarize Salt Package" +echo "- This can take up to 30 minutes" printf -- "-%.0s" {1..80}; printf "\n" #------------------------------------------------------------------------------- # Submit app for notarization #------------------------------------------------------------------------------- -_msg "Submitting package for notarization" -if xcrun altool --notarize-app \ - --primary-bundle-id "$BUNDLE_ID" \ - --username "$APPLE_ACCT" \ - --password "$APP_SPEC_PWD" \ - -f "$PACKAGE" > "$CMD_OUTPUT" 2>&1; then +_msg "Submitting Package for Notarization" +if $NOTARY_TOOL submit \ + --apple-id "$APPLE_ACCT" \ + --team-id "$APPLE_TEAM_ID" \ + --password "$APP_SPEC_PWD" \ + --wait \ + "$PACKAGE" > "$NOTARIZE_LOG" 2>&1; then _success else _failure fi -# Get RequestUUID from the CMD_OUTPUT -# Uncomment for debugging -# cat "$CMD_OUTPUT" - -_msg "Verifying successful upload" -if grep -q "No errors uploading" "$CMD_OUTPUT"; then +# Make sure the status is "Accepted", then staple +_msg "Verifying accepted status" +if grep -q "status: Accepted" "$NOTARIZE_LOG"; then _success else - echo ">>>>>> Failed Uploading Package <<<<<<" > "$CMD_OUTPUT" _failure fi -RequestUUID=$(awk -F ' = ' '/RequestUUID/ {print $2}' "$CMD_OUTPUT") - -# Clear CMD_OUTPUT -echo "" > "$CMD_OUTPUT" - -echo "- Checking Notarization Status (every 30 seconds):" -echo -n " " -# Though it usually takes 5 minutes, notarization can take up to 30 minutes -# Check status every 30 seconds for 40 
minutes -tries=0 -while sleep 30; do - ((tries++)) - echo -n "." - - # check notarization status - if ! xcrun altool --notarization-info "$RequestUUID" \ - --username "$APPLE_ACCT" \ - --password "$APP_SPEC_PWD" > "$CMD_OUTPUT" 2>&1; then - echo "" - cat "$CMD_OUTPUT" 1>&2 - exit 1 - fi - - # Look for Status in the CMD_OUTPUT - # Uncomment for debugging - # cat "$CMD_OUTPUT" - - # Continue checking until Status is no longer "in progress" - if ! grep -q "Status: in progress" "$CMD_OUTPUT"; then - echo "" - break - fi - - if (( tries > 80 )); then - echo "" - echo "Failed after 40 minutes" - echo "Log: $CMD_OUTPUT" - cat "$CMD_OUTPUT" 1>&2 - exit 1 - fi - -done - -# Make sure the result is "success", then staple -if ! grep -q "Status: success" "$CMD_OUTPUT"; then - echo "**** There was a problem notarizing the package" - echo "**** View the log for details:" - awk -F ': ' '/LogFileURL/ {print $2}' "$CMD_OUTPUT" - exit 1 -fi -echo " Notarization Complete" -_msg "Stapling notarization to the package" -if xcrun stapler staple "$PACKAGE" > "$CMD_OUTPUT"; then +_msg "Stapling Notarization to the Package" +if xcrun stapler staple "$PACKAGE" > "$NOTARIZE_LOG"; then _success else _failure diff --git a/pkg/macos/package.sh b/pkg/macos/package.sh index bb1eaca61e31..6c5a2acb7b56 100755 --- a/pkg/macos/package.sh +++ b/pkg/macos/package.sh @@ -55,6 +55,10 @@ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" DIST_XML="$SCRIPT_DIR/distribution.xml" BUILD_DIR="$SCRIPT_DIR/build" CMD_OUTPUT=$(mktemp -t cmd_log.XXX) +SCRIPTS_DIR="$SCRIPT_DIR/dist_scripts" +# Get the python version from the relenv python +BLD_PY_BIN="$BUILD_DIR/opt/salt/bin/python3" +PY_VER=$($BLD_PY_BIN -c 'import sys; print(".".join(map(str, sys.version_info[:2])))') #------------------------------------------------------------------------------- # Functions @@ -71,7 +75,7 @@ _usage() { echo "" echo " -h, --help this message" echo " -v, --version version of Salt display in the package" - echo " -n, 
--nightly don't sign the package" + echo " -s, --sign Sign the package" echo "" echo " To build the Salt package:" echo " example: $0 3006.1-1" @@ -105,6 +109,7 @@ _failure() { #------------------------------------------------------------------------------- # Get Parameters #------------------------------------------------------------------------------- +SIGN=0 while true; do if [[ -z "$1" ]]; then break; fi case "$1" in @@ -112,8 +117,8 @@ while true; do _usage exit 0 ;; - -n | --nightly ) - NIGHTLY=1 + -s | --sign ) + SIGN=1 shift ;; -v | --version ) @@ -230,6 +235,35 @@ else _failure fi +if [ -d "$SCRIPTS_DIR" ]; then + _msg "Removing existing scripts directory" + rm -f "$SCRIPTS_DIR" + if ! [ -d "$SCRIPTS_DIR" ]; then + _success + else + _failure + fi +fi + +_msg "Creating scripts directory" +cp -r "$SCRIPT_DIR/pkg-scripts" "$SCRIPTS_DIR" +if [ -d "$SCRIPTS_DIR" ]; then + _success +else + CMD_OUTPUT="Failed to copy: $SCRIPTS_DIR" + _failure +fi + +_msg "Setting python version for preinstall" +SED_STR="s/@PY_VER@/$PY_VER/g" +sed -i "" "$SED_STR" "$SCRIPTS_DIR/preinstall" +if grep -q "$PY_VER" "$SCRIPTS_DIR/preinstall"; then + _success +else + CMD_OUTPUT="Failed to set: $PY_VER" + _failure +fi + #------------------------------------------------------------------------------- # Build and Sign the Package #------------------------------------------------------------------------------- @@ -238,7 +272,7 @@ _msg "Building the source package" # Build the src package FILE="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg" if pkgbuild --root="$BUILD_DIR" \ - --scripts="$SCRIPT_DIR/pkg-scripts" \ + --scripts="$SCRIPTS_DIR" \ --identifier=com.saltstack.salt \ --version="$VERSION" \ --ownership=recommended \ @@ -249,17 +283,18 @@ else fi -if [ -z "${NIGHTLY}" ]; then +PKG_FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH.pkg" +if [ "${SIGN}" -eq 1 ]; then _msg "Building the product package (signed)" # This is not a nightly build, so we want to sign it - 
FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH-signed.pkg" + FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH.pkg" if productbuild --resources="$SCRIPT_DIR/pkg-resources" \ --distribution="$DIST_XML" \ --package-path="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg" \ --version="$VERSION" \ --sign "$DEV_INSTALL_CERT" \ --timestamp \ - "$FILE" > "$CMD_OUTPUT" 2>&1; then + "$PKG_FILE" > "$CMD_OUTPUT" 2>&1; then _success else _failure @@ -267,12 +302,11 @@ if [ -z "${NIGHTLY}" ]; then else _msg "Building the product package (unsigned)" # This is a nightly build, so we don't sign it - FILE="$SCRIPT_DIR/salt-$VERSION-py3-$CPU_ARCH-unsigned.pkg" if productbuild --resources="$SCRIPT_DIR/pkg-resources" \ --distribution="$DIST_XML" \ --package-path="$SCRIPT_DIR/salt-src-$VERSION-py3-$CPU_ARCH.pkg" \ --version="$VERSION" \ - "$FILE" > "$CMD_OUTPUT" 2>&1; then + "$PKG_FILE" > "$CMD_OUTPUT" 2>&1; then _success else _failure diff --git a/pkg/macos/pkg-scripts/preinstall b/pkg/macos/pkg-scripts/preinstall index 0dd2cf06abe2..ac66cb9d3c2c 100755 --- a/pkg/macos/pkg-scripts/preinstall +++ b/pkg/macos/pkg-scripts/preinstall @@ -96,12 +96,22 @@ if [ -L "$SBIN_DIR/salt-config" ]; then fi #------------------------------------------------------------------------------- -# Remove the $INSTALL_DIR directory +# Remove folders and files from the INSTALL_DIR +# Don't remove extras-3.## +# The part wrapped in `@` will be replaced by the correct version of python +# during packaging using SED #------------------------------------------------------------------------------- -if [ -d "$INSTALL_DIR" ]; then - log "Cleanup: Removing $INSTALL_DIR" - rm -rf "$INSTALL_DIR" - log "Cleanup: Removed Successfully" +for dir in "$INSTALL_DIR"/*/; do + if [[ "$dir" != *"extras-@PY_VER@"* ]]; then + log "Cleanup: Removing $dir" + rm -rf "$dir" + fi +done +log "Cleanup: Removed Directories Successfully" + +if [ -f "$INSTALL_DIR/salt-minion" ]; then + find $INSTALL_DIR -maxdepth 1 -type f -delete + log "Cleanup: 
Removed Files Successfully" fi #------------------------------------------------------------------------------- diff --git a/pkg/rpm/salt.spec b/pkg/rpm/salt.spec index 23ca707fbdb0..9bb75580e03a 100644 --- a/pkg/rpm/salt.spec +++ b/pkg/rpm/salt.spec @@ -14,6 +14,16 @@ %global __requires_exclude_from ^.*\\.so.*$ %define _source_payload w2.gzdio %define _binary_payload w2.gzdio +%define _SALT_GROUP salt +%define _SALT_USER salt +%define _SALT_NAME Salt +%define _SALT_HOME /opt/saltstack/salt + +# Disable debugsource template +%define _debugsource_template %{nil} + +# Needed for packages built from source. +%define _unpackaged_files_terminate_build 0 # Disable python bytecompile for MANY reasons %global __os_install_post %(echo '%{__os_install_post}' | sed -e 's!/usr/lib[^[:space:]]*/brp-python-bytecompile[[:space:]].*$!!g') @@ -21,13 +31,15 @@ %define fish_dir %{_datadir}/fish/vendor_functions.d Name: salt -Version: 3006.0~rc1 +Version: 3006.5 Release: 0 Summary: A parallel remote execution system Group: System Environment/Daemons License: ASL 2.0 URL: https://saltproject.io/ +Provides: salt = %{version} +Obsoletes: salt3 < 3006 BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) @@ -38,22 +50,25 @@ Requires: dmidecode Requires: pciutils Requires: which Requires: openssl +Requires: /usr/sbin/usermod +Requires: /usr/sbin/groupadd +Requires: /usr/sbin/useradd - -%if 0%{?systemd_preun:1} -Requires(post): systemd-units -Requires(preun): systemd-units -Requires(postun): systemd-units -%endif - -BuildRequires: systemd-units BuildRequires: python3 BuildRequires: python3-pip BuildRequires: openssl BuildRequires: git + +# rhel is not defined on all rpm based distros. 
+%if %{?rhel:1}%{!?rhel:0} %if %{rhel} >= 9 BuildRequires: libxcrypt-compat %endif +%endif + +# Build debuginfo package +%debug_package +%_no_recompute_build_ids 1 %description Salt is a distributed remote execution system used to execute commands and @@ -68,6 +83,8 @@ servers, handle them quickly and through a simple and manageable interface. Summary: Management component for salt, a parallel remote execution system Group: System Environment/Daemons Requires: %{name} = %{version}-%{release} +Provides: salt-master = %{version} +Obsoletes: salt3-master < 3006 %description master The Salt master is the central server to which all minions connect. @@ -77,6 +94,8 @@ The Salt master is the central server to which all minions connect. Summary: Client component for Salt, a parallel remote execution system Group: System Environment/Daemons Requires: %{name} = %{version}-%{release} +Provides: salt-minion = %{version} +Obsoletes: salt3-minion < 3006 %description minion The Salt minion is the agent component of Salt. It listens for instructions @@ -87,6 +106,8 @@ from the master, runs jobs, and returns results back to the master. Summary: Master-of-master component for Salt, a parallel remote execution system Group: System Environment/Daemons Requires: %{name}-master = %{version}-%{release} +Provides: salt-syndic = %{version} +Obsoletes: salt3-syndic < 3006 %description syndic The Salt syndic is a master daemon which can receive instruction from a @@ -98,6 +119,8 @@ infrastructure. Summary: REST API for Salt, a parallel remote execution system Group: Applications/System Requires: %{name}-master = %{version}-%{release} +Provides: salt-api = %{version} +Obsoletes: salt3-api < 3006 %description api salt-api provides a REST interface to the Salt master. @@ -107,6 +130,8 @@ salt-api provides a REST interface to the Salt master. 
Summary: Cloud provisioner for Salt, a parallel remote execution system Group: Applications/System Requires: %{name}-master = %{version}-%{release} +Provides: salt-cloud = %{version} +Obsoletes: salt3-cloud < 3006 %description cloud The salt-cloud tool provisions new cloud VMs, installs salt-minion on them, and @@ -117,6 +142,8 @@ adds them to the master's collection of controllable minions. Summary: Agentless SSH-based version of Salt, a parallel remote execution system Group: Applications/System Requires: %{name} = %{version}-%{release} +Provides: salt-ssh = %{version} +Obsoletes: salt3-ssh < 3006 %description ssh The salt-ssh tool can run remote execution functions and states without the use @@ -135,27 +162,28 @@ mkdir -p $RPM_BUILD_DIR/build cd $RPM_BUILD_DIR %if "%{getenv:SALT_ONEDIR_ARCHIVE}" == "" + export PIP_CONSTRAINT=%{_salt_src}/requirements/constraints.txt + export FETCH_RELENV_VERSION=${SALT_RELENV_VERSION} python3 -m venv --clear --copies build/venv - build/venv/bin/python3 -m pip install relenv - build/venv/bin/relenv fetch - build/venv/bin/relenv toolchain fetch - build/venv/bin/relenv create build/salt - build/salt/bin/python3 -m pip install "pip>=22.3.1,<23.0" "setuptools>=65.6.3,<66" "wheel" - export PY=$(build/salt/bin/python3 -c 'import sys; sys.stdout.write("{}.{}".format(*sys.version_info)); sys.stdout.flush()') - build/salt/bin/python3 -m pip install -r %{_salt_src}/requirements/static/pkg/py${PY}/linux.txt + build/venv/bin/python3 -m pip install relenv==${SALT_RELENV_VERSION} + export FETCH_RELENV_VERSION=${SALT_RELENV_VERSION} + export PY=$(build/venv/bin/python3 -c 'import sys; sys.stdout.write("{}.{}".format(*sys.version_info)); sys.stdout.flush()') + build/venv/bin/python3 -m pip install -r %{_salt_src}/requirements/static/ci/py${PY}/tools.txt + build/venv/bin/relenv fetch --arch=${SALT_PACKAGE_ARCH} --python=${SALT_PYTHON_VERSION} + build/venv/bin/relenv toolchain fetch --arch=${SALT_PACKAGE_ARCH} + cd %{_salt_src} + 
$RPM_BUILD_DIR/build/venv/bin/tools pkg build onedir-dependencies --arch ${SALT_PACKAGE_ARCH} --relenv-version=${SALT_RELENV_VERSION} --python-version ${SALT_PYTHON_VERSION} --package-name $RPM_BUILD_DIR/build/salt --platform linux # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in # the /bin directory - find build/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@":g' {} \; + find $RPM_BUILD_DIR/build/salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$(dirname $(readlink -f $0))/python3" "$0" "$@":g' {} \; - export USE_STATIC_REQUIREMENTS=1 - export RELENV_PIP_DIR=1 - build/salt/bin/python3 -m pip install --no-warn-script-location %{_salt_src} + $RPM_BUILD_DIR/build/venv/bin/tools pkg build salt-onedir . --package-name $RPM_BUILD_DIR/build/salt --platform linux + $RPM_BUILD_DIR/build/venv/bin/tools pkg pre-archive-cleanup --pkg $RPM_BUILD_DIR/build/salt + + # Generate master config + sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master - build/salt/bin/python3 -m venv --clear --copies build/tools - build/tools/bin/python3 -m pip install -r %{_salt_src}/requirements/static/ci/py${PY}/tools.txt - cd %{_salt_src} - $RPM_BUILD_DIR/build/tools/bin/tools pkg pre-archive-cleanup --pkg $RPM_BUILD_DIR/build/salt %else # The relenv onedir is being provided, all setup up until Salt is installed # is expected to be done @@ -165,6 +193,9 @@ cd $RPM_BUILD_DIR # Fix any hardcoded paths to the relenv python binary on any of the scripts installed in the /bin directory find salt/bin/ -type f -exec sed -i 's:#!/\(.*\)salt/bin/python3:#!/bin/sh\n"exec" "$$(dirname $$(readlink -f $$0))/python3" "$$0" "$$@":g' {} \; + # Generate master config + sed 's/#user: root/user: salt/g' %{_salt_src}/conf/master > $RPM_BUILD_DIR/build/master + cd $RPM_BUILD_DIR %endif @@ -176,12 +207,27 @@ cp -R $RPM_BUILD_DIR/build/salt 
%{buildroot}/opt/saltstack/ # Add some directories install -d -m 0755 %{buildroot}%{_var}/log/salt +install -d -m 0755 %{buildroot}%{_var}/run/salt +install -d -m 0755 %{buildroot}%{_var}/run/salt/master install -d -m 0755 %{buildroot}%{_var}/cache/salt +install -Dd -m 0750 %{buildroot}%{_var}/cache/salt/master +install -Dd -m 0750 %{buildroot}%{_var}/cache/salt/minion +install -Dd -m 0750 %{buildroot}%{_var}/cache/salt/master/jobs +install -Dd -m 0750 %{buildroot}%{_var}/cache/salt/master/proc +install -Dd -m 0750 %{buildroot}%{_var}/cache/salt/master/queues +install -Dd -m 0750 %{buildroot}%{_var}/cache/salt/master/roots +install -Dd -m 0750 %{buildroot}%{_var}/cache/salt/master/syndics +install -Dd -m 0750 %{buildroot}%{_var}/cache/salt/master/tokens install -d -m 0755 %{buildroot}%{_sysconfdir}/salt install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/master.d install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/minion.d install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/pki install -d -m 0700 %{buildroot}%{_sysconfdir}/salt/pki/master +install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/master/minions +install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/master/minions_autosign +install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/master/minions_denied +install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/master/minions_pre +install -Dd -m 0750 %{buildroot}%{_sysconfdir}/salt/pki/master/minions_rejected install -d -m 0700 %{buildroot}%{_sysconfdir}/salt/pki/minion install -d -m 0700 %{buildroot}%{_sysconfdir}/salt/cloud.conf.d install -d -m 0700 %{buildroot}%{_sysconfdir}/salt/cloud.deploy.d @@ -208,7 +254,7 @@ install -m 0755 %{buildroot}/opt/saltstack/salt/salt-pip %{buildroot}%{_bindir}/ # Add the config files install -p -m 0640 %{_salt_src}/conf/minion %{buildroot}%{_sysconfdir}/salt/minion -install -p -m 0640 %{_salt_src}/conf/master %{buildroot}%{_sysconfdir}/salt/master +install -p -m 0640 $RPM_BUILD_DIR/build/master 
%{buildroot}%{_sysconfdir}/salt/master install -p -m 0640 %{_salt_src}/conf/cloud %{buildroot}%{_sysconfdir}/salt/cloud install -p -m 0640 %{_salt_src}/conf/roster %{buildroot}%{_sysconfdir}/salt/roster install -p -m 0640 %{_salt_src}/conf/proxy %{buildroot}%{_sysconfdir}/salt/proxy @@ -224,7 +270,7 @@ install -p -m 0644 %{_salt_src}/pkg/common/salt-proxy@.service %{buildroot}%{_un # Logrotate #install -p %{SOURCE10} . mkdir -p %{buildroot}%{_sysconfdir}/logrotate.d/ -install -p -m 0644 %{_salt_src}/pkg/common/salt-common.logrotate %{buildroot}%{_sysconfdir}/logrotate.d/salt +install -p -m 0644 %{_salt_src}/pkg/common/logrotate/salt-common %{buildroot}%{_sysconfdir}/logrotate.d/salt # Bash completion mkdir -p %{buildroot}%{_sysconfdir}/bash_completion.d/ @@ -264,6 +310,7 @@ rm -rf %{buildroot} %{_sysconfdir}/bash_completion.d/salt.bash %config(noreplace) %{fish_dir}/salt*.fish %dir %{_var}/cache/salt +%dir %{_var}/run/salt %dir %{_var}/log/salt %doc %{_mandir}/man1/spm.1* %{_bindir}/spm @@ -273,8 +320,6 @@ rm -rf %{buildroot} %dir %{_sysconfdir}/salt/pki - - %files master %defattr(-,root,root) %doc %{_mandir}/man7/salt.7* @@ -292,6 +337,21 @@ rm -rf %{buildroot} %config(noreplace) %{_sysconfdir}/salt/master %dir %{_sysconfdir}/salt/master.d %config(noreplace) %{_sysconfdir}/salt/pki/master +%dir %attr(0750, salt, salt) %{_sysconfdir}/salt/pki/master/ +%dir %attr(0750, salt, salt) %{_sysconfdir}/salt/pki/master/minions/ +%dir %attr(0750, salt, salt) %{_sysconfdir}/salt/pki/master/minions_autosign/ +%dir %attr(0750, salt, salt) %{_sysconfdir}/salt/pki/master/minions_denied/ +%dir %attr(0750, salt, salt) %{_sysconfdir}/salt/pki/master/minions_pre/ +%dir %attr(0750, salt, salt) %{_sysconfdir}/salt/pki/master/minions_rejected/ +%dir %attr(0750, salt, salt) %{_var}/run/salt/master/ +%dir %attr(0750, salt, salt) %{_var}/cache/salt/master/ +%dir %attr(0750, salt, salt) %{_var}/cache/salt/master/jobs/ +%dir %attr(0750, salt, salt) %{_var}/cache/salt/master/proc/ +%dir 
%attr(0750, salt, salt) %{_var}/cache/salt/master/queues/ +%dir %attr(0750, salt, salt) %{_var}/cache/salt/master/roots/ +%dir %attr(0750, salt, salt) %{_var}/cache/salt/master/syndics/ +%dir %attr(0750, salt, salt) %{_var}/cache/salt/master/tokens/ + %files minion %defattr(-,root,root) @@ -307,18 +367,22 @@ rm -rf %{buildroot} %config(noreplace) %{_sysconfdir}/salt/proxy %config(noreplace) %{_sysconfdir}/salt/pki/minion %dir %{_sysconfdir}/salt/minion.d +%dir %attr(0750, root, root) %{_var}/cache/salt/minion/ + %files syndic %doc %{_mandir}/man1/salt-syndic.1* %{_bindir}/salt-syndic %{_unitdir}/salt-syndic.service + %files api %defattr(-,root,root) %doc %{_mandir}/man1/salt-api.1* %{_bindir}/salt-api %{_unitdir}/salt-api.service + %files cloud %doc %{_mandir}/man1/salt-cloud.1* %{_bindir}/salt-cloud @@ -329,30 +393,68 @@ rm -rf %{buildroot} %{_sysconfdir}/salt/cloud.providers.d %config(noreplace) %{_sysconfdir}/salt/cloud + %files ssh %doc %{_mandir}/man1/salt-ssh.1* %{_bindir}/salt-ssh %config(noreplace) %{_sysconfdir}/salt/roster +%pre +# create user to avoid running server as root +# 1. create group if not existing +if ! getent group %{_SALT_GROUP}; then + groupadd --system %{_SALT_GROUP} 2>/dev/null ||true +fi +# 2. create homedir if not existing +test -d %{_SALT_HOME} || mkdir -p %{_SALT_HOME} +# 3. create user if not existing +# -g %{_SALT_GROUP} \ +if ! getent passwd | grep -q "^%{_SALT_USER}:"; then + useradd --system \ + --no-create-home \ + -s /sbin/nologin \ + -g %{_SALT_GROUP} \ + %{_SALT_USER} 2>/dev/null || true +fi +# 4. adjust passwd entry +usermod -c "%{_SALT_NAME}" \ + -d %{_SALT_HOME} \ + -g %{_SALT_GROUP} \ + %{_SALT_USER} + +%pre master +# Reset permissions to fix previous installs +PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush();") +find /etc/salt /opt/saltstack/salt /var/log/salt /var/cache/salt /var/run/salt \ + \! 
\( -path /etc/salt/cloud.deploy.d\* -o -path /var/log/salt/cloud -o -path /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy\* \) -a \ + \( -user salt -o -group salt \) -exec chown root:root \{\} \; + + # assumes systemd for RHEL 7 & 8 & 9 %preun master # RHEL 9 is giving warning msg if syndic is not installed, suppress it %systemd_preun salt-syndic.service > /dev/null 2>&1 + %preun minion %systemd_preun salt-minion.service + %preun api %systemd_preun salt-api.service + %post ln -s -f /opt/saltstack/salt/spm %{_bindir}/spm ln -s -f /opt/saltstack/salt/salt-pip %{_bindir}/salt-pip +/opt/saltstack/salt/bin/python3 -m compileall -qq /opt/saltstack/salt/lib + %post cloud ln -s -f /opt/saltstack/salt/salt-cloud %{_bindir}/salt-cloud + %post master %systemd_post salt-master.service ln -s -f /opt/saltstack/salt/salt %{_bindir}/salt @@ -365,8 +467,12 @@ if [ $1 -lt 2 ]; then # ensure hmac are up to date, master or minion, rest install one or the other # key used is from openssl/crypto/fips/fips_standalone_hmac.c openssl 1.1.1k if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.'
-f 1) = "8" ]; then - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/libssl.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/libcrypto.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi @@ -384,8 +490,12 @@ if [ $1 -lt 2 ]; then # ensure hmac are up to date, master or minion, rest install one or the other # key used is from openssl/crypto/fips/fips_standalone_hmac.c openssl 1.1.1k if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' 
-f 1) = "8" ]; then - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/libssl.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libssl.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/libcrypto.so.1.1 ]; then + /bin/openssl sha256 -r -hmac orboDeJITITejsirpADONivirpUkvarP /opt/saltstack/salt/lib/libcrypto.so.1.1 | cut -d ' ' -f 1 > /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi @@ -396,14 +506,55 @@ ln -s -f /opt/saltstack/salt/salt-ssh %{_bindir}/salt-ssh %systemd_post salt-api.service ln -s -f /opt/saltstack/salt/salt-api %{_bindir}/salt-api + +%posttrans cloud +PY_VER=$(/opt/saltstack/salt/bin/python3 -c "import sys; sys.stdout.write('{}.{}'.format(*sys.version_info)); sys.stdout.flush();") +if [ ! -e "/var/log/salt/cloud" ]; then + touch /var/log/salt/cloud + chmod 640 /var/log/salt/cloud +fi +chown -R %{_SALT_USER}:%{_SALT_GROUP} /etc/salt/cloud.deploy.d /var/log/salt/cloud /opt/saltstack/salt/lib/python${PY_VER}/site-packages/salt/cloud/deploy + + +%posttrans master +if [ ! -e "/var/log/salt/master" ]; then + touch /var/log/salt/master + chmod 640 /var/log/salt/master +fi +if [ ! -e "/var/log/salt/key" ]; then + touch /var/log/salt/key + chmod 640 /var/log/salt/key +fi +chown -R %{_SALT_USER}:%{_SALT_GROUP} /etc/salt/pki/master /etc/salt/master.d /var/log/salt/master /var/log/salt/key /var/cache/salt/master /var/run/salt/master + + +%posttrans api +if [ ! 
-e "/var/log/salt/api" ]; then + touch /var/log/salt/api + chmod 640 /var/log/salt/api +fi +chown %{_SALT_USER}:%{_SALT_GROUP} /var/log/salt/api + + +%preun +if [ $1 -eq 0 ]; then + # Uninstall + find /opt/saltstack/salt -type f -name \*\.pyc -print0 | xargs --null --no-run-if-empty rm + find /opt/saltstack/salt -type d -name __pycache__ -empty -print0 | xargs --null --no-run-if-empty rmdir +fi + %postun master %systemd_postun_with_restart salt-master.service if [ $1 -eq 0 ]; then if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' -f 1) = "8" ]; then if [ -z "$(rpm -qi salt-minion | grep Name | grep salt-minion)" ]; then # uninstall and no minion running - /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/.libssl.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi fi @@ -417,8 +568,12 @@ if [ $1 -eq 0 ]; then if [ $(cat /etc/os-release | grep VERSION_ID | cut -d '=' -f 2 | sed 's/\"//g' | cut -d '.' 
-f 1) = "8" ]; then if [ -z "$(rpm -qi salt-master | grep Name | grep salt-master)" ]; then # uninstall and no master running - /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : - /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + if [ -e /opt/saltstack/salt/lib/.libssl.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libssl.so.1.1.hmac || : + fi + if [ -e /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac ]; then + /bin/rm -f /opt/saltstack/salt/lib/.libcrypto.so.1.1.hmac || : + fi fi fi fi @@ -428,6 +583,1342 @@ fi %changelog +* Tue Dec 12 2023 Salt Project Packaging - 3006.5 + +# Removed + +- Tech Debt - support for pysss removed due to functionality addition in Python 3.3 [#65029](https://github.com/saltstack/salt/issues/65029) + +# Fixed + +- Improved error message when state arguments are accidentally passed as a string [#38098](https://github.com/saltstack/salt/issues/38098) +- Allow `pip.install` to create a log file that is passed in if the parent directory is writeable [#44722](https://github.com/saltstack/salt/issues/44722) +- Fixed merging of complex pillar overrides with salt-ssh states [#59802](https://github.com/saltstack/salt/issues/59802) +- Fixed gpg pillar rendering with salt-ssh [#60002](https://github.com/saltstack/salt/issues/60002) +- Made salt-ssh states not re-render pillars unnecessarily [#62230](https://github.com/saltstack/salt/issues/62230) +- Made Salt maintain options in Debian package repo definitions [#64130](https://github.com/saltstack/salt/issues/64130) +- Migrated all [`invoke`](https://www.pyinvoke.org/) tasks to [`python-tools-scripts`](https://github.com/s0undt3ch/python-tools-scripts). 
+ + * `tasks/docs.py` -> `tools/precommit/docs.py` + * `tasks/docstrings.py` -> `tools/precommit/docstrings.py` + * `tasks/loader.py` -> `tools/precommit/loader.py` + * `tasks/filemap.py` -> `tools/precommit/filemap.py` [#64374](https://github.com/saltstack/salt/issues/64374) +- Fix salt user login shell path in Debian packages [#64377](https://github.com/saltstack/salt/issues/64377) +- Fill out lsb_distrib_xxxx (best estimate) grains if problems with retrieving lsb_release data [#64473](https://github.com/saltstack/salt/issues/64473) +- Fixed an issue in the ``file.directory`` state where the ``children_only`` keyword + argument was not being respected. [#64497](https://github.com/saltstack/salt/issues/64497) +- Move salt.ufw to correct location /etc/ufw/applications.d/ [#64572](https://github.com/saltstack/salt/issues/64572) +- Fixed salt-ssh stacktrace when retcode is not an integer [#64575](https://github.com/saltstack/salt/issues/64575) +- Fixed SSH shell seldomly fails to report any exit code [#64588](https://github.com/saltstack/salt/issues/64588) +- Fixed some issues in x509_v2 execution module private key functions [#64597](https://github.com/saltstack/salt/issues/64597) +- Fixed grp.getgrall() in utils/user.py causing performance issues [#64888](https://github.com/saltstack/salt/issues/64888) +- Fix user.list_groups omits remote groups via sssd, etc. [#64953](https://github.com/saltstack/salt/issues/64953) +- Ensure sync from _grains occurs before attempting pillar compilation in case custom grain used in pillar file [#65027](https://github.com/saltstack/salt/issues/65027) +- Moved gitfs locks to salt working dir to avoid lock wipes [#65086](https://github.com/saltstack/salt/issues/65086) +- Only attempt to create a keys directory when `--gen-keys` is passed to the `salt-key` CLI [#65093](https://github.com/saltstack/salt/issues/65093) +- Fix nonce verification, request server replies do not stomp on eachother. 
[#65114](https://github.com/saltstack/salt/issues/65114) +- speed up yumpkg list_pkgs by not requiring digest or signature verification on lookup. [#65152](https://github.com/saltstack/salt/issues/65152) +- Fix pkg.latest failing on windows for winrepo packages where the package is already up to date [#65165](https://github.com/saltstack/salt/issues/65165) +- Ensure __kwarg__ is preserved when checking for kwargs. This change affects proxy minions when used with Deltaproxy, which had kwargs popped when targeting multiple minions id. [#65179](https://github.com/saltstack/salt/issues/65179) +- Fixes traceback when state id is an int in a reactor SLS file. [#65210](https://github.com/saltstack/salt/issues/65210) +- Install logrotate config as /etc/logrotate.d/salt-common for Debian packages + Remove broken /etc/logrotate.d/salt directory from 3006.3 if it exists. [#65231](https://github.com/saltstack/salt/issues/65231) +- Use ``sha256`` as the default ``hash_type``. It has been the default since Salt v2016.9 [#65287](https://github.com/saltstack/salt/issues/65287) +- Preserve ownership on log rotation [#65288](https://github.com/saltstack/salt/issues/65288) +- Ensure that the correct value of jid_inclue is passed if the argument is included in the passed keyword arguments. [#65302](https://github.com/saltstack/salt/issues/65302) +- Uprade relenv to 0.14.2 + - Update openssl to address CVE-2023-5363. + - Fix bug in openssl setup when openssl binary can't be found. + - Add M1 mac support. 
[#65316](https://github.com/saltstack/salt/issues/65316) +- Fix regex for filespec adding/deleting fcontext policy in selinux [#65340](https://github.com/saltstack/salt/issues/65340) +- Ensure CLI options take priority over Saltfile options [#65358](https://github.com/saltstack/salt/issues/65358) +- Test mode for state function `saltmod.wheel` no longer sets `result` to `(None,)` [#65372](https://github.com/saltstack/salt/issues/65372) +- Client only process events which tag conforms to an event return. [#65400](https://github.com/saltstack/salt/issues/65400) +- Fixes an issue setting user or machine policy on Windows when the Group Policy + directory is missing [#65411](https://github.com/saltstack/salt/issues/65411) +- Fix regression in file module which was not re-using a file client. [#65450](https://github.com/saltstack/salt/issues/65450) +- pip.installed state will now properly fail when a specified user does not exist [#65458](https://github.com/saltstack/salt/issues/65458) +- Publish channel connect callback method properly closes its request channel. [#65464](https://github.com/saltstack/salt/issues/65464) +- Ensured the pillar in SSH wrapper modules is the same as the one used in template rendering when overrides are passed [#65483](https://github.com/saltstack/salt/issues/65483) +- Fix file.comment ignore_missing not working with multiline char [#65501](https://github.com/saltstack/salt/issues/65501) +- Warn when an un-closed transport client is being garbage collected. [#65554](https://github.com/saltstack/salt/issues/65554) +- Only generate the HMAC's for ``libssl.so.1.1`` and ``libcrypto.so.1.1`` if those files exist.
[#65581](https://github.com/saltstack/salt/issues/65581) +- Fixed an issue where Salt Cloud would fail if it could not delete lingering + PAexec binaries [#65584](https://github.com/saltstack/salt/issues/65584) + +# Added + +- Added Salt support for Debian 12 [#64223](https://github.com/saltstack/salt/issues/64223) +- Added Salt support for Amazon Linux 2023 [#64455](https://github.com/saltstack/salt/issues/64455) + +# Security + +- Bump to `cryptography==41.0.4` due to https://github.com/advisories/GHSA-v8gr-m533-ghj9 [#65268](https://github.com/saltstack/salt/issues/65268) +- Bump to `cryptography==41.0.7` due to https://github.com/advisories/GHSA-jfhm-5ghh-2f97 [#65643](https://github.com/saltstack/salt/issues/65643) + + +* Mon Oct 16 2023 Salt Project Packaging - 3006.4 + +# Security + +- Fix CVE-2023-34049 by ensuring we do not use a predictable name for the script and correctly check returncode of scp command. + This only impacts salt-ssh users using the pre-flight option. [#cve-2023-34049](https://github.com/saltstack/salt/issues/cve-2023-34049) +- Update to `gitpython>=3.1.35` due to https://github.com/advisories/GHSA-wfm5-v35h-vwf4 and https://github.com/advisories/GHSA-cwvm-v4w8-q58c [#65163](https://github.com/saltstack/salt/issues/65163) +- Bump to `cryptography==41.0.4` due to https://github.com/advisories/GHSA-v8gr-m533-ghj9 [#65268](https://github.com/saltstack/salt/issues/65268) +- Upgrade relenv to 0.13.12 to address CVE-2023-4807 [#65316](https://github.com/saltstack/salt/issues/65316) +- Bump to `urllib3==1.26.17` or `urllib3==2.0.6` due to https://github.com/advisories/GHSA-v845-jxx5-vc9f [#65334](https://github.com/saltstack/salt/issues/65334) +- Bump to `gitpython==3.1.37` due to https://github.com/advisories/GHSA-cwvm-v4w8-q58c [#65383](https://github.com/saltstack/salt/issues/65383) + + +* Wed Sep 06 2023 Salt Project Packaging - 3006.3 + +# Removed + +- Fedora 36 support was removed because it reached EOL 
[#64315](https://github.com/saltstack/salt/issues/64315) +- Handle deprecation warnings: + + * Switch to `FullArgSpec` since Py 3.11 no longer has `ArgSpec`, deprecated since Py 3.0 + * Stop using the deprecated `cgi` module + * Stop using the deprecated `pipes` module + * Stop using the deprecated `imp` module [#64553](https://github.com/saltstack/salt/issues/64553) + +# Changed + +- Replace libnacl with PyNaCl [#64372](https://github.com/saltstack/salt/issues/64372) +- Don't hardcode the python version on the Salt Package tests and on the `pkg/debian/salt-cloud.postinst` file [#64553](https://github.com/saltstack/salt/issues/64553) +- Some more deprecated code fixes: + + * Stop using the deprecated `locale.getdefaultlocale()` function + * Stop accessing deprecated attributes + * `pathlib.Path.__enter__()` usage is deprecated and not required, a no-op [#64565](https://github.com/saltstack/salt/issues/64565) +- Bump to `pyyaml==6.0.1` due to https://github.com/yaml/pyyaml/issues/601 and address lint issues [#64657](https://github.com/saltstack/salt/issues/64657) + +# Fixed + +- Fix for assume role when used salt-cloud to create aws ec2. [#52501](https://github.com/saltstack/salt/issues/52501) +- fixes aptpkg module by checking for blank comps. [#58667](https://github.com/saltstack/salt/issues/58667) +- `wheel.file_roots.find` is now able to find files in subdirectories of the roots. [#59800](https://github.com/saltstack/salt/issues/59800) +- pkg.latest no longer fails when multiple versions are reported to be installed (e.g. updating the kernel) [#60931](https://github.com/saltstack/salt/issues/60931) +- Do not update the credentials dictionary in `utils/aws.py` while iterating over it, and use the correct delete functionality [#61049](https://github.com/saltstack/salt/issues/61049) +- fixed runner not having a proper exit code when runner modules throw an exception. 
[#61173](https://github.com/saltstack/salt/issues/61173) +- `pip.list_all_versions` now works with `index_url` and `extra_index_url` [#61610](https://github.com/saltstack/salt/issues/61610) +- speed up file.recurse by using prefix with cp.list_master_dir and remove an un-needed loop. [#61998](https://github.com/saltstack/salt/issues/61998) +- Preserve test=True condition while running sub states. [#62590](https://github.com/saltstack/salt/issues/62590) +- Job returns are only sent to originating master [#62834](https://github.com/saltstack/salt/issues/62834) +- Fixes an issue with failing subsequent state runs with the lgpo state module. + The ``lgpo.get_polcy`` function now returns all boolean settings. [#63296](https://github.com/saltstack/salt/issues/63296) +- Fix SELinux get policy with trailing whitespace [#63336](https://github.com/saltstack/salt/issues/63336) +- Fixes an issue with boolean settings not being reported after being set. The + ``lgpo.get_polcy`` function now returns all boolean settings. [#63473](https://github.com/saltstack/salt/issues/63473) +- Ensure body is returned when salt.utils.http returns something other than 200 with tornado backend. [#63557](https://github.com/saltstack/salt/issues/63557) +- Allow long running pillar and file client requests to finish using request_channel_timeout and request_channel_tries minion config. [#63824](https://github.com/saltstack/salt/issues/63824) +- Fix state_queue type checking to allow int values [#64122](https://github.com/saltstack/salt/issues/64122) +- Call global logger when catching pip.list exceptions in states.pip.installed + Rename global logger `log` to `logger` inside pip_state [#64169](https://github.com/saltstack/salt/issues/64169) +- Fixes permissions created by the Debian and RPM packages for the salt user. 
+ + The salt user created by the Debian and RPM packages to run the salt-master process, was previously given ownership of various directories in a way which compromised the benefits of running the salt-master process as a non-root user. + + This fix sets the salt user to only have write access to those files and + directories required for the salt-master process to run. [#64193](https://github.com/saltstack/salt/issues/64193) +- Fix user.present state when groups is unset to ensure the groups are unchanged, as documented. [#64211](https://github.com/saltstack/salt/issues/64211) +- Fixes issue with MasterMinion class loading configuration from `/etc/salt/minion.d/*.conf. + + The MasterMinion class (used for running orchestraions on master and other functionality) was incorrectly loading configuration from `/etc/salt/minion.d/*.conf`, when it should only load configuration from `/etc/salt/master` and `/etc/salt/master.d/*.conf`. [#64219](https://github.com/saltstack/salt/issues/64219) +- Fixed issue in mac_user.enable_auto_login that caused the user's keychain to be reset at each boot [#64226](https://github.com/saltstack/salt/issues/64226) +- Fixed KeyError in logs when running a state that fails. [#64231](https://github.com/saltstack/salt/issues/64231) +- Fixed x509_v2 `create_private_key`/`create_crl` unknown kwargs: __pub_fun... [#64232](https://github.com/saltstack/salt/issues/64232) +- remove the hard coded python version in error. [#64237](https://github.com/saltstack/salt/issues/64237) +- `salt-pip` now properly errors out when being called from a non `onedir` environment. [#64249](https://github.com/saltstack/salt/issues/64249) +- Ensure we return an error when adding the key fails in the pkgrepo state for debian hosts. 
[#64253](https://github.com/saltstack/salt/issues/64253) +- Fixed file client private attribute reference on `SaltMakoTemplateLookup` [#64280](https://github.com/saltstack/salt/issues/64280) +- Fix pkgrepo.absent failures on apt-based systems when repo either a) contains a + trailing slash, or b) there is an arch mismatch. [#64286](https://github.com/saltstack/salt/issues/64286) +- Fix detection of Salt codename by "salt_version" execution module [#64306](https://github.com/saltstack/salt/issues/64306) +- Ensure selinux values are handled lowercase [#64318](https://github.com/saltstack/salt/issues/64318) +- Remove the `clr.AddReference`, it is causing an `Illegal characters in path` exception [#64339](https://github.com/saltstack/salt/issues/64339) +- Update `pkg.group_installed` state to support repo options [#64348](https://github.com/saltstack/salt/issues/64348) +- Fix salt user login shell path in Debian packages [#64377](https://github.com/saltstack/salt/issues/64377) +- Allow for multiple user's keys presented when authenticating, for example: root, salt, etc. [#64398](https://github.com/saltstack/salt/issues/64398) +- Fixed an issue with ``lgpo_reg`` where existing entries for the same key in + ``Registry.pol`` were being overwritten in subsequent runs if the value name in + the subesequent run was contained in the existing value name. 
For example, a + key named ``SetUpdateNotificationLevel`` would be overwritten by a subsequent + run attempting to set ``UpdateNotificationLevel`` [#64401](https://github.com/saltstack/salt/issues/64401) +- Add search for %ProgramData%\Chocolatey\choco.exe to determine if Chocolatey is installed or not [#64427](https://github.com/saltstack/salt/issues/64427) +- Fix regression for user.present on handling groups with dupe GIDs [#64430](https://github.com/saltstack/salt/issues/64430) +- Fix inconsistent use of args in ssh_auth.managed [#64442](https://github.com/saltstack/salt/issues/64442) +- Ensure we raise an error when the name argument is invalid in pkgrepo.managed state for systems using apt. [#64451](https://github.com/saltstack/salt/issues/64451) +- Fix file.symlink will not replace/update existing symlink [#64477](https://github.com/saltstack/salt/issues/64477) +- Fixed salt-ssh state.* commands returning retcode 0 when state/pillar rendering fails [#64514](https://github.com/saltstack/salt/issues/64514) +- Fix pkg.install when using a port in the url. [#64516](https://github.com/saltstack/salt/issues/64516) +- `win_pkg` Fixes an issue running `pkg.install` with `version=latest` where the + new installer would not be cached if there was already an installer present + with the same name. [#64519](https://github.com/saltstack/salt/issues/64519) +- Added a `test:full` label in the salt repository, which, when selected, will force a full test run. [#64539](https://github.com/saltstack/salt/issues/64539) +- Syndic's async_req_channel uses the asynchronous version of request channel [#64552](https://github.com/saltstack/salt/issues/64552) +- Ensure runners properly save information to job cache.
[#64570](https://github.com/saltstack/salt/issues/64570) +- Added salt.ufw to salt-master install on Debian and Ubuntu [#64572](https://github.com/saltstack/salt/issues/64572) +- Added support for Chocolatey 2.0.0+ while maintaining support for older versions [#64622](https://github.com/saltstack/salt/issues/64622) +- Updated semanage fcontext to use --modify if context already exists when adding context [#64625](https://github.com/saltstack/salt/issues/64625) +- Preserve request client socket between requests. [#64627](https://github.com/saltstack/salt/issues/64627) +- Show user friendly message when pillars timeout [#64651](https://github.com/saltstack/salt/issues/64651) +- File client timeouts during jobs show user friendly errors instead of tracebacks [#64653](https://github.com/saltstack/salt/issues/64653) +- SaltClientError does not log a traceback on minions, we expect these to happen so a user friendly log is shown. [#64729](https://github.com/saltstack/salt/issues/64729) +- Look in location salt is running from, this accounts for running from an unpacked onedir file that has not been installed. [#64877](https://github.com/saltstack/salt/issues/64877) +- Preserve credentials on spawning platforms, minions no longer re-authenticate + with every job when using `multiprocessing=True`. [#64914](https://github.com/saltstack/salt/issues/64914) +- Fixed uninstaller to not remove the `salt` directory by default. This allows + the `extras-3.##` folder to persist so salt-pip dependencies are not wiped out + during an upgrade. [#64957](https://github.com/saltstack/salt/issues/64957) +- fix msteams by adding the missing header that Microsoft is now enforcing. [#64973](https://github.com/saltstack/salt/issues/64973) +- Fix __env__ and improve cache cleaning see more info at pull #65017.
[#65002](https://github.com/saltstack/salt/issues/65002) +- Better error message on inconsistent decoded payload [#65020](https://github.com/saltstack/salt/issues/65020) +- Handle permissions access error when calling `lsb_release` with the salt user [#65024](https://github.com/saltstack/salt/issues/65024) +- Allow schedule state module to update schedule when the minion is offline. [#65033](https://github.com/saltstack/salt/issues/65033) +- Fixed creation of wildcard DNS in SAN in `x509_v2` [#65072](https://github.com/saltstack/salt/issues/65072) +- The macOS installer no longer removes the extras directory [#65073](https://github.com/saltstack/salt/issues/65073) + +# Added + +- Added a script to automate setting up a 2nd minion in a user context on Windows [#64439](https://github.com/saltstack/salt/issues/64439) +- Several fixes to the CI workflow: + + * Don't override the `on` Jinja block on the `ci.yaml` template. This enables reacting to labels getting added/removed + to/from pull requests. + * Switch to using `tools` and re-use the event payload available instead of querying the GH API again to get the pull + request labels + * Concentrate test selection by labels to a single place + * Enable code coverage on pull-requests by setting the `test:coverage` label [#64547](https://github.com/saltstack/salt/issues/64547) + +# Security + +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. 
+ + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r [#64595](https://github.com/saltstack/salt/issues/64595) +- Bump to `aiohttp==3.8.5` due to https://github.com/advisories/GHSA-45c4-8wx5-qw6w [#64687](https://github.com/saltstack/salt/issues/64687) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) +- Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) +- Update to `gitpython>=3.1.32` due to https://github.com/advisories/GHSA-pr76-5cm5-w9cj [#64988](https://github.com/saltstack/salt/issues/64988) + + +* Wed Aug 09 2023 Salt Project Packaging - 3006.2 + +# Fixed + +- In scenarios where PythonNet fails to load, Salt will now fall back to WMI for + gathering grains information [#64897](https://github.com/saltstack/salt/issues/64897) + +# Security + +- fix CVE-2023-20897 by catching exception instead of letting exception disrupt connection [#cve-2023-20897](https://github.com/saltstack/salt/issues/cve-2023-20897) +- Fixed gitfs cachedir_basename to avoid hash collisions. Added MP Lock to gitfs. These changes should stop race conditions. 
[#cve-2023-20898](https://github.com/saltstack/salt/issues/cve-2023-20898) +- Upgrade to `requests==2.31.0` + + Due to: + * https://github.com/advisories/GHSA-j8r2-6x86-q33q [#64336](https://github.com/saltstack/salt/issues/64336) +- Upgrade to `cryptography==41.0.3`(and therefor `pyopenssl==23.2.0` due to https://github.com/advisories/GHSA-jm77-qphf-c4w8) + + This only really impacts pip installs of Salt and the windows onedir since the linux and macos onedir build every package dependency from source, not from pre-existing wheels. + + Also resolves the following cryptography advisories: + + Due to: + * https://github.com/advisories/GHSA-5cpq-8wj7-hf2v + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + + There is no security upgrade available for Py3.5 [#64595](https://github.com/saltstack/salt/issues/64595) +- Bump to `certifi==2023.07.22` due to https://github.com/advisories/GHSA-xqr8-7jwr-rhp7 [#64718](https://github.com/saltstack/salt/issues/64718) +- Upgrade `relenv` to `0.13.2` and Python to `3.10.12` + + Addresses multiple CVEs in Python's dependencies: https://docs.python.org/release/3.10.12/whatsnew/changelog.html#python-3-10-12 [#64719](https://github.com/saltstack/salt/issues/64719) + + +* Fri May 05 2023 Salt Project Packaging - 3006.1 + +# Fixed + +- Check that the return data from the cloud create function is a dictionary before attempting to pull values out. [#61236](https://github.com/saltstack/salt/issues/61236) +- Ensure NamedLoaderContext's have their value() used if passing to other modules [#62477](https://github.com/saltstack/salt/issues/62477) +- add documentation note about reactor state ids. 
[#63589](https://github.com/saltstack/salt/issues/63589) +- Added support for ``test=True`` to the ``file.cached`` state module [#63785](https://github.com/saltstack/salt/issues/63785) +- Updated `source_hash` documentation and added a log warning when `source_hash` is used with a source other than `http`, `https` and `ftp`. [#63810](https://github.com/saltstack/salt/issues/63810) +- Fixed clear pillar cache on every highstate and added clean_pillar_cache=False to saltutil functions. [#64081](https://github.com/saltstack/salt/issues/64081) +- Fix dmsetup device names with hyphen being picked up. [#64082](https://github.com/saltstack/salt/issues/64082) +- Update all the scheduler functions to include a fire_event argument which will determine whether to fire the completion event onto the event bus. + This event is only used when these functions are called via the schedule execution modules. + Update all the calls to the schedule related functions in the deltaproxy proxy minion to include fire_event=False, as the event bus is not available when these functions are called. [#64102](https://github.com/saltstack/salt/issues/64102), [#64103](https://github.com/saltstack/salt/issues/64103) +- Default to a 0 timeout if none is given for the terraform roster to avoid `-o ConnectTimeout=None` when using `salt-ssh` [#64109](https://github.com/saltstack/salt/issues/64109) +- Disable class level caching of the file client on `SaltCacheLoader` and properly use context managers to take care of initialization and termination of the file client. [#64111](https://github.com/saltstack/salt/issues/64111) +- Fixed several file client uses which were not properly terminating it by switching to using it as a context manager + whenever possible or making sure `.destroy()` was called when using a context manager was not possible. [#64113](https://github.com/saltstack/salt/issues/64113) +- Fix running setup.py when passing in --salt-config-dir and --salt-cache-dir arguments. 
[#64114](https://github.com/saltstack/salt/issues/64114) +- Moved /etc/salt/proxy and /lib/systemd/system/salt-proxy@.service to the salt-minion DEB package [#64117](https://github.com/saltstack/salt/issues/64117) +- Stop passing `**kwargs` and be explicit about the keyword arguments to pass, namely, to `cp.cache_file` call in `salt.states.pkg` [#64118](https://github.com/saltstack/salt/issues/64118) +- lgpo_reg.set_value now returns ``True`` on success instead of ``None`` [#64126](https://github.com/saltstack/salt/issues/64126) +- Make salt user's home /opt/saltstack/salt [#64141](https://github.com/saltstack/salt/issues/64141) +- Fix cmd.run doesn't output changes in test mode [#64150](https://github.com/saltstack/salt/issues/64150) +- Move salt user and group creation to common package [#64158](https://github.com/saltstack/salt/issues/64158) +- Fixed issue in salt-cloud so that multiple masters specified in the cloud + are written to the minion config properly [#64170](https://github.com/saltstack/salt/issues/64170) +- Make sure the `salt-ssh` CLI calls its `fsclient.destroy()` method when done. [#64184](https://github.com/saltstack/salt/issues/64184) +- Stop using the deprecated `salt.transport.client` imports. [#64186](https://github.com/saltstack/salt/issues/64186) +- Add a `.pth` to the Salt onedir env to ensure packages in extras are importable. Bump relenv to 0.12.3. [#64192](https://github.com/saltstack/salt/issues/64192) +- Fix ``lgpo_reg`` state to work with User policy [#64200](https://github.com/saltstack/salt/issues/64200) +- Cloud deployment directories are owned by salt user and group [#64204](https://github.com/saltstack/salt/issues/64204) +- ``lgpo_reg`` state now enforces and reports changes to the registry [#64222](https://github.com/saltstack/salt/issues/64222) + + +* Tue Apr 18 2023 Salt Project Packaging - 3006.0 + +# Removed + +- Remove and deprecate the __orchestration__ key from salt.runner and salt.wheel return data.
To get it back, set features.enable_deprecated_orchestration_flag master configuration option to True. The flag will be completely removed in Salt 3008 Argon. [#59917](https://github.com/saltstack/salt/issues/59917) +- Removed distutils and replaced with setuptools, given distutils is deprecated and removed in Python 3.12 [#60476](https://github.com/saltstack/salt/issues/60476) +- Removed ``runtests`` targets from ``noxfile.py`` [#62239](https://github.com/saltstack/salt/issues/62239) +- Removed the PyObjC dependency. + + This addresses problems with building a one dir build for macOS. + It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir(because it would be tied to the macOS version where the onedir was built). + Since it's currently not being used, it's removed. [#62432](https://github.com/saltstack/salt/issues/62432) +- Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. [#63874](https://github.com/saltstack/salt/issues/63874) + +# Deprecated + +- renamed `keep_jobs`, specifying job cache TTL in hours, to `keep_jobs_seconds`, specifying TTL in seconds. + `keep_jobs` will be removed in the Argon release [#55295](https://github.com/saltstack/salt/issues/55295) +- Removing all references to napalm-base which is no longer supported. [#61542](https://github.com/saltstack/salt/issues/61542) +- The 'ip_bracket' function has been moved from salt/utils/zeromq.py to salt/utils/network.py [#62009](https://github.com/saltstack/salt/issues/62009) +- The `expand_repo_def` function in `salt.modules.aptpkg` is now deprecated. It's only used in `salt.states.pkgrepo` and it has no use being exposed to the CLI.
[#62485](https://github.com/saltstack/salt/issues/62485) +- Deprecated defunct Django returner [#62644](https://github.com/saltstack/salt/issues/62644) +- Deprecate core ESXi and associated states and modules, vcenter and vsphere support in favor of Salt VMware Extensions [#62754](https://github.com/saltstack/salt/issues/62754) +- Removing manufacture grain which has been deprecated. [#62914](https://github.com/saltstack/salt/issues/62914) +- Removing deprecated utils/boto3_elasticsearch.py [#62915](https://github.com/saltstack/salt/issues/62915) +- Removing support for the now deprecated _ext_nodes from salt/master.py. [#62917](https://github.com/saltstack/salt/issues/62917) +- Deprecating the Salt Slack engine in favor of the Salt Slack Bolt Engine. [#63095](https://github.com/saltstack/salt/issues/63095) +- `salt.utils.version.StrictVersion` is now deprecated and its use should be replaced with `salt.utils.version.Version`. [#63383](https://github.com/saltstack/salt/issues/63383) + +# Changed + +- More intelligent diffing in changes of file.serialize state. [#48609](https://github.com/saltstack/salt/issues/48609) +- Move deprecation of the neutron module to Argon. Please migrate to the neutronng module instead. [#49430](https://github.com/saltstack/salt/issues/49430) +- ``umask`` is now a global state argument, instead of only applying to ``cmd`` + states. [#57803](https://github.com/saltstack/salt/issues/57803) +- Update pillar.obfuscate to accept kwargs in addition to args. This is useful when passing in keyword arguments like saltenv that are then passed along to pillar.items. [#58971](https://github.com/saltstack/salt/issues/58971) +- Improve support for listing macOS brew casks [#59439](https://github.com/saltstack/salt/issues/59439) +- Add missing MariaDB Grants to mysql module. + MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating.
+ Also improved exception handling in `grant_add` which did not log the original error message and replaced it with a generic error. [#61409](https://github.com/saltstack/salt/issues/61409) +- Use VENV_PIP_TARGET environment variable as a default target for pip if present. [#62089](https://github.com/saltstack/salt/issues/62089) +- Disabled FQDNs grains on macOS by default [#62168](https://github.com/saltstack/salt/issues/62168) +- Replaced pyroute2.IPDB with pyroute2.NDB, as the former is deprecated [#62218](https://github.com/saltstack/salt/issues/62218) +- Enhance capture of error messages for Zypper calls in zypperpkg module. [#62346](https://github.com/saltstack/salt/issues/62346) +- Removed GPG_1_3_1 check [#62895](https://github.com/saltstack/salt/issues/62895) +- Requisite state chunks now all consistently contain `__id__`, `__sls__` and `name`. [#63012](https://github.com/saltstack/salt/issues/63012) +- netapi_enable_clients option to allow enabling/disabling of clients in salt-api. + By default all clients will now be disabled. Users of salt-api will need + to update their master config to enable the clients that they use. Not adding + the netapi_enable_clients option with required clients to the master config will + disable salt-api. [#63050](https://github.com/saltstack/salt/issues/63050) +- Stop relying on `salt/_version.py` to write Salt's version. Instead use `salt/_version.txt` which only contains the version string. [#63383](https://github.com/saltstack/salt/issues/63383) +- Set enable_fqdns_grains to be False by default. [#63595](https://github.com/saltstack/salt/issues/63595) +- Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file. 
[#63710](https://github.com/saltstack/salt/issues/63710) +- Upgraded to `relenv==0.9.0` [#63883](https://github.com/saltstack/salt/issues/63883) + +# Fixed + +- Add kwargs to handle extra parameters for http.query [#36138](https://github.com/saltstack/salt/issues/36138) +- Fix mounted bind mounts getting active mount options added [#39292](https://github.com/saltstack/salt/issues/39292) +- Fix `sysctl.present` converts spaces to tabs. [#40054](https://github.com/saltstack/salt/issues/40054) +- Fixes state pkg.purged to purge removed packages on Debian family systems [#42306](https://github.com/saltstack/salt/issues/42306) +- Fix fun_args missing from syndic returns [#45823](https://github.com/saltstack/salt/issues/45823) +- Fix mount.mounted with 'mount: False' reports unmounted file system as unchanged when running with test=True [#47201](https://github.com/saltstack/salt/issues/47201) +- Issue #49310: Allow users to touch a file with Unix date of birth [#49310](https://github.com/saltstack/salt/issues/49310) +- Do not raise an exception in pkg.info_installed on nonzero return code [#51620](https://github.com/saltstack/salt/issues/51620) +- Passes the value of the force parameter from file.copy to its call to file.remove so that files with the read-only attribute are handled. [#51739](https://github.com/saltstack/salt/issues/51739) +- Fixed x509.certificate_managed creates new certificate every run in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#52167](https://github.com/saltstack/salt/issues/52167) +- Don't check for cached pillar errors on state.apply [#52354](https://github.com/saltstack/salt/issues/52354), [#57180](https://github.com/saltstack/salt/issues/57180), [#59339](https://github.com/saltstack/salt/issues/59339) +- Swapping out args and kwargs for arg and kwarg respectively in the Slack engine when the command passed is a runner. 
[#52400](https://github.com/saltstack/salt/issues/52400) +- Ensure when we're adding chunks to the rules when running aggregation with the iptables state module we use a copy of the chunk otherwise we end up with a recursive mess. [#53353](https://github.com/saltstack/salt/issues/53353) +- When user_create or user_remove fail, return False instead of returning the error. [#53377](https://github.com/saltstack/salt/issues/53377) +- Include sync_roster when sync_all is called. [#53914](https://github.com/saltstack/salt/issues/53914) +- Avoid warning noise in lograte.get [#53988](https://github.com/saltstack/salt/issues/53988) +- Fixed listing revoked keys with gpg.list_keys [#54347](https://github.com/saltstack/salt/issues/54347) +- Fix mount.mounted does not handle blanks properly [#54508](https://github.com/saltstack/salt/issues/54508) +- Fixed grain num_cpus get wrong CPUs count in case of inconsistent CPU numbering. [#54682](https://github.com/saltstack/salt/issues/54682) +- Fix spelling error for python_shell argument in dpkg_lower module [#54907](https://github.com/saltstack/salt/issues/54907) +- Cleaned up bytes response data before sending to non-bytes compatible returners (postgres, mysql) [#55226](https://github.com/saltstack/salt/issues/55226) +- Fixed malformed state return when testing file.managed with unavailable source file [#55269](https://github.com/saltstack/salt/issues/55269) +- Included stdout in error message for Zypper calls in zypperpkg module. [#56016](https://github.com/saltstack/salt/issues/56016) +- Fixed pillar.filter_by with salt-ssh [#56093](https://github.com/saltstack/salt/issues/56093) +- Fix boto_route53 issue with (multiple) VPCs. [#57139](https://github.com/saltstack/salt/issues/57139) +- Remove log from mine runner which was not used. [#57463](https://github.com/saltstack/salt/issues/57463) +- Fixed x509.read_certificate error when reading a Microsoft CA issued certificate in the new cryptography x509 module. 
Please migrate to the new cryptography x509 module for this improvement. [#57535](https://github.com/saltstack/salt/issues/57535) +- Updating Slack engine to use slack_bolt library. [#57842](https://github.com/saltstack/salt/issues/57842) +- Fixed warning about replace=True with x509.certificate_managed in the new cryptography x509 module. [#58165](https://github.com/saltstack/salt/issues/58165) +- Fix salt.modules.pip:is_installed doesn't handle locally installed packages [#58202](https://github.com/saltstack/salt/issues/58202) +- Add missing MariaDB Grants to mysql module. MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. [#58297](https://github.com/saltstack/salt/issues/58297) +- linux_shadow: Fix cases where malformed shadow entries cause `user.present` + states to fail. [#58423](https://github.com/saltstack/salt/issues/58423) +- Fixed salt.utils.compat.cmp to work with dictionaries [#58729](https://github.com/saltstack/salt/issues/58729) +- Fixed formatting for terse output mode [#58953](https://github.com/saltstack/salt/issues/58953) +- Fixed RecursiveDictDiffer with added nested dicts [#59017](https://github.com/saltstack/salt/issues/59017) +- Fixed x509.certificate_managed has DoS effect on master in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59169](https://github.com/saltstack/salt/issues/59169) +- Fixed saltnado websockets disconnecting immediately [#59183](https://github.com/saltstack/salt/issues/59183) +- Fixed x509.certificate_managed rolls certificates every now and then in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. 
[#59315](https://github.com/saltstack/salt/issues/59315) +- Fix postgres_privileges.present not idempotent for functions [#59585](https://github.com/saltstack/salt/issues/59585) +- Fixed influxdb_continuous_query.present state to provide the client args to the underlying module on create. [#59766](https://github.com/saltstack/salt/issues/59766) +- Warn when using insecure (http:// based) key_urls for apt-based systems in pkgrepo.managed, and add a kwarg that determines the validity of such a url. [#59786](https://github.com/saltstack/salt/issues/59786) +- add load balancing policy default option and ensure the module can be executed with arguments from CLI [#59909](https://github.com/saltstack/salt/issues/59909) +- Fix salt-ssh when using imports with extra-filerefs. [#60003](https://github.com/saltstack/salt/issues/60003) +- Fixed cache directory corruption startup error [#60170](https://github.com/saltstack/salt/issues/60170) +- Update docs remove dry_run in docstring of file.blockreplace state. [#60227](https://github.com/saltstack/salt/issues/60227) +- Adds Parrot to OS_Family_Map in grains. [#60249](https://github.com/saltstack/salt/issues/60249) +- Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions [#60365](https://github.com/saltstack/salt/issues/60365) +- Use return code in iptables --check to verify rule exists. [#60467](https://github.com/saltstack/salt/issues/60467) +- Fix regression pip.installed does not pass env_vars when calling pip.list [#60557](https://github.com/saltstack/salt/issues/60557) +- Fix xfs module when additional output included in mkfs.xfs command. [#60853](https://github.com/saltstack/salt/issues/60853) +- Fixed parsing new format of terraform states in roster.terraform [#60915](https://github.com/saltstack/salt/issues/60915) +- Fixed recognizing installed ARMv7 rpm packages in compatible architectures. 
[#60994](https://github.com/saltstack/salt/issues/60994) +- Fixing changes dict in pkg state to be consistent when installing and test=True. [#60995](https://github.com/saltstack/salt/issues/60995) +- Fix cron.present duplicating entries when changing timespec to special. [#60997](https://github.com/saltstack/salt/issues/60997) +- Made salt-ssh respect --wipe again [#61083](https://github.com/saltstack/salt/issues/61083) +- state.orchestrate_single only passes a pillar if it is set to the state + function. This allows it to be used with state functions that don't accept a + pillar keyword argument. [#61092](https://github.com/saltstack/salt/issues/61092) +- Fix ipset state when the comment kwarg is set. [#61122](https://github.com/saltstack/salt/issues/61122) +- Fix issue with archive.unzip where the password was not being encoded for the extract function [#61422](https://github.com/saltstack/salt/issues/61422) +- Some Linux distributions (like AlmaLinux, Astra Linux, Debian, Mendel, Linux + Mint, Pop!_OS, Rocky Linux) report different `oscodename`, `osfullname`, + `osfinger` grains if lsb-release is installed or not. They have been changed to + only derive these OS grains from `/etc/os-release`. [#61618](https://github.com/saltstack/salt/issues/61618) +- Pop!_OS uses the full version (YY.MM) in the osfinger grain now, not just the year. This allows differentiating for example between 20.04 and 20.10. [#61619](https://github.com/saltstack/salt/issues/61619) +- Fix ssh config roster to correctly parse the ssh config files that contain spaces. 
[#61650](https://github.com/saltstack/salt/issues/61650) +- Fix SoftLayer configuration not raising an exception when a domain is missing [#61727](https://github.com/saltstack/salt/issues/61727) +- Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry [#61789](https://github.com/saltstack/salt/issues/61789) +- Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function. [#61805](https://github.com/saltstack/salt/issues/61805) +- Fixed malformed state return when merge-serializing to an improperly formatted file [#61814](https://github.com/saltstack/salt/issues/61814) +- Made cmdmod._run[_all]_quiet work during minion startup on MacOS with runas specified (which fixed mac_service) [#61816](https://github.com/saltstack/salt/issues/61816) +- When deleting the vault cache, also delete from the session cache [#61821](https://github.com/saltstack/salt/issues/61821) +- Ignore errors on reading license info with dpkg_lowpkg to prevent tracebacks on getting package information. 
[#61827](https://github.com/saltstack/salt/issues/61827) +- win_lgpo: Display conflicting policy names when more than one policy is found [#61859](https://github.com/saltstack/salt/issues/61859) +- win_lgpo: Fixed intermittent KeyError when getting policy setting using lgpo.get_policy [#61860](https://github.com/saltstack/salt/issues/61860) +- Fixed listing minions on OpenBSD [#61966](https://github.com/saltstack/salt/issues/61966) +- Make Salt return an error on "pkg" modules and states when targeting duplicated package names [#62019](https://github.com/saltstack/salt/issues/62019) +- Fix return of REST-returned permissions when auth_list is set [#62022](https://github.com/saltstack/salt/issues/62022) +- Normalize package names once on using pkg.installed/removed with yum to make it possible to install packages with the name containing a part similar to a name of architecture. [#62029](https://github.com/saltstack/salt/issues/62029) +- Fix inconsistency regarding name and pkgs parameters between zypperpkg.upgrade() and yumpkg.upgrade() [#62030](https://github.com/saltstack/salt/issues/62030) +- Fix attr=all handling in pkg.list_pkgs() (yum/zypper). [#62032](https://github.com/saltstack/salt/issues/62032) +- Fixed the humanname being ignored in pkgrepo.managed on openSUSE Leap [#62053](https://github.com/saltstack/salt/issues/62053) +- Fixed issue with some LGPO policies having whitespace at the beginning or end of the element alias [#62058](https://github.com/saltstack/salt/issues/62058) +- Fix ordering of args to libcloud_storage.download_object module [#62074](https://github.com/saltstack/salt/issues/62074) +- Ignore extend declarations in sls files that are excluded. [#62082](https://github.com/saltstack/salt/issues/62082) +- Remove leftover usage of impacket [#62101](https://github.com/saltstack/salt/issues/62101) +- Pass executable path from _get_path_exec() is used when calling the program. + The $HOME env is no longer modified globally.
+ Only trailing newlines are stripped from the fetched secret. + Pass process arguments are handled in a secure way. [#62120](https://github.com/saltstack/salt/issues/62120) +- Ignore some command return codes in openbsdrcctl_service to prevent spurious errors [#62131](https://github.com/saltstack/salt/issues/62131) +- Fixed extra period in filename output in tls module. Instead of "server.crt." it will now be "server.crt". [#62139](https://github.com/saltstack/salt/issues/62139) +- Make sure lingering PAexec-*.exe files in the Windows directory are cleaned up [#62152](https://github.com/saltstack/salt/issues/62152) +- Restored Salt's DeprecationWarnings [#62185](https://github.com/saltstack/salt/issues/62185) +- Fixed issue with forward slashes on Windows with file.recurse and clean=True [#62197](https://github.com/saltstack/salt/issues/62197) +- Recognize OSMC as Debian-based [#62198](https://github.com/saltstack/salt/issues/62198) +- Fixed Zypper module failing on RPM lock file being temporarily unavailable. [#62204](https://github.com/saltstack/salt/issues/62204) +- Improved error handling and diagnostics in the proxmox salt-cloud driver [#62211](https://github.com/saltstack/salt/issues/62211) +- Added EndeavourOS to the Arch os_family. [#62220](https://github.com/saltstack/salt/issues/62220) +- Fix salt-ssh not detecting `platform-python` as a valid interpreter on EL8 [#62235](https://github.com/saltstack/salt/issues/62235) +- Fix pkg.version_cmp on openEuler and a few other os flavors. [#62248](https://github.com/saltstack/salt/issues/62248) +- Fix localhost detection in glusterfs.peers [#62273](https://github.com/saltstack/salt/issues/62273) +- Fix Salt Package Manager (SPM) exception when calling spm create_repo . 
[#62281](https://github.com/saltstack/salt/issues/62281) +- Fix matcher slowness due to loader invocation [#62283](https://github.com/saltstack/salt/issues/62283) +- Fixes the Puppet module for non-aio Puppet packages for example running the Puppet module on FreeBSD. [#62323](https://github.com/saltstack/salt/issues/62323) +- Issue 62334: Displays a debug log message instead of an error log message when the publisher fails to connect [#62334](https://github.com/saltstack/salt/issues/62334) +- Fix pyobjects renderer access to opts and sls [#62336](https://github.com/saltstack/salt/issues/62336) +- Fix use of random shuffle and sample functions as Jinja filters [#62372](https://github.com/saltstack/salt/issues/62372) +- Fix groups with duplicate GIDs are not returned by get_group_list [#62377](https://github.com/saltstack/salt/issues/62377) +- Fix the "zpool.present" state when enabling zpool features that are already active. [#62390](https://github.com/saltstack/salt/issues/62390) +- Fix ability to execute remote file client methods in saltcheck [#62398](https://github.com/saltstack/salt/issues/62398) +- Update all platforms to use pycparser 2.21 or greater for Py 3.9 or higher, fixes fips fault with openssl v3.x [#62400](https://github.com/saltstack/salt/issues/62400) +- Due to changes in the Netmiko library for the exception paths, need to check the version of Netmiko python library and then import the exceptions from different locations depending on the result. [#62405](https://github.com/saltstack/salt/issues/62405) +- When using prereq on a state, the prereq state will first be run with test=True to determine if there are changes. When there are changes, the state with the prereq option will be run prior to the prereq state. If this state fails then the prereq state will not run and the state output uses the test=True run. However, the proposed changes for the prereq state are included from the test=True run.
We should pull those out as there weren't actually changes since the prereq state did not run. [#62408](https://github.com/saltstack/salt/issues/62408) +- Added directory mode for file.copy with makedirs [#62426](https://github.com/saltstack/salt/issues/62426) +- Provide better error handling in the various napalm proxy minion functions when the device is not accessible. [#62435](https://github.com/saltstack/salt/issues/62435) +- When handling aggregation, change the order to ensure that the requisites are aggregated first and then the state functions are aggregated. Caching whether aggregate functions are available for particular states so we don't need to attempt to load them every time. [#62439](https://github.com/saltstack/salt/issues/62439) +- The patch allows bootstrapping kubernetes clusters in versions above 1.13 via salt module [#62451](https://github.com/saltstack/salt/issues/62451) +- sysctl.persist now updates the in-memory value on FreeBSD even if the on-disk value was already correct. [#62461](https://github.com/saltstack/salt/issues/62461) +- Fixed parsing CDROM apt sources [#62474](https://github.com/saltstack/salt/issues/62474) +- Update sanitizing masking for Salt SSH to include additional password like strings. [#62483](https://github.com/saltstack/salt/issues/62483) +- Fix user/group checking on file state functions in the test mode. [#62499](https://github.com/saltstack/salt/issues/62499) +- Fix user.present to allow removing groups using optional_groups parameter and enforcing idempotent group membership. [#62502](https://github.com/saltstack/salt/issues/62502) +- Fix possible tracebacks if a package with '------' or '======' in the description is installed on the Debian based minion.
[#62519](https://github.com/saltstack/salt/issues/62519) +- Fixed the omitted "pool" parameter when cloning a VM with the proxmox salt-cloud driver [#62521](https://github.com/saltstack/salt/issues/62521) +- Fix rendering of pyobjects states in saltcheck [#62523](https://github.com/saltstack/salt/issues/62523) +- Fixes pillar where a corrupted CacheDisk file forces the pillar to be rebuilt [#62527](https://github.com/saltstack/salt/issues/62527) +- Use str() method instead of repo_line for when python3-apt is installed or not in aptpkg.py. [#62546](https://github.com/saltstack/salt/issues/62546) +- Remove the connection_timeout from netmiko_connection_args before netmiko_connection_args is added to __context__["netmiko_device"]["args"] which is passed along to the Netmiko library. [#62547](https://github.com/saltstack/salt/issues/62547) +- Fix order specific mount.mounted options for persist [#62556](https://github.com/saltstack/salt/issues/62556) +- Fixed salt-cloud cloning a proxmox VM with a specified new vmid. [#62558](https://github.com/saltstack/salt/issues/62558) +- Fix runas with cmd module when using the onedir bundled packages [#62565](https://github.com/saltstack/salt/issues/62565) +- Update setproctitle version for all platforms [#62576](https://github.com/saltstack/salt/issues/62576) +- Fixed missing parameters when cloning a VM with the proxmox salt-cloud driver [#62580](https://github.com/saltstack/salt/issues/62580) +- Handle PermissionError when importing crypt when FIPS is enabled. [#62587](https://github.com/saltstack/salt/issues/62587) +- Correctly reraise exceptions in states.http [#62595](https://github.com/saltstack/salt/issues/62595) +- Fixed syndic eauth. Now jobs will be published when a valid eauth user is targeting allowed minions/functions. [#62618](https://github.com/saltstack/salt/issues/62618) +- updated rest_cherry/app to properly detect arg sent as a string as curl will do when only one arg is supplied. 
[#62624](https://github.com/saltstack/salt/issues/62624) +- Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline [#62633](https://github.com/saltstack/salt/issues/62633) +- Fixed vault ext pillar return data for KV v2 [#62651](https://github.com/saltstack/salt/issues/62651) +- Fix saltcheck _get_top_states doesn't pass saltenv to state.show_top [#62654](https://github.com/saltstack/salt/issues/62654) +- Fix groupadd.* functions hard code relative command name [#62657](https://github.com/saltstack/salt/issues/62657) +- Fixed pdbedit.create trying to use a bytes-like hash as string. [#62670](https://github.com/saltstack/salt/issues/62670) +- Fix dependency on legacy boto module in boto3 modules [#62672](https://github.com/saltstack/salt/issues/62672) +- Modified "_get_flags" function so that it returns regex flags instead of integers [#62676](https://github.com/saltstack/salt/issues/62676) +- Change startup ReqServer log messages from error to info level. [#62728](https://github.com/saltstack/salt/issues/62728) +- Fix kmod.* functions hard code relative command name [#62772](https://github.com/saltstack/salt/issues/62772) +- Remove mako as a dependency in Windows and macOS. [#62785](https://github.com/saltstack/salt/issues/62785) +- Fix mac_brew_pkg to work with null taps [#62793](https://github.com/saltstack/salt/issues/62793) +- Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" item is being treated as a schedule item. [#62795](https://github.com/saltstack/salt/issues/62795) +- Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode [#62817](https://github.com/saltstack/salt/issues/62817) +- Include UID and GID checks in modules.file.check_perms as well as comparing + ownership by username and group name.
[#62818](https://github.com/saltstack/salt/issues/62818) +- Fix presence events on TCP transport by removing a client's presence when minion disconnects from publish channel correctly [#62826](https://github.com/saltstack/salt/issues/62826) +- Remove Azure deprecation messages from functions that always run w/ salt-cloud [#62845](https://github.com/saltstack/salt/issues/62845) +- Use select instead of iterating over entrypoints as a dictionary for importlib_metadata>=5.0.0 [#62854](https://github.com/saltstack/salt/issues/62854) +- Fixed master job scheduler using when [#62858](https://github.com/saltstack/salt/issues/62858) +- LGPO: Added support for missing domain controller policies: VulnerableChannelAllowList and LdapEnforceChannelBinding [#62873](https://github.com/saltstack/salt/issues/62873) +- Fix unnecessarily complex gce metadata grains code to use Google's metadata service more effectively. [#62878](https://github.com/saltstack/salt/issues/62878) +- Fixed dockermod version_info function for docker-py 6.0.0+ [#62882](https://github.com/saltstack/salt/issues/62882) +- Moving setting the LOAD_BALANCING_POLICY_MAP dictionary into the try except block that determines if the cassandra_cql module should be made available. [#62886](https://github.com/saltstack/salt/issues/62886) +- Updating various MongoDB module functions to work with latest version of pymongo. [#62900](https://github.com/saltstack/salt/issues/62900) +- Restored channel for Syndic minions to send job returns to the Salt master. [#62933](https://github.com/saltstack/salt/issues/62933) +- removed _resolve_deps as it required a library that is not generally available, and switched to apt-get for everything as that can auto resolve dependencies.
[#62934](https://github.com/saltstack/salt/issues/62934) +- Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang [#62937](https://github.com/saltstack/salt/issues/62937) +- Allow root user to modify crontab lines for non-root users (except AIX and Solaris). Align crontab line changes with the file ones and also with listing crontab. [#62940](https://github.com/saltstack/salt/issues/62940) +- Fix systemd_service.* functions hard code relative command name [#62942](https://github.com/saltstack/salt/issues/62942) +- Fix file.symlink backupname operation can copy remote contents to local disk [#62953](https://github.com/saltstack/salt/issues/62953) +- Issue #62968: Fix issue where cloud deployments were putting the keys in the wrong location on Windows hosts [#62968](https://github.com/saltstack/salt/issues/62968) +- Fixed gpg_passphrase issue with gpg decrypt/encrypt functions [#62977](https://github.com/saltstack/salt/issues/62977) +- Fix file.tidied FileNotFoundError [#62986](https://github.com/saltstack/salt/issues/62986) +- Fixed bug where module.wait states were detected as running legacy module.run syntax [#62988](https://github.com/saltstack/salt/issues/62988) +- Fixed issue with win_wua module where it wouldn't load if the CryptSvc was set to Manual start [#62993](https://github.com/saltstack/salt/issues/62993) +- The `__opts__` dunder dictionary is now added to the loader's `pack` if not + already present, which makes it accessible via the + `salt.loader.context.NamedLoaderContext` class. 
[#63013](https://github.com/saltstack/salt/issues/63013) +- Issue #63024: Fix issue where grains and config data were being placed in the wrong location on Windows hosts [#63024](https://github.com/saltstack/salt/issues/63024) +- Fix btrfs.subvolume_snapshot command failing [#63025](https://github.com/saltstack/salt/issues/63025) +- Fix file.retention_schedule always reports changes [#63033](https://github.com/saltstack/salt/issues/63033) +- Fix mongo authentication for mongo ext_pillar and mongo returner + + This fix also includes the ability to use the mongo connection string for mongo ext_pillar [#63058](https://github.com/saltstack/salt/issues/63058) +- Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. [#63103](https://github.com/saltstack/salt/issues/63103) +- TCP transport documentation now contains proper master/minion-side filtering information [#63120](https://github.com/saltstack/salt/issues/63120) +- Fixed gpg.verify does not respect gnupghome [#63145](https://github.com/saltstack/salt/issues/63145) +- User responsible for the runner is now correctly reported in the events on the event bus for the runner. [#63148](https://github.com/saltstack/salt/issues/63148) +- Made pillar cache pass extra minion data as well [#63208](https://github.com/saltstack/salt/issues/63208) +- Fix serious performance issues with the file.tidied module [#63231](https://github.com/saltstack/salt/issues/63231) +- Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. [#63317](https://github.com/saltstack/salt/issues/63317) +- Import StrictVersion and LooseVersion from setuptools.distutils.version or setuptools._distutils.version, if first not available [#63350](https://github.com/saltstack/salt/issues/63350) +- ``service.status`` on Windows no longer throws a CommandExecutionError if + the service is not found on the system. It now returns "Not Found" instead. 
[#63577](https://github.com/saltstack/salt/issues/63577) +- When the shell is passed as powershell or pwsh, only wrap the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended. [#63590](https://github.com/saltstack/salt/issues/63590) +- LGPO: Added support for "Relax minimum password length limits" [#63596](https://github.com/saltstack/salt/issues/63596) +- Fixed the ability to set a scheduled task to auto delete if not scheduled to run again (``delete_after``) [#63650](https://github.com/saltstack/salt/issues/63650) +- When a job is disabled only increase its _next_fire_time value if the job would have run at the current time, eg. the current _next_fire_time == now. [#63699](https://github.com/saltstack/salt/issues/63699) +- have salt.template.compile_template_str cleanup its temp files. [#63724](https://github.com/saltstack/salt/issues/63724) +- Check file is not empty before attempting to read pillar disk cache file [#63729](https://github.com/saltstack/salt/issues/63729) +- Fixed an issue with generating fingerprints for public keys with different line endings [#63742](https://github.com/saltstack/salt/issues/63742) +- Add `fileserver_interval` and `maintenance_interval` master configuration options. These options control how often to restart the FileServerUpdate and Maintenance processes. Some file server and pillar configurations are known to cause memory leaks over time. A notable example of this are configurations that use pygit2. Salt can not guarantee dependency libraries like pygit2 won't leak memory. Restarting any long running processes that use pygit2 guarantees we can keep the master's memory usage in check. [#63747](https://github.com/saltstack/salt/issues/63747) +- mac_xattr.list and mac_xattr.read will replace undecode-able bytes to avoid raising CommandExecutionError. 
[#63779](https://github.com/saltstack/salt/issues/63779) +- Change default GPG keyserver from pgp.mit.edu to keys.openpgp.org. [#63806](https://github.com/saltstack/salt/issues/63806) +- fix cherrypy 400 error output to be less generic. [#63835](https://github.com/saltstack/salt/issues/63835) +- Ensure kwargs is passed along to _call_apt when passed into install function. [#63847](https://github.com/saltstack/salt/issues/63847) +- remove eval and update logging to be more informative on bad config [#63879](https://github.com/saltstack/salt/issues/63879) +- add linux_distribution to util to stop dep warning [#63904](https://github.com/saltstack/salt/issues/63904) +- Fix ValueError when trying to close fileclient. Remove usage of __del__ and close the fileclient properly. [#63920](https://github.com/saltstack/salt/issues/63920) +- Handle the situation when a sub proxy minion does not init properly, eg. an exception happens, and the sub proxy object is not available. [#63923](https://github.com/saltstack/salt/issues/63923) +- Clarifying documentation for extension_modules configuration option. [#63929](https://github.com/saltstack/salt/issues/63929) +- Windows pkg module now properly handles versions containing strings [#63935](https://github.com/saltstack/salt/issues/63935) +- Handle the scenario when the check_cmd requisite is used with a state function when the state has a local check_cmd function but that function isn't used by that function. [#63948](https://github.com/saltstack/salt/issues/63948) +- Issue #63981: Allow users to pass verify_ssl to pkg.install/pkg.installed on Windows [#63981](https://github.com/saltstack/salt/issues/63981) +- Hardened permissions on workers.ipc and master_event_pub.ipc. 
[#64063](https://github.com/saltstack/salt/issues/64063) + +# Added + +- Introduce a `LIB_STATE_DIR` syspaths variable which defaults to `CONFIG_DIR`, + but can be individually customized during installation by specifying + `--salt-lib-state-dir` during installation. Change the default `pki_dir` to + `/pki/master` (for the master) and `/pki/minion` + (for the minion). [#3396](https://github.com/saltstack/salt/issues/3396) +- Allow users to enable 'queue=True' for all state runs via config file [#31468](https://github.com/saltstack/salt/issues/31468) +- Added pillar templating to vault policies [#43287](https://github.com/saltstack/salt/issues/43287) +- Add support for NVMeF as a transport protocol for hosts in a Pure Storage FlashArray [#51088](https://github.com/saltstack/salt/issues/51088) +- A new salt-ssh roster that generates a roster by parsing a known_hosts file. [#54679](https://github.com/saltstack/salt/issues/54679) +- Added Windows Event Viewer support [#54713](https://github.com/saltstack/salt/issues/54713) +- Added the win_lgpo_reg state and execution modules which will allow registry based group policy to be set directly in the Registry.pol file [#56013](https://github.com/saltstack/salt/issues/56013) +- Added resource tagging functions to boto_dynamodb execution module [#57500](https://github.com/saltstack/salt/issues/57500) +- Added `openvswitch_db` state module and functions `bridge_to_parent`, + `bridge_to_vlan`, `db_get`, and `db_set` to the `openvswitch` execution module. + Also added optional `parent` and `vlan` parameters to the + `openvswitch_bridge.present` state module function and the + `openvswitch.bridge_create` execution module function. [#58986](https://github.com/saltstack/salt/issues/58986) +- State module to manage SysFS attributes [#60154](https://github.com/saltstack/salt/issues/60154) +- Added ability for `salt.wait_for_event` to handle `event_id`s that have a list value. 
[#60430](https://github.com/saltstack/salt/issues/60430) +- Added support for Linux ppc64le core grains (cpu_model, virtual, productname, manufacturer, serialnumber) and arm core grains (serialnumber, productname) [#60518](https://github.com/saltstack/salt/issues/60518) +- Added autostart option to virt.defined and virt.running states, along with virt.update execution modules. [#60700](https://github.com/saltstack/salt/issues/60700) +- Added .0 back to our versioning scheme for future versions (e.g. 3006.0) [#60722](https://github.com/saltstack/salt/issues/60722) +- Initial work to allow parallel startup of proxy minions when used as sub proxies with Deltaproxy. [#61153](https://github.com/saltstack/salt/issues/61153) +- Added node label support for GCE [#61245](https://github.com/saltstack/salt/issues/61245) +- Support the --priority flag when adding sources to Chocolatey. [#61319](https://github.com/saltstack/salt/issues/61319) +- Add namespace option to ext_pillar.http_json [#61335](https://github.com/saltstack/salt/issues/61335) +- Added a filter function to ps module to get a list of processes on a minion according to their state. [#61420](https://github.com/saltstack/salt/issues/61420) +- Add postgres.timeout option to postgres module for limiting postgres query times [#61433](https://github.com/saltstack/salt/issues/61433) +- Added new optional vault option, ``config_location``. This can be either ``master`` or ``local`` and defines where vault will look for connection details, either requesting them from the master or using the local config. [#61857](https://github.com/saltstack/salt/issues/61857) +- Add ipwrap() jinja filter to wrap IPv6 addresses with brackets. [#61931](https://github.com/saltstack/salt/issues/61931) +- 'tcp' transport is now available in ipv6-only network [#62009](https://github.com/saltstack/salt/issues/62009) +- Add `diff_attr` parameter to pkg.upgrade() (zypper/yum). 
[#62031](https://github.com/saltstack/salt/issues/62031) +- Config option pass_variable_prefix allows to distinguish variables that contain paths to pass secrets. + Config option pass_strict_fetch allows to error out when a secret cannot be fetched from pass. + Config option pass_dir allows setting the PASSWORD_STORE_DIR env for pass. + Config option pass_gnupghome allows setting the $GNUPGHOME env for pass. [#62120](https://github.com/saltstack/salt/issues/62120) +- Add file.pruned state and expanded file.rmdir exec module functionality [#62178](https://github.com/saltstack/salt/issues/62178) +- Added "dig.PTR" function to resolve PTR records for IPs, as well as tests and documentation [#62275](https://github.com/saltstack/salt/issues/62275) +- Added the ability to remove a KB using the DISM state/execution modules [#62366](https://github.com/saltstack/salt/issues/62366) +- Add " python" subcommand to allow execution or arbitrary scripts via bundled Python runtime [#62381](https://github.com/saltstack/salt/issues/62381) +- Add ability to provide conditions which convert normal state actions to no-op when true [#62446](https://github.com/saltstack/salt/issues/62446) +- Added debug log messages displaying the command being run when installing packages on Windows [#62480](https://github.com/saltstack/salt/issues/62480) +- Add biosvendor grain [#62496](https://github.com/saltstack/salt/issues/62496) +- Add ifelse Jinja function as found in CFEngine [#62508](https://github.com/saltstack/salt/issues/62508) +- Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. 
[#62539](https://github.com/saltstack/salt/issues/62539) +- Adds __env__substitution to ext_pillar.stack; followup of #61531, improved exception handling for stacked template (jinja) template rendering and yaml parsing in ext_pillar.stack [#62578](https://github.com/saltstack/salt/issues/62578) +- Increase file.tidied flexibility with regard to age and size [#62678](https://github.com/saltstack/salt/issues/62678) +- Added "connected_devices" feature to netbox pillar module. It contains extra information about devices connected to the minion [#62761](https://github.com/saltstack/salt/issues/62761) +- Add atomic file operation for symlink changes [#62768](https://github.com/saltstack/salt/issues/62768) +- Add password/account locking/unlocking in user.present state on supported operating systems [#62856](https://github.com/saltstack/salt/issues/62856) +- Added onchange configuration for script engine [#62867](https://github.com/saltstack/salt/issues/62867) +- Added output and bare functionality to export_key gpg module function [#62978](https://github.com/saltstack/salt/issues/62978) +- Add keyvalue serializer for environment files [#62983](https://github.com/saltstack/salt/issues/62983) +- Add ability to ignore symlinks in file.tidied [#63042](https://github.com/saltstack/salt/issues/63042) +- salt-cloud support IMDSv2 tokens when using 'use-instance-role-credentials' [#63067](https://github.com/saltstack/salt/issues/63067) +- Fix running fast tests twice and add git labels to suite. 
[#63081](https://github.com/saltstack/salt/issues/63081) +- Add ability for file.symlink to not set ownership on existing links [#63093](https://github.com/saltstack/salt/issues/63093) +- Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation [#63095](https://github.com/saltstack/salt/issues/63095) +- Add functions that will return the underlying block device, mount point, and filesystem type for a given path [#63098](https://github.com/saltstack/salt/issues/63098) +- Add ethtool execution and state module functions for pause [#63128](https://github.com/saltstack/salt/issues/63128) +- Add boardname grain [#63131](https://github.com/saltstack/salt/issues/63131) +- Added management of ECDSA/EdDSA private keys with x509 modules in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63248](https://github.com/saltstack/salt/issues/63248) +- Added x509 modules support for different output formats in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63249](https://github.com/saltstack/salt/issues/63249) +- Added deprecation_warning test state for ensuring that deprecation warnings are correctly emitted. [#63315](https://github.com/saltstack/salt/issues/63315) +- Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id. + This allows users to enable state_events on a per use basis rather than having to + enable them globally for all state runs. [#63316](https://github.com/saltstack/salt/issues/63316) +- Allow max queue size setting for state runs to prevent performance problems from queue growth [#63356](https://github.com/saltstack/salt/issues/63356) +- Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) +- Include the version of `relenv` in the versions report. 
[#63827](https://github.com/saltstack/salt/issues/63827) +- Added debug log messages displaying the command being run when removing packages on Windows [#63866](https://github.com/saltstack/salt/issues/63866) +- Adding the ability to exclude arguments from a state that end up passed to cmd.retcode when requisites such as onlyif or unless are used. [#63956](https://github.com/saltstack/salt/issues/63956) +- Add --next-release argument to salt/version.py, which prints the next upcoming release. [#64023](https://github.com/saltstack/salt/issues/64023) + +# Security + +- Upgrade Requirements Due to Security Issues. + + * Upgrade to `cryptography>=39.0.1` due to: + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. + * Update to `markdown-it-py==2.2.0` due to: + * https://github.com/advisories/GHSA-jrwr-5x3p-hvc3 + * https://github.com/advisories/GHSA-vrjv-mxr7-vjf8 [#63882](https://github.com/saltstack/salt/issues/63882) + + +* Wed Mar 29 2023 Salt Project Packaging - 3006.0~rc3 + +# Removed + +- Remove and deprecate the __orchestration__ key from salt.runner and salt.wheel return data. To get it back, set features.enable_deprecated_orchestration_flag master configuration option to True. The flag will be completely removed in Salt 3008 Argon. [#59917](https://github.com/saltstack/salt/issues/59917) +- Removed distutils and replaced with setuptools, given distutils is deprecated and removed in Python 3.12 [#60476](https://github.com/saltstack/salt/issues/60476) +- Removed ``runtests`` targets from ``noxfile.py`` [#62239](https://github.com/saltstack/salt/issues/62239) +- Removed the PyObjC dependency. + + This addresses problems with building a one dir build for macOS. 
+ It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir(because it would be tied to the macOS version where the onedir was built). + Since it's currently not being used, it's removed. [#62432](https://github.com/saltstack/salt/issues/62432) +- Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. [#63874](https://github.com/saltstack/salt/issues/63874) + +# Deprecated + +- renamed `keep_jobs`, specifying job cache TTL in hours, to `keep_jobs_seconds`, specifying TTL in seconds. + `keep_jobs` will be removed in the Argon release [#55295](https://github.com/saltstack/salt/issues/55295) +- Removing all references to napalm-base which is no longer supported. [#61542](https://github.com/saltstack/salt/issues/61542) +- The 'ip_bracket' function has been moved from salt/utils/zeromq.py in salt/utils/network.py [#62009](https://github.com/saltstack/salt/issues/62009) +- The `expand_repo_def` function in `salt.modules.aptpkg` is now deprecated. It's only used in `salt.states.pkgrepo` and it has no use of being exposed to the CLI. [#62485](https://github.com/saltstack/salt/issues/62485) +- Deprecated defunct Django returner [#62644](https://github.com/saltstack/salt/issues/62644) +- Deprecate core ESXi and associated states and modules, vcenter and vsphere support in favor of Salt VMware Extensions [#62754](https://github.com/saltstack/salt/issues/62754) +- Removing manufacture grain which has been deprecated. [#62914](https://github.com/saltstack/salt/issues/62914) +- Removing deprecated utils/boto3_elasticsearch.py [#62915](https://github.com/saltstack/salt/issues/62915) +- Removing support for the now deprecated _ext_nodes from salt/master.py. [#62917](https://github.com/saltstack/salt/issues/62917) +- Deprecating the Salt Slack engine in favor of the Salt Slack Bolt Engine. 
[#63095](https://github.com/saltstack/salt/issues/63095) +- `salt.utils.version.StrictVersion` is now deprecated and it's use should be replaced with `salt.utils.version.Version`. [#63383](https://github.com/saltstack/salt/issues/63383) + +# Changed + +- More intelligent diffing in changes of file.serialize state. [#48609](https://github.com/saltstack/salt/issues/48609) +- Move deprecation of the neutron module to Argon. Please migrate to the neutronng module instead. [#49430](https://github.com/saltstack/salt/issues/49430) +- ``umask`` is now a global state argument, instead of only applying to ``cmd`` + states. [#57803](https://github.com/saltstack/salt/issues/57803) +- Update pillar.obfuscate to accept kwargs in addition to args. This is useful when passing in keyword arguments like saltenv that are then passed along to pillar.items. [#58971](https://github.com/saltstack/salt/issues/58971) +- Improve support for listing macOS brew casks [#59439](https://github.com/saltstack/salt/issues/59439) +- Add missing MariaDB Grants to mysql module. + MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. + Also improved exception handling in `grant_add` which did not log the original error message and replaced it with a generic error. [#61409](https://github.com/saltstack/salt/issues/61409) +- Use VENV_PIP_TARGET environment variable as a default target for pip if present. [#62089](https://github.com/saltstack/salt/issues/62089) +- Disabled FQDNs grains on macOS by default [#62168](https://github.com/saltstack/salt/issues/62168) +- Replaced pyroute2.IPDB with pyroute2.NDB, as the former is deprecated [#62218](https://github.com/saltstack/salt/issues/62218) +- Enhance capture of error messages for Zypper calls in zypperpkg module. 
[#62346](https://github.com/saltstack/salt/issues/62346) +- Removed GPG_1_3_1 check [#62895](https://github.com/saltstack/salt/issues/62895) +- Requisite state chunks now all consistently contain `__id__`, `__sls__` and `name`. [#63012](https://github.com/saltstack/salt/issues/63012) +- netapi_enable_clients option to allow enabling/disabling of clients in salt-api. + By default all clients will now be disabled. Users of salt-api will need + to update their master config to enable the clients that they use. Not adding + the netapi_enable_clients option with required clients to the master config will + disable salt-api. [#63050](https://github.com/saltstack/salt/issues/63050) +- Stop relying on `salt/_version.py` to write Salt's version. Instead use `salt/_version.txt` which only contains the version string. [#63383](https://github.com/saltstack/salt/issues/63383) +- Set enable_fqdns_grains to be False by default. [#63595](https://github.com/saltstack/salt/issues/63595) +- Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file. [#63710](https://github.com/saltstack/salt/issues/63710) +- Upgraded to `relenv==0.9.0` [#63883](https://github.com/saltstack/salt/issues/63883) + +# Fixed + +- Add kwargs to handle extra parameters for http.query [#36138](https://github.com/saltstack/salt/issues/36138) +- Fix mounted bind mounts getting active mount options added [#39292](https://github.com/saltstack/salt/issues/39292) +- Fix `sysctl.present` converts spaces to tabs. 
[#40054](https://github.com/saltstack/salt/issues/40054) +- Fixes state pkg.purged to purge removed packages on Debian family systems [#42306](https://github.com/saltstack/salt/issues/42306) +- Fix fun_args missing from syndic returns [#45823](https://github.com/saltstack/salt/issues/45823) +- Fix mount.mounted with 'mount: False' reports unmounted file system as unchanged when running with test=True [#47201](https://github.com/saltstack/salt/issues/47201) +- Issue #49310: Allow users to touch a file with Unix date of birth [#49310](https://github.com/saltstack/salt/issues/49310) +- Do not raise an exception in pkg.info_installed on nonzero return code [#51620](https://github.com/saltstack/salt/issues/51620) +- Passes the value of the force parameter from file.copy to its call to file.remove so that files with the read-only attribute are handled. [#51739](https://github.com/saltstack/salt/issues/51739) +- Fixed x509.certificate_managed creates new certificate every run in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#52167](https://github.com/saltstack/salt/issues/52167) +- Don't check for cached pillar errors on state.apply [#52354](https://github.com/saltstack/salt/issues/52354), [#57180](https://github.com/saltstack/salt/issues/57180), [#59339](https://github.com/saltstack/salt/issues/59339) +- Swapping out args and kwargs for arg and kwarg respectively in the Slack engine when the command passed is a runner. [#52400](https://github.com/saltstack/salt/issues/52400) +- Ensure when we're adding chunks to the rules when running aggregation with the iptables state module we use a copy of the chunk otherwise we end up with a recursive mess. [#53353](https://github.com/saltstack/salt/issues/53353) +- When user_create or user_remove fail, return False instead of returning the error. [#53377](https://github.com/saltstack/salt/issues/53377) +- Include sync_roster when sync_all is called. 
[#53914](https://github.com/saltstack/salt/issues/53914) +- Avoid warning noise in lograte.get [#53988](https://github.com/saltstack/salt/issues/53988) +- Fixed listing revoked keys with gpg.list_keys [#54347](https://github.com/saltstack/salt/issues/54347) +- Fix mount.mounted does not handle blanks properly [#54508](https://github.com/saltstack/salt/issues/54508) +- Fixed grain num_cpus get wrong CPUs count in case of inconsistent CPU numbering. [#54682](https://github.com/saltstack/salt/issues/54682) +- Fix spelling error for python_shell argument in dpkg_lower module [#54907](https://github.com/saltstack/salt/issues/54907) +- Cleaned up bytes response data before sending to non-bytes compatible returners (postgres, mysql) [#55226](https://github.com/saltstack/salt/issues/55226) +- Fixed malformed state return when testing file.managed with unavailable source file [#55269](https://github.com/saltstack/salt/issues/55269) +- Included stdout in error message for Zypper calls in zypperpkg module. [#56016](https://github.com/saltstack/salt/issues/56016) +- Fixed pillar.filter_by with salt-ssh [#56093](https://github.com/saltstack/salt/issues/56093) +- Fix boto_route53 issue with (multiple) VPCs. [#57139](https://github.com/saltstack/salt/issues/57139) +- Remove log from mine runner which was not used. [#57463](https://github.com/saltstack/salt/issues/57463) +- Fixed x509.read_certificate error when reading a Microsoft CA issued certificate in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#57535](https://github.com/saltstack/salt/issues/57535) +- Updating Slack engine to use slack_bolt library. [#57842](https://github.com/saltstack/salt/issues/57842) +- Fixed warning about replace=True with x509.certificate_managed in the new cryptography x509 module. 
[#58165](https://github.com/saltstack/salt/issues/58165) +- Fix salt.modules.pip:is_installed doesn't handle locally installed packages [#58202](https://github.com/saltstack/salt/issues/58202) +- Add missing MariaDB Grants to mysql module. MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. [#58297](https://github.com/saltstack/salt/issues/58297) +- linux_shadow: Fix cases where malformed shadow entries cause `user.present` + states to fail. [#58423](https://github.com/saltstack/salt/issues/58423) +- Fixed salt.utils.compat.cmp to work with dictionaries [#58729](https://github.com/saltstack/salt/issues/58729) +- Fixed formatting for terse output mode [#58953](https://github.com/saltstack/salt/issues/58953) +- Fixed RecursiveDictDiffer with added nested dicts [#59017](https://github.com/saltstack/salt/issues/59017) +- Fixed x509.certificate_managed has DoS effect on master in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59169](https://github.com/saltstack/salt/issues/59169) +- Fixed saltnado websockets disconnecting immediately [#59183](https://github.com/saltstack/salt/issues/59183) +- Fixed x509.certificate_managed rolls certificates every now and then in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59315](https://github.com/saltstack/salt/issues/59315) +- Fix postgres_privileges.present not idempotent for functions [#59585](https://github.com/saltstack/salt/issues/59585) +- Fixed influxdb_continuous_query.present state to provide the client args to the underlying module on create. [#59766](https://github.com/saltstack/salt/issues/59766) +- Warn when using insecure (http:// based) key_urls for apt-based systems in pkgrepo.managed, and add a kwarg that determines the validity of such a url. 
[#59786](https://github.com/saltstack/salt/issues/59786) +- add load balancing policy default option and ensure the module can be executed with arguments from CLI [#59909](https://github.com/saltstack/salt/issues/59909) +- Fix salt-ssh when using imports with extra-filerefs. [#60003](https://github.com/saltstack/salt/issues/60003) +- Fixed cache directory corruption startup error [#60170](https://github.com/saltstack/salt/issues/60170) +- Update docs remove dry_run in docstring of file.blockreplace state. [#60227](https://github.com/saltstack/salt/issues/60227) +- Adds Parrot to OS_Family_Map in grains. [#60249](https://github.com/saltstack/salt/issues/60249) +- Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions [#60365](https://github.com/saltstack/salt/issues/60365) +- Use return code in iptables --check to verify rule exists. [#60467](https://github.com/saltstack/salt/issues/60467) +- Fix regression pip.installed does not pass env_vars when calling pip.list [#60557](https://github.com/saltstack/salt/issues/60557) +- Fix xfs module when additional output included in mkfs.xfs command. [#60853](https://github.com/saltstack/salt/issues/60853) +- Fixed parsing new format of terraform states in roster.terraform [#60915](https://github.com/saltstack/salt/issues/60915) +- Fixed recognizing installed ARMv7 rpm packages in compatible architectures. [#60994](https://github.com/saltstack/salt/issues/60994) +- Fixing changes dict in pkg state to be consistent when installing and test=True. [#60995](https://github.com/saltstack/salt/issues/60995) +- Fix cron.present duplicating entries when changing timespec to special. [#60997](https://github.com/saltstack/salt/issues/60997) +- Made salt-ssh respect --wipe again [#61083](https://github.com/saltstack/salt/issues/61083) +- state.orchestrate_single only passes a pillar if it is set to the state + function. 
This allows it to be used with state functions that don't accept a + pillar keyword argument. [#61092](https://github.com/saltstack/salt/issues/61092) +- Fix ipset state when the comment kwarg is set. [#61122](https://github.com/saltstack/salt/issues/61122) +- Fix issue with archive.unzip where the password was not being encoded for the extract function [#61422](https://github.com/saltstack/salt/issues/61422) +- Some Linux distributions (like AlmaLinux, Astra Linux, Debian, Mendel, Linux + Mint, Pop!_OS, Rocky Linux) report different `oscodename`, `osfullname`, + `osfinger` grains if lsb-release is installed or not. They have been changed to + only derive these OS grains from `/etc/os-release`. [#61618](https://github.com/saltstack/salt/issues/61618) +- Pop!_OS uses the full version (YY.MM) in the osfinger grain now, not just the year. This allows differentiating for example between 20.04 and 20.10. [#61619](https://github.com/saltstack/salt/issues/61619) +- Fix ssh config roster to correctly parse the ssh config files that contain spaces. [#61650](https://github.com/saltstack/salt/issues/61650) +- Fix SoftLayer configuration not raising an exception when a domain is missing [#61727](https://github.com/saltstack/salt/issues/61727) +- Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry [#61789](https://github.com/saltstack/salt/issues/61789) +- Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function. 
[#61805](https://github.com/saltstack/salt/issues/61805) +- Fixed malformed state return when merge-serializing to an improperly formatted file [#61814](https://github.com/saltstack/salt/issues/61814) +- Made cmdmod._run[_all]_quiet work during minion startup on MacOS with runas specified (which fixed mac_service) [#61816](https://github.com/saltstack/salt/issues/61816) +- When deleting the vault cache, also delete from the session cache [#61821](https://github.com/saltstack/salt/issues/61821) +- Ignore errors on reading license info with dpkg_lowpkg to prevent tracebacks on getting package information. [#61827](https://github.com/saltstack/salt/issues/61827) +- win_lgpo: Display conflicting policy names when more than one policy is found [#61859](https://github.com/saltstack/salt/issues/61859) +- win_lgpo: Fixed intermittent KeyError when getting policy setting using lgpo.get_policy [#61860](https://github.com/saltstack/salt/issues/61860) +- Fixed listing minions on OpenBSD [#61966](https://github.com/saltstack/salt/issues/61966) +- Make Salt to return an error on "pkg" modules and states when targeting duplicated package names [#62019](https://github.com/saltstack/salt/issues/62019) +- Fix return of REST-returned permissions when auth_list is set [#62022](https://github.com/saltstack/salt/issues/62022) +- Normalize package names once on using pkg.installed/removed with yum to make it possible to install packages with the name containing a part similar to a name of architecture. [#62029](https://github.com/saltstack/salt/issues/62029) +- Fix inconsitency regarding name and pkgs parameters between zypperpkg.upgrade() and yumpkg.upgrade() [#62030](https://github.com/saltstack/salt/issues/62030) +- Fix attr=all handling in pkg.list_pkgs() (yum/zypper). 
[#62032](https://github.com/saltstack/salt/issues/62032) +- Fixed the humanname being ignored in pkgrepo.managed on openSUSE Leap [#62053](https://github.com/saltstack/salt/issues/62053) +- Fixed issue with some LGPO policies having whitespace at the beginning or end of the element alias [#62058](https://github.com/saltstack/salt/issues/62058) +- Fix ordering of args to libcloud_storage.download_object module [#62074](https://github.com/saltstack/salt/issues/62074) +- Ignore extend declarations in sls files that are excluded. [#62082](https://github.com/saltstack/salt/issues/62082) +- Remove leftover usage of impacket [#62101](https://github.com/saltstack/salt/issues/62101) +- Pass executable path from _get_path_exec() is used when calling the program. + The $HOME env is no longer modified globally. + Only trailing newlines are stripped from the fetched secret. + Pass process arguments are handled in a secure way. [#62120](https://github.com/saltstack/salt/issues/62120) +- Ignore some command return codes in openbsdrcctl_service to prevent spurious errors [#62131](https://github.com/saltstack/salt/issues/62131) +- Fixed extra period in filename output in tls module. Instead of "server.crt." it will now be "server.crt". [#62139](https://github.com/saltstack/salt/issues/62139) +- Make sure lingering PAexec-*.exe files in the Windows directory are cleaned up [#62152](https://github.com/saltstack/salt/issues/62152) +- Restored Salt's DeprecationWarnings [#62185](https://github.com/saltstack/salt/issues/62185) +- Fixed issue with forward slashes on Windows with file.recurse and clean=True [#62197](https://github.com/saltstack/salt/issues/62197) +- Recognize OSMC as Debian-based [#62198](https://github.com/saltstack/salt/issues/62198) +- Fixed Zypper module failing on RPM lock file being temporarily unavailable. 
[#62204](https://github.com/saltstack/salt/issues/62204) +- Improved error handling and diagnostics in the proxmox salt-cloud driver [#62211](https://github.com/saltstack/salt/issues/62211) +- Added EndeavourOS to the Arch os_family. [#62220](https://github.com/saltstack/salt/issues/62220) +- Fix salt-ssh not detecting `platform-python` as a valid interpreter on EL8 [#62235](https://github.com/saltstack/salt/issues/62235) +- Fix pkg.version_cmp on openEuler and a few other os flavors. [#62248](https://github.com/saltstack/salt/issues/62248) +- Fix localhost detection in glusterfs.peers [#62273](https://github.com/saltstack/salt/issues/62273) +- Fix Salt Package Manager (SPM) exception when calling spm create_repo . [#62281](https://github.com/saltstack/salt/issues/62281) +- Fix matcher slowness due to loader invocation [#62283](https://github.com/saltstack/salt/issues/62283) +- Fixes the Puppet module for non-aio Puppet packages for example running the Puppet module on FreeBSD. [#62323](https://github.com/saltstack/salt/issues/62323) +- Issue 62334: Displays a debug log message instead of an error log message when the publisher fails to connect [#62334](https://github.com/saltstack/salt/issues/62334) +- Fix pyobjects renderer access to opts and sls [#62336](https://github.com/saltstack/salt/issues/62336) +- Fix use of random shuffle and sample functions as Jinja filters [#62372](https://github.com/saltstack/salt/issues/62372) +- Fix groups with duplicate GIDs are not returned by get_group_list [#62377](https://github.com/saltstack/salt/issues/62377) +- Fix the "zpool.present" state when enabling zpool features that are already active. 
[#62390](https://github.com/saltstack/salt/issues/62390) +- Fix ability to execute remote file client methods in saltcheck [#62398](https://github.com/saltstack/salt/issues/62398) +- Update all platforms to use pycparser 2.21 or greater for Py 3.9 or higher, fixes fips fault with openssl v3.x [#62400](https://github.com/saltstack/salt/issues/62400) +- Due to changes in the Netmiko library for the exception paths, need to check the version of Netmiko python library and then import the exceptions from different locations depending on the result. [#62405](https://github.com/saltstack/salt/issues/62405) +- When using preq on a state, then prereq state will first be run with test=True to determine if there are changes. When there are changes, the state with the prereq option will be run prior to the prereq state. If this state fails then the prereq state will not run and the state output uses the test=True run. However, the proposed changes are included for the prereq state are included from the test=True run. We should pull those out as there weren't actually changes since the prereq state did not run. [#62408](https://github.com/saltstack/salt/issues/62408) +- Added directory mode for file.copy with makedirs [#62426](https://github.com/saltstack/salt/issues/62426) +- Provide better error handling in the various napalm proxy minion functions when the device is not accessible. [#62435](https://github.com/saltstack/salt/issues/62435) +- When handling aggregation, change the order to ensure that the requisites are aggregated first and then the state functions are aggregated. Caching whether aggregate functions are available for particular states so we don't need to attempt to load them everytime. 
[#62439](https://github.com/saltstack/salt/issues/62439) +- The patch allows bootstrapping kubernetes clusters in the version above 1.13 via salt module [#62451](https://github.com/saltstack/salt/issues/62451) +- sysctl.persist now updates the in-memory value on FreeBSD even if the on-disk value was already correct. [#62461](https://github.com/saltstack/salt/issues/62461) +- Fixed parsing CDROM apt sources [#62474](https://github.com/saltstack/salt/issues/62474) +- Update sanitizing masking for Salt SSH to include additional password like strings. [#62483](https://github.com/saltstack/salt/issues/62483) +- Fix user/group checking on file state functions in the test mode. [#62499](https://github.com/saltstack/salt/issues/62499) +- Fix user.present to allow removing groups using optional_groups parameter and enforcing idempotent group membership. [#62502](https://github.com/saltstack/salt/issues/62502) +- Fix possible tracebacks if a package with '------' or '======' in the description is installed on a Debian-based minion. [#62519](https://github.com/saltstack/salt/issues/62519) +- Fixed the omitted "pool" parameter when cloning a VM with the proxmox salt-cloud driver [#62521](https://github.com/saltstack/salt/issues/62521) +- Fix rendering of pyobjects states in saltcheck [#62523](https://github.com/saltstack/salt/issues/62523) +- Fixes pillar where a corrupted CacheDisk file forces the pillar to be rebuilt [#62527](https://github.com/saltstack/salt/issues/62527) +- Use str() method instead of repo_line for when python3-apt is installed or not in aptpkg.py. [#62546](https://github.com/saltstack/salt/issues/62546) +- Remove the connection_timeout from netmiko_connection_args before netmiko_connection_args is added to __context__["netmiko_device"]["args"] which is passed along to the Netmiko library. 
[#62547](https://github.com/saltstack/salt/issues/62547) +- Fix order specific mount.mounted options for persist [#62556](https://github.com/saltstack/salt/issues/62556) +- Fixed salt-cloud cloning a proxmox VM with a specified new vmid. [#62558](https://github.com/saltstack/salt/issues/62558) +- Fix runas with cmd module when using the onedir bundled packages [#62565](https://github.com/saltstack/salt/issues/62565) +- Update setproctitle version for all platforms [#62576](https://github.com/saltstack/salt/issues/62576) +- Fixed missing parameters when cloning a VM with the proxmox salt-cloud driver [#62580](https://github.com/saltstack/salt/issues/62580) +- Handle PermissionError when importing crypt when FIPS is enabled. [#62587](https://github.com/saltstack/salt/issues/62587) +- Correctly reraise exceptions in states.http [#62595](https://github.com/saltstack/salt/issues/62595) +- Fixed syndic eauth. Now jobs will be published when a valid eauth user is targeting allowed minions/functions. [#62618](https://github.com/saltstack/salt/issues/62618) +- updated rest_cherry/app to properly detect arg sent as a string as curl will do when only one arg is supplied. [#62624](https://github.com/saltstack/salt/issues/62624) +- Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline [#62633](https://github.com/saltstack/salt/issues/62633) +- Fixed vault ext pillar return data for KV v2 [#62651](https://github.com/saltstack/salt/issues/62651) +- Fix saltcheck _get_top_states doesn't pass saltenv to state.show_top [#62654](https://github.com/saltstack/salt/issues/62654) +- Fix groupadd.* functions hard code relative command name [#62657](https://github.com/saltstack/salt/issues/62657) +- Fixed pdbedit.create trying to use a bytes-like hash as string. 
[#62670](https://github.com/saltstack/salt/issues/62670) +- Fix dependency on legacy boto module in boto3 modules [#62672](https://github.com/saltstack/salt/issues/62672) +- Modified "_get_flags" function so that it returns regex flags instead of integers [#62676](https://github.com/saltstack/salt/issues/62676) +- Change startup ReqServer log messages from error to info level. [#62728](https://github.com/saltstack/salt/issues/62728) +- Fix kmod.* functions hard code relative command name [#62772](https://github.com/saltstack/salt/issues/62772) +- Fix mac_brew_pkg to work with null taps [#62793](https://github.com/saltstack/salt/issues/62793) +- Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" item is being treated as a schedule item. [#62795](https://github.com/saltstack/salt/issues/62795) +- Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode [#62817](https://github.com/saltstack/salt/issues/62817) +- Include UID and GID checks in modules.file.check_perms as well as comparing + ownership by username and group name. 
[#62818](https://github.com/saltstack/salt/issues/62818) +- Fix presence events on TCP transport by removing a client's presence when minion disconnects from publish channel correctly [#62826](https://github.com/saltstack/salt/issues/62826) +- Remove Azure deprecation messages from functions that always run w/ salt-cloud [#62845](https://github.com/saltstack/salt/issues/62845) +- Use select instead of iterating over entrypoints as a dictionary for importlib_metadata>=5.0.0 [#62854](https://github.com/saltstack/salt/issues/62854) +- Fixed master job scheduler using when [#62858](https://github.com/saltstack/salt/issues/62858) +- LGPO: Added support for missing domain controller policies: VulnerableChannelAllowList and LdapEnforceChannelBinding [#62873](https://github.com/saltstack/salt/issues/62873) +- Fix unnecessarily complex gce metadata grains code to use Google's metadata service more effectively. [#62878](https://github.com/saltstack/salt/issues/62878) +- Fixed dockermod version_info function for docker-py 6.0.0+ [#62882](https://github.com/saltstack/salt/issues/62882) +- Moving setting the LOAD_BALANCING_POLICY_MAP dictionary into the try except block that determines if the cassandra_cql module should be made available. [#62886](https://github.com/saltstack/salt/issues/62886) +- Updating various MongoDB module functions to work with latest version of pymongo. [#62900](https://github.com/saltstack/salt/issues/62900) +- Restored channel for Syndic minions to send job returns to the Salt master. [#62933](https://github.com/saltstack/salt/issues/62933) +- Removed _resolve_deps as it required a library that is not generally available, and switched to apt-get for everything as that can auto resolve dependencies. 
[#62934](https://github.com/saltstack/salt/issues/62934) +- Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang [#62937](https://github.com/saltstack/salt/issues/62937) +- Allow root user to modify crontab lines for non-root users (except AIX and Solaris). Align crontab line changes with the file ones and also with listing crontab. [#62940](https://github.com/saltstack/salt/issues/62940) +- Fix systemd_service.* functions hard code relative command name [#62942](https://github.com/saltstack/salt/issues/62942) +- Fix file.symlink backupname operation can copy remote contents to local disk [#62953](https://github.com/saltstack/salt/issues/62953) +- Issue #62968: Fix issue where cloud deployments were putting the keys in the wrong location on Windows hosts [#62968](https://github.com/saltstack/salt/issues/62968) +- Fixed gpg_passphrase issue with gpg decrypt/encrypt functions [#62977](https://github.com/saltstack/salt/issues/62977) +- Fix file.tidied FileNotFoundError [#62986](https://github.com/saltstack/salt/issues/62986) +- Fixed bug where module.wait states were detected as running legacy module.run syntax [#62988](https://github.com/saltstack/salt/issues/62988) +- Fixed issue with win_wua module where it wouldn't load if the CryptSvc was set to Manual start [#62993](https://github.com/saltstack/salt/issues/62993) +- The `__opts__` dunder dictionary is now added to the loader's `pack` if not + already present, which makes it accessible via the + `salt.loader.context.NamedLoaderContext` class. 
[#63013](https://github.com/saltstack/salt/issues/63013) +- Issue #63024: Fix issue where grains and config data were being place in the wrong location on Windows hosts [#63024](https://github.com/saltstack/salt/issues/63024) +- Fix btrfs.subvolume_snapshot command failing [#63025](https://github.com/saltstack/salt/issues/63025) +- Fix file.retention_schedule always reports changes [#63033](https://github.com/saltstack/salt/issues/63033) +- Fix mongo authentication for mongo ext_pillar and mongo returner + + This fix also include the ability to use the mongo connection string for mongo ext_pillar [#63058](https://github.com/saltstack/salt/issues/63058) +- Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. [#63103](https://github.com/saltstack/salt/issues/63103) +- TCP transport documentation now contains proper master/minion-side filtering information [#63120](https://github.com/saltstack/salt/issues/63120) +- Fixed gpg.verify does not respect gnupghome [#63145](https://github.com/saltstack/salt/issues/63145) +- Made pillar cache pass extra minion data as well [#63208](https://github.com/saltstack/salt/issues/63208) +- Fix serious performance issues with the file.tidied module [#63231](https://github.com/saltstack/salt/issues/63231) +- Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. [#63317](https://github.com/saltstack/salt/issues/63317) +- Import StrictVersion and LooseVersion from setuptools.distutils.verison or setuptools._distutils.version, if first not available [#63350](https://github.com/saltstack/salt/issues/63350) +- When the shell is passed as powershell or pwsh, only wrapper the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended. 
[#63590](https://github.com/saltstack/salt/issues/63590) +- LGPO: Added support for "Relax minimum password length limits" [#63596](https://github.com/saltstack/salt/issues/63596) +- Fixed the ability to set a scheduled task to auto delete if not scheduled to run again (``delete_after``) [#63650](https://github.com/saltstack/salt/issues/63650) +- When a job is disabled only increase it's _next_fire_time value if the job would have run at the current time, eg. the current _next_fire_time == now. [#63699](https://github.com/saltstack/salt/issues/63699) +- have salt.template.compile_template_str cleanup its temp files. [#63724](https://github.com/saltstack/salt/issues/63724) +- Check file is not empty before attempting to read pillar disk cache file [#63729](https://github.com/saltstack/salt/issues/63729) +- Fixed an issue with generating fingerprints for public keys with different line endings [#63742](https://github.com/saltstack/salt/issues/63742) +- Change default GPG keyserver from pgp.mit.edu to keys.openpgp.org. [#63806](https://github.com/saltstack/salt/issues/63806) +- fix cherrypy 400 error output to be less generic. [#63835](https://github.com/saltstack/salt/issues/63835) +- Ensure kwargs is passed along to _call_apt when passed into install function. [#63847](https://github.com/saltstack/salt/issues/63847) +- remove eval and update logging to be more informative on bad config [#63879](https://github.com/saltstack/salt/issues/63879) +- add linux_distribution to util to stop dep warning [#63904](https://github.com/saltstack/salt/issues/63904) +- Handle the situation when a sub proxy minion does not init properly, eg. an exception happens, and the sub proxy object is not available. [#63923](https://github.com/saltstack/salt/issues/63923) +- Clarifying documentation for extension_modules configuration option. 
[#63929](https://github.com/saltstack/salt/issues/63929) +- Windows pkg module now properly handles versions containing strings [#63935](https://github.com/saltstack/salt/issues/63935) +- Handle the scenario when the check_cmd requisite is used with a state function when the state has a local check_cmd function but that function isn't used by that function. [#63948](https://github.com/saltstack/salt/issues/63948) +- Issue #63981: Allow users to pass verify_ssl to pkg.install/pkg.installed on Windows [#63981](https://github.com/saltstack/salt/issues/63981) + +# Added + +- Introduce a `LIB_STATE_DIR` syspaths variable which defaults to `CONFIG_DIR`, + but can be individually customized during installation by specifying + `--salt-lib-state-dir` during installation. Change the default `pki_dir` to + `/pki/master` (for the master) and `/pki/minion` + (for the minion). [#3396](https://github.com/saltstack/salt/issues/3396) +- Allow users to enable 'queue=True' for all state runs via config file [#31468](https://github.com/saltstack/salt/issues/31468) +- Added pillar templating to vault policies [#43287](https://github.com/saltstack/salt/issues/43287) +- Add support for NVMeF as a transport protocol for hosts in a Pure Storage FlashArray [#51088](https://github.com/saltstack/salt/issues/51088) +- A new salt-ssh roster that generates a roster by parsing a known_hosts file. 
[#54679](https://github.com/saltstack/salt/issues/54679) +- Added Windows Event Viewer support [#54713](https://github.com/saltstack/salt/issues/54713) +- Added the win_lgpo_reg state and execution modules which will allow registry based group policy to be set directly in the Registry.pol file [#56013](https://github.com/saltstack/salt/issues/56013) +- Added resource tagging functions to boto_dynamodb execution module [#57500](https://github.com/saltstack/salt/issues/57500) +- Added `openvswitch_db` state module and functions `bridge_to_parent`, + `bridge_to_vlan`, `db_get`, and `db_set` to the `openvswitch` execution module. + Also added optional `parent` and `vlan` parameters to the + `openvswitch_bridge.present` state module function and the + `openvswitch.bridge_create` execution module function. [#58986](https://github.com/saltstack/salt/issues/58986) +- State module to manage SysFS attributes [#60154](https://github.com/saltstack/salt/issues/60154) +- Added ability for `salt.wait_for_event` to handle `event_id`s that have a list value. [#60430](https://github.com/saltstack/salt/issues/60430) +- Added support for Linux ppc64le core grains (cpu_model, virtual, productname, manufacturer, serialnumber) and arm core grains (serialnumber, productname) [#60518](https://github.com/saltstack/salt/issues/60518) +- Added autostart option to virt.defined and virt.running states, along with virt.update execution modules. [#60700](https://github.com/saltstack/salt/issues/60700) +- Added .0 back to our versioning scheme for future versions (e.g. 3006.0) [#60722](https://github.com/saltstack/salt/issues/60722) +- Initial work to allow parallel startup of proxy minions when used as sub proxies with Deltaproxy. [#61153](https://github.com/saltstack/salt/issues/61153) +- Added node label support for GCE [#61245](https://github.com/saltstack/salt/issues/61245) +- Support the --priority flag when adding sources to Chocolatey. 
[#61319](https://github.com/saltstack/salt/issues/61319) +- Add namespace option to ext_pillar.http_json [#61335](https://github.com/saltstack/salt/issues/61335) +- Added a filter function to ps module to get a list of processes on a minion according to their state. [#61420](https://github.com/saltstack/salt/issues/61420) +- Add postgres.timeout option to postgres module for limiting postgres query times [#61433](https://github.com/saltstack/salt/issues/61433) +- Added new optional vault option, ``config_location``. This can be either ``master`` or ``local`` and defines where vault will look for connection details, either requesting them from the master or using the local config. [#61857](https://github.com/saltstack/salt/issues/61857) +- Add ipwrap() jinja filter to wrap IPv6 addresses with brackets. [#61931](https://github.com/saltstack/salt/issues/61931) +- 'tcp' transport is now available in ipv6-only network [#62009](https://github.com/saltstack/salt/issues/62009) +- Add `diff_attr` parameter to pkg.upgrade() (zypper/yum). [#62031](https://github.com/saltstack/salt/issues/62031) +- Config option pass_variable_prefix allows to distinguish variables that contain paths to pass secrets. + Config option pass_strict_fetch allows to error out when a secret cannot be fetched from pass. + Config option pass_dir allows setting the PASSWORD_STORE_DIR env for pass. + Config option pass_gnupghome allows setting the $GNUPGHOME env for pass. 
[#62120](https://github.com/saltstack/salt/issues/62120) +- Add file.pruned state and expanded file.rmdir exec module functionality [#62178](https://github.com/saltstack/salt/issues/62178) +- Added "dig.PTR" function to resolve PTR records for IPs, as well as tests and documentation [#62275](https://github.com/saltstack/salt/issues/62275) +- Added the ability to remove a KB using the DISM state/execution modules [#62366](https://github.com/saltstack/salt/issues/62366) +- Add " python" subcommand to allow execution or arbitrary scripts via bundled Python runtime [#62381](https://github.com/saltstack/salt/issues/62381) +- Add ability to provide conditions which convert normal state actions to no-op when true [#62446](https://github.com/saltstack/salt/issues/62446) +- Added debug log messages displaying the command being run when installing packages on Windows [#62480](https://github.com/saltstack/salt/issues/62480) +- Add biosvendor grain [#62496](https://github.com/saltstack/salt/issues/62496) +- Add ifelse Jinja function as found in CFEngine [#62508](https://github.com/saltstack/salt/issues/62508) +- Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. [#62539](https://github.com/saltstack/salt/issues/62539) +- Adds __env__substitution to ext_pillar.stack; followup of #61531, improved exception handling for stacked template (jinja) template rendering and yaml parsing in ext_pillar.stack [#62578](https://github.com/saltstack/salt/issues/62578) +- Increase file.tidied flexibility with regard to age and size [#62678](https://github.com/saltstack/salt/issues/62678) +- Added "connected_devices" feature to netbox pillar module. 
It contains extra information about devices connected to the minion [#62761](https://github.com/saltstack/salt/issues/62761) +- Add atomic file operation for symlink changes [#62768](https://github.com/saltstack/salt/issues/62768) +- Add password/account locking/unlocking in user.present state on supported operating systems [#62856](https://github.com/saltstack/salt/issues/62856) +- Added onchange configuration for script engine [#62867](https://github.com/saltstack/salt/issues/62867) +- Added output and bare functionality to export_key gpg module function [#62978](https://github.com/saltstack/salt/issues/62978) +- Add keyvalue serializer for environment files [#62983](https://github.com/saltstack/salt/issues/62983) +- Add ability to ignore symlinks in file.tidied [#63042](https://github.com/saltstack/salt/issues/63042) +- salt-cloud support IMDSv2 tokens when using 'use-instance-role-credentials' [#63067](https://github.com/saltstack/salt/issues/63067) +- Add ability for file.symlink to not set ownership on existing links [#63093](https://github.com/saltstack/salt/issues/63093) +- Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation [#63095](https://github.com/saltstack/salt/issues/63095) +- Add functions that will return the underlying block device, mount point, and filesystem type for a given path [#63098](https://github.com/saltstack/salt/issues/63098) +- Add ethtool execution and state module functions for pause [#63128](https://github.com/saltstack/salt/issues/63128) +- Add boardname grain [#63131](https://github.com/saltstack/salt/issues/63131) +- Added management of ECDSA/EdDSA private keys with x509 modules in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63248](https://github.com/saltstack/salt/issues/63248) +- Added x509 modules support for different output formats in the new cryptography x509 module. 
Please migrate to the new cryptography x509 module for this improvement. [#63249](https://github.com/saltstack/salt/issues/63249) +- Added deprecation_warning test state for ensuring that deprecation warnings are correctly emitted. [#63315](https://github.com/saltstack/salt/issues/63315) +- Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id. + This allows users to enable state_events on a per use basis rather than having to + enable them globally for all state runs. [#63316](https://github.com/saltstack/salt/issues/63316) +- Allow max queue size setting for state runs to prevent performance problems from queue growth [#63356](https://github.com/saltstack/salt/issues/63356) +- Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) +- Include the version of `relenv` in the versions report. [#63827](https://github.com/saltstack/salt/issues/63827) +- Added debug log messages displaying the command being run when removing packages on Windows [#63866](https://github.com/saltstack/salt/issues/63866) + +# Security + +- Upgrade Requirements Due to Security Issues. + + * Upgrade to `cryptography>=39.0.1` due to: + * https://github.com/advisories/GHSA-x4qr-2fvf-3mr5 + * https://github.com/advisories/GHSA-w7pp-m8wf-vj6r + * Upgrade to `pyopenssl==23.0.0` due to the cryptography upgrade. + * Update to `markdown-it-py==2.2.0` due to: + * https://github.com/advisories/GHSA-jrwr-5x3p-hvc3 + * https://github.com/advisories/GHSA-vrjv-mxr7-vjf8 [#63882](https://github.com/saltstack/salt/issues/63882) + + +* Sun Mar 19 2023 Salt Project Packaging - 3006.0~rc2 + +# Removed + +- Remove and deprecate the __orchestration__ key from salt.runner and salt.wheel return data. To get it back, set features.enable_deprecated_orchestration_flag master configuration option to True. The flag will be completely removed in Salt 3008 Argon. 
[#59917](https://github.com/saltstack/salt/issues/59917) +- Removed distutils and replaced with setuptools, given distutils is deprecated and removed in Python 3.12 [#60476](https://github.com/saltstack/salt/issues/60476) +- Removed ``runtests`` targets from ``noxfile.py`` [#62239](https://github.com/saltstack/salt/issues/62239) +- Removed the PyObjC dependency. + + This addresses problems with building a one dir build for macOS. + It became problematic because depending on the macOS version, it pulls different dependencies, and we would either have to build a macos onedir for each macOS supported release, or ship a crippled onedir(because it would be tied to the macOS version where the onedir was built). + Since it's currently not being used, it's removed. [#62432](https://github.com/saltstack/salt/issues/62432) +- Removed `SixRedirectImporter` from Salt. Salt hasn't shipped `six` since Salt 3004. [#63874](https://github.com/saltstack/salt/issues/63874) + +# Deprecated + +- renamed `keep_jobs`, specifying job cache TTL in hours, to `keep_jobs_seconds`, specifying TTL in seconds. + `keep_jobs` will be removed in the Argon release [#55295](https://github.com/saltstack/salt/issues/55295) +- Removing all references to napalm-base which is no longer supported. [#61542](https://github.com/saltstack/salt/issues/61542) +- The 'ip_bracket' function has been moved from salt/utils/zeromq.py in salt/utils/network.py [#62009](https://github.com/saltstack/salt/issues/62009) +- The `expand_repo_def` function in `salt.modules.aptpkg` is now deprecated. It's only used in `salt.states.pkgrepo` and it has no use of being exposed to the CLI. 
[#62485](https://github.com/saltstack/salt/issues/62485) +- Deprecated defunct Django returner [#62644](https://github.com/saltstack/salt/issues/62644) +- Deprecate core ESXi and associated states and modules, vcenter and vsphere support in favor of Salt VMware Extensions [#62754](https://github.com/saltstack/salt/issues/62754) +- Removing manufacture grain which has been deprecated. [#62914](https://github.com/saltstack/salt/issues/62914) +- Removing deprecated utils/boto3_elasticsearch.py [#62915](https://github.com/saltstack/salt/issues/62915) +- Removing support for the now deprecated _ext_nodes from salt/master.py. [#62917](https://github.com/saltstack/salt/issues/62917) +- Deprecating the Salt Slack engine in favor of the Salt Slack Bolt Engine. [#63095](https://github.com/saltstack/salt/issues/63095) +- `salt.utils.version.StrictVersion` is now deprecated and it's use should be replaced with `salt.utils.version.Version`. [#63383](https://github.com/saltstack/salt/issues/63383) + +# Changed + +- More intelligent diffing in changes of file.serialize state. [#48609](https://github.com/saltstack/salt/issues/48609) +- Move deprecation of the neutron module to Argon. Please migrate to the neutronng module instead. [#49430](https://github.com/saltstack/salt/issues/49430) +- ``umask`` is now a global state argument, instead of only applying to ``cmd`` + states. [#57803](https://github.com/saltstack/salt/issues/57803) +- Update pillar.obfuscate to accept kwargs in addition to args. This is useful when passing in keyword arguments like saltenv that are then passed along to pillar.items. [#58971](https://github.com/saltstack/salt/issues/58971) +- Improve support for listing macOS brew casks [#59439](https://github.com/saltstack/salt/issues/59439) +- Add missing MariaDB Grants to mysql module. + MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. 
+ Also improved exception handling in `grant_add` which did not log the original error message and replaced it with a generic error. [#61409](https://github.com/saltstack/salt/issues/61409) +- Use VENV_PIP_TARGET environment variable as a default target for pip if present. [#62089](https://github.com/saltstack/salt/issues/62089) +- Disabled FQDNs grains on macOS by default [#62168](https://github.com/saltstack/salt/issues/62168) +- Replaced pyroute2.IPDB with pyroute2.NDB, as the former is deprecated [#62218](https://github.com/saltstack/salt/issues/62218) +- Enhance capture of error messages for Zypper calls in zypperpkg module. [#62346](https://github.com/saltstack/salt/issues/62346) +- Removed GPG_1_3_1 check [#62895](https://github.com/saltstack/salt/issues/62895) +- Requisite state chunks now all consistently contain `__id__`, `__sls__` and `name`. [#63012](https://github.com/saltstack/salt/issues/63012) +- netapi_enable_clients option to allow enabling/disabling of clients in salt-api. + By default all clients will now be disabled. Users of salt-api will need + to update their master config to enable the clients that they use. Not adding + the netapi_enable_clients option with required clients to the master config will + disable salt-api. [#63050](https://github.com/saltstack/salt/issues/63050) +- Stop relying on `salt/_version.py` to write Salt's version. Instead use `salt/_version.txt` which only contains the version string. [#63383](https://github.com/saltstack/salt/issues/63383) +- Set enable_fqdns_grains to be False by default. [#63595](https://github.com/saltstack/salt/issues/63595) +- Changelog snippet files must now have a `.md` file extension to be more explicit on what type of rendering is done when they are included in the main `CHANGELOG.md` file. 
[#63710](https://github.com/saltstack/salt/issues/63710) + +# Fixed + +- Add kwargs to handle extra parameters for http.query [#36138](https://github.com/saltstack/salt/issues/36138) +- Fix mounted bind mounts getting active mount options added [#39292](https://github.com/saltstack/salt/issues/39292) +- Fix `sysctl.present` converts spaces to tabs. [#40054](https://github.com/saltstack/salt/issues/40054) +- Fixes state pkg.purged to purge removed packages on Debian family systems [#42306](https://github.com/saltstack/salt/issues/42306) +- Fix fun_args missing from syndic returns [#45823](https://github.com/saltstack/salt/issues/45823) +- Fix mount.mounted with 'mount: False' reports unmounted file system as unchanged when running with test=True [#47201](https://github.com/saltstack/salt/issues/47201) +- Issue #49310: Allow users to touch a file with Unix date of birth [#49310](https://github.com/saltstack/salt/issues/49310) +- Do not raise an exception in pkg.info_installed on nonzero return code [#51620](https://github.com/saltstack/salt/issues/51620) +- Passes the value of the force parameter from file.copy to its call to file.remove so that files with the read-only attribute are handled. [#51739](https://github.com/saltstack/salt/issues/51739) +- Fixed x509.certificate_managed creates new certificate every run in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#52167](https://github.com/saltstack/salt/issues/52167) +- Don't check for cached pillar errors on state.apply [#52354](https://github.com/saltstack/salt/issues/52354), [#57180](https://github.com/saltstack/salt/issues/57180), [#59339](https://github.com/saltstack/salt/issues/59339) +- Swapping out args and kwargs for arg and kwarg respectively in the Slack engine when the command passed is a runner. 
[#52400](https://github.com/saltstack/salt/issues/52400) +- Ensure when we're adding chunks to the rules when running aggregation with the iptables state module we use a copy of the chunk otherwise we end up with a recursive mess. [#53353](https://github.com/saltstack/salt/issues/53353) +- When user_create or user_remove fail, return False instead of returning the error. [#53377](https://github.com/saltstack/salt/issues/53377) +- Include sync_roster when sync_all is called. [#53914](https://github.com/saltstack/salt/issues/53914) +- Avoid warning noise in lograte.get [#53988](https://github.com/saltstack/salt/issues/53988) +- Fixed listing revoked keys with gpg.list_keys [#54347](https://github.com/saltstack/salt/issues/54347) +- Fix mount.mounted does not handle blanks properly [#54508](https://github.com/saltstack/salt/issues/54508) +- Fixed grain num_cpus get wrong CPUs count in case of inconsistent CPU numbering. [#54682](https://github.com/saltstack/salt/issues/54682) +- Fix spelling error for python_shell argument in dpkg_lower module [#54907](https://github.com/saltstack/salt/issues/54907) +- Cleaned up bytes response data before sending to non-bytes compatible returners (postgres, mysql) [#55226](https://github.com/saltstack/salt/issues/55226) +- Fixed malformed state return when testing file.managed with unavailable source file [#55269](https://github.com/saltstack/salt/issues/55269) +- Included stdout in error message for Zypper calls in zypperpkg module. [#56016](https://github.com/saltstack/salt/issues/56016) +- Fixed pillar.filter_by with salt-ssh [#56093](https://github.com/saltstack/salt/issues/56093) +- Fix boto_route53 issue with (multiple) VPCs. [#57139](https://github.com/saltstack/salt/issues/57139) +- Remove log from mine runner which was not used. [#57463](https://github.com/saltstack/salt/issues/57463) +- Fixed x509.read_certificate error when reading a Microsoft CA issued certificate in the new cryptography x509 module. 
Please migrate to the new cryptography x509 module for this improvement. [#57535](https://github.com/saltstack/salt/issues/57535) +- Updating Slack engine to use slack_bolt library. [#57842](https://github.com/saltstack/salt/issues/57842) +- Fixed warning about replace=True with x509.certificate_managed in the new cryptography x509 module. [#58165](https://github.com/saltstack/salt/issues/58165) +- Fix salt.modules.pip:is_installed doesn't handle locally installed packages [#58202](https://github.com/saltstack/salt/issues/58202) +- Add missing MariaDB Grants to mysql module. MariaDB has added some grants in 10.4.x and 10.5.x that are not present here, which results in an error when creating. [#58297](https://github.com/saltstack/salt/issues/58297) +- linux_shadow: Fix cases where malformed shadow entries cause `user.present` + states to fail. [#58423](https://github.com/saltstack/salt/issues/58423) +- Fixed salt.utils.compat.cmp to work with dictionaries [#58729](https://github.com/saltstack/salt/issues/58729) +- Fixed formatting for terse output mode [#58953](https://github.com/saltstack/salt/issues/58953) +- Fixed RecursiveDictDiffer with added nested dicts [#59017](https://github.com/saltstack/salt/issues/59017) +- Fixed x509.certificate_managed has DoS effect on master in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#59169](https://github.com/saltstack/salt/issues/59169) +- Fixed saltnado websockets disconnecting immediately [#59183](https://github.com/saltstack/salt/issues/59183) +- Fixed x509.certificate_managed rolls certificates every now and then in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. 
[#59315](https://github.com/saltstack/salt/issues/59315) +- Fix postgres_privileges.present not idempotent for functions [#59585](https://github.com/saltstack/salt/issues/59585) +- Fixed influxdb_continuous_query.present state to provide the client args to the underlying module on create. [#59766](https://github.com/saltstack/salt/issues/59766) +- Warn when using insecure (http:// based) key_urls for apt-based systems in pkgrepo.managed, and add a kwarg that determines the validity of such a url. [#59786](https://github.com/saltstack/salt/issues/59786) +- add load balancing policy default option and ensure the module can be executed with arguments from CLI [#59909](https://github.com/saltstack/salt/issues/59909) +- Fix salt-ssh when using imports with extra-filerefs. [#60003](https://github.com/saltstack/salt/issues/60003) +- Fixed cache directory corruption startup error [#60170](https://github.com/saltstack/salt/issues/60170) +- Update docs remove dry_run in docstring of file.blockreplace state. [#60227](https://github.com/saltstack/salt/issues/60227) +- Adds Parrot to OS_Family_Map in grains. [#60249](https://github.com/saltstack/salt/issues/60249) +- Fixed stdout and stderr being empty sometimes when use_vt=True for the cmd.run[*] functions [#60365](https://github.com/saltstack/salt/issues/60365) +- Use return code in iptables --check to verify rule exists. [#60467](https://github.com/saltstack/salt/issues/60467) +- Fix regression pip.installed does not pass env_vars when calling pip.list [#60557](https://github.com/saltstack/salt/issues/60557) +- Fix xfs module when additional output included in mkfs.xfs command. [#60853](https://github.com/saltstack/salt/issues/60853) +- Fixed parsing new format of terraform states in roster.terraform [#60915](https://github.com/saltstack/salt/issues/60915) +- Fixed recognizing installed ARMv7 rpm packages in compatible architectures. 
[#60994](https://github.com/saltstack/salt/issues/60994) +- Fixing changes dict in pkg state to be consistent when installing and test=True. [#60995](https://github.com/saltstack/salt/issues/60995) +- Fix cron.present duplicating entries when changing timespec to special. [#60997](https://github.com/saltstack/salt/issues/60997) +- Made salt-ssh respect --wipe again [#61083](https://github.com/saltstack/salt/issues/61083) +- state.orchestrate_single only passes a pillar if it is set to the state + function. This allows it to be used with state functions that don't accept a + pillar keyword argument. [#61092](https://github.com/saltstack/salt/issues/61092) +- Fix ipset state when the comment kwarg is set. [#61122](https://github.com/saltstack/salt/issues/61122) +- Fix issue with archive.unzip where the password was not being encoded for the extract function [#61422](https://github.com/saltstack/salt/issues/61422) +- Some Linux distributions (like AlmaLinux, Astra Linux, Debian, Mendel, Linux + Mint, Pop!_OS, Rocky Linux) report different `oscodename`, `osfullname`, + `osfinger` grains if lsb-release is installed or not. They have been changed to + only derive these OS grains from `/etc/os-release`. [#61618](https://github.com/saltstack/salt/issues/61618) +- Pop!_OS uses the full version (YY.MM) in the osfinger grain now, not just the year. This allows differentiating for example between 20.04 and 20.10. [#61619](https://github.com/saltstack/salt/issues/61619) +- Fix ssh config roster to correctly parse the ssh config files that contain spaces. 
[#61650](https://github.com/saltstack/salt/issues/61650) +- Fix SoftLayer configuration not raising an exception when a domain is missing [#61727](https://github.com/saltstack/salt/issues/61727) +- Allow the minion to start or salt-call to run even if the user doesn't have permissions to read the root_dir value from the registry [#61789](https://github.com/saltstack/salt/issues/61789) +- Need to move the creation of the proxy object for the ProxyMinion further down in the initialization for sub proxies to ensure that all modules, especially any custom proxy modules, are available before attempting to run the init function. [#61805](https://github.com/saltstack/salt/issues/61805) +- Fixed malformed state return when merge-serializing to an improperly formatted file [#61814](https://github.com/saltstack/salt/issues/61814) +- Made cmdmod._run[_all]_quiet work during minion startup on MacOS with runas specified (which fixed mac_service) [#61816](https://github.com/saltstack/salt/issues/61816) +- When deleting the vault cache, also delete from the session cache [#61821](https://github.com/saltstack/salt/issues/61821) +- Ignore errors on reading license info with dpkg_lowpkg to prevent tracebacks on getting package information. 
[#61827](https://github.com/saltstack/salt/issues/61827) +- win_lgpo: Display conflicting policy names when more than one policy is found [#61859](https://github.com/saltstack/salt/issues/61859) +- win_lgpo: Fixed intermittent KeyError when getting policy setting using lgpo.get_policy [#61860](https://github.com/saltstack/salt/issues/61860) +- Fixed listing minions on OpenBSD [#61966](https://github.com/saltstack/salt/issues/61966) +- Make Salt return an error on "pkg" modules and states when targeting duplicated package names [#62019](https://github.com/saltstack/salt/issues/62019) +- Fix return of REST-returned permissions when auth_list is set [#62022](https://github.com/saltstack/salt/issues/62022) +- Normalize package names once on using pkg.installed/removed with yum to make it possible to install packages with the name containing a part similar to a name of architecture. [#62029](https://github.com/saltstack/salt/issues/62029) +- Fix inconsistency regarding name and pkgs parameters between zypperpkg.upgrade() and yumpkg.upgrade() [#62030](https://github.com/saltstack/salt/issues/62030) +- Fix attr=all handling in pkg.list_pkgs() (yum/zypper). [#62032](https://github.com/saltstack/salt/issues/62032) +- Fixed the humanname being ignored in pkgrepo.managed on openSUSE Leap [#62053](https://github.com/saltstack/salt/issues/62053) +- Fixed issue with some LGPO policies having whitespace at the beginning or end of the element alias [#62058](https://github.com/saltstack/salt/issues/62058) +- Fix ordering of args to libcloud_storage.download_object module [#62074](https://github.com/saltstack/salt/issues/62074) +- Ignore extend declarations in sls files that are excluded. [#62082](https://github.com/saltstack/salt/issues/62082) +- Remove leftover usage of impacket [#62101](https://github.com/saltstack/salt/issues/62101) +- Pass executable path from _get_path_exec() is used when calling the program. + The $HOME env is no longer modified globally.
+ Only trailing newlines are stripped from the fetched secret. + Pass process arguments are handled in a secure way. [#62120](https://github.com/saltstack/salt/issues/62120) +- Ignore some command return codes in openbsdrcctl_service to prevent spurious errors [#62131](https://github.com/saltstack/salt/issues/62131) +- Fixed extra period in filename output in tls module. Instead of "server.crt." it will now be "server.crt". [#62139](https://github.com/saltstack/salt/issues/62139) +- Make sure lingering PAexec-*.exe files in the Windows directory are cleaned up [#62152](https://github.com/saltstack/salt/issues/62152) +- Restored Salt's DeprecationWarnings [#62185](https://github.com/saltstack/salt/issues/62185) +- Fixed issue with forward slashes on Windows with file.recurse and clean=True [#62197](https://github.com/saltstack/salt/issues/62197) +- Recognize OSMC as Debian-based [#62198](https://github.com/saltstack/salt/issues/62198) +- Fixed Zypper module failing on RPM lock file being temporarily unavailable. [#62204](https://github.com/saltstack/salt/issues/62204) +- Improved error handling and diagnostics in the proxmox salt-cloud driver [#62211](https://github.com/saltstack/salt/issues/62211) +- Added EndeavourOS to the Arch os_family. [#62220](https://github.com/saltstack/salt/issues/62220) +- Fix salt-ssh not detecting `platform-python` as a valid interpreter on EL8 [#62235](https://github.com/saltstack/salt/issues/62235) +- Fix pkg.version_cmp on openEuler and a few other os flavors. [#62248](https://github.com/saltstack/salt/issues/62248) +- Fix localhost detection in glusterfs.peers [#62273](https://github.com/saltstack/salt/issues/62273) +- Fix Salt Package Manager (SPM) exception when calling spm create_repo . 
[#62281](https://github.com/saltstack/salt/issues/62281) +- Fix matcher slowness due to loader invocation [#62283](https://github.com/saltstack/salt/issues/62283) +- Fixes the Puppet module for non-aio Puppet packages for example running the Puppet module on FreeBSD. [#62323](https://github.com/saltstack/salt/issues/62323) +- Issue 62334: Displays a debug log message instead of an error log message when the publisher fails to connect [#62334](https://github.com/saltstack/salt/issues/62334) +- Fix pyobjects renderer access to opts and sls [#62336](https://github.com/saltstack/salt/issues/62336) +- Fix use of random shuffle and sample functions as Jinja filters [#62372](https://github.com/saltstack/salt/issues/62372) +- Fix groups with duplicate GIDs are not returned by get_group_list [#62377](https://github.com/saltstack/salt/issues/62377) +- Fix the "zpool.present" state when enabling zpool features that are already active. [#62390](https://github.com/saltstack/salt/issues/62390) +- Fix ability to execute remote file client methods in saltcheck [#62398](https://github.com/saltstack/salt/issues/62398) +- Update all platforms to use pycparser 2.21 or greater for Py 3.9 or higher, fixes fips fault with openssl v3.x [#62400](https://github.com/saltstack/salt/issues/62400) +- Due to changes in the Netmiko library for the exception paths, need to check the version of Netmiko python library and then import the exceptions from different locations depending on the result. [#62405](https://github.com/saltstack/salt/issues/62405) +- When using prereq on a state, the prereq state will first be run with test=True to determine if there are changes. When there are changes, the state with the prereq option will be run prior to the prereq state. If this state fails then the prereq state will not run and the state output uses the test=True run. However, the proposed changes for the prereq state are included from the test=True run.
We should pull those out as there weren't actually changes since the prereq state did not run. [#62408](https://github.com/saltstack/salt/issues/62408) +- Added directory mode for file.copy with makedirs [#62426](https://github.com/saltstack/salt/issues/62426) +- Provide better error handling in the various napalm proxy minion functions when the device is not accessible. [#62435](https://github.com/saltstack/salt/issues/62435) +- When handling aggregation, change the order to ensure that the requisites are aggregated first and then the state functions are aggregated. Caching whether aggregate functions are available for particular states so we don't need to attempt to load them every time. [#62439](https://github.com/saltstack/salt/issues/62439) +- The patch allows bootstrapping kubernetes clusters in the version above 1.13 via salt module [#62451](https://github.com/saltstack/salt/issues/62451) +- sysctl.persist now updates the in-memory value on FreeBSD even if the on-disk value was already correct. [#62461](https://github.com/saltstack/salt/issues/62461) +- Fixed parsing CDROM apt sources [#62474](https://github.com/saltstack/salt/issues/62474) +- Update sanitizing masking for Salt SSH to include additional password like strings. [#62483](https://github.com/saltstack/salt/issues/62483) +- Fix user/group checking on file state functions in the test mode. [#62499](https://github.com/saltstack/salt/issues/62499) +- Fix user.present to allow removing groups using optional_groups parameter and enforcing idempotent group membership. [#62502](https://github.com/saltstack/salt/issues/62502) +- Fix possible tracebacks if a package with '------' or '======' in its description is installed on the Debian based minion.
[#62519](https://github.com/saltstack/salt/issues/62519) +- Fixed the omitted "pool" parameter when cloning a VM with the proxmox salt-cloud driver [#62521](https://github.com/saltstack/salt/issues/62521) +- Fix rendering of pyobjects states in saltcheck [#62523](https://github.com/saltstack/salt/issues/62523) +- Fixes pillar where a corrupted CacheDisk file forces the pillar to be rebuilt [#62527](https://github.com/saltstack/salt/issues/62527) +- Use str() method instead of repo_line for when python3-apt is installed or not in aptpkg.py. [#62546](https://github.com/saltstack/salt/issues/62546) +- Remove the connection_timeout from netmiko_connection_args before netmiko_connection_args is added to __context__["netmiko_device"]["args"] which is passed along to the Netmiko library. [#62547](https://github.com/saltstack/salt/issues/62547) +- Fix order specific mount.mounted options for persist [#62556](https://github.com/saltstack/salt/issues/62556) +- Fixed salt-cloud cloning a proxmox VM with a specified new vmid. [#62558](https://github.com/saltstack/salt/issues/62558) +- Fix runas with cmd module when using the onedir bundled packages [#62565](https://github.com/saltstack/salt/issues/62565) +- Update setproctitle version for all platforms [#62576](https://github.com/saltstack/salt/issues/62576) +- Fixed missing parameters when cloning a VM with the proxmox salt-cloud driver [#62580](https://github.com/saltstack/salt/issues/62580) +- Handle PermissionError when importing crypt when FIPS is enabled. [#62587](https://github.com/saltstack/salt/issues/62587) +- Correctly reraise exceptions in states.http [#62595](https://github.com/saltstack/salt/issues/62595) +- Fixed syndic eauth. Now jobs will be published when a valid eauth user is targeting allowed minions/functions. [#62618](https://github.com/saltstack/salt/issues/62618) +- updated rest_cherry/app to properly detect arg sent as a string as curl will do when only one arg is supplied. 
[#62624](https://github.com/saltstack/salt/issues/62624) +- Prevent possible tracebacks in core grains module by ignoring non utf8 characters in /proc/1/environ, /proc/1/cmdline, /proc/cmdline [#62633](https://github.com/saltstack/salt/issues/62633) +- Fixed vault ext pillar return data for KV v2 [#62651](https://github.com/saltstack/salt/issues/62651) +- Fix saltcheck _get_top_states doesn't pass saltenv to state.show_top [#62654](https://github.com/saltstack/salt/issues/62654) +- Fix groupadd.* functions hard code relative command name [#62657](https://github.com/saltstack/salt/issues/62657) +- Fixed pdbedit.create trying to use a bytes-like hash as string. [#62670](https://github.com/saltstack/salt/issues/62670) +- Fix dependency on legacy boto module in boto3 modules [#62672](https://github.com/saltstack/salt/issues/62672) +- Modified "_get_flags" function so that it returns regex flags instead of integers [#62676](https://github.com/saltstack/salt/issues/62676) +- Change startup ReqServer log messages from error to info level. [#62728](https://github.com/saltstack/salt/issues/62728) +- Fix kmod.* functions hard code relative command name [#62772](https://github.com/saltstack/salt/issues/62772) +- Fix mac_brew_pkg to work with null taps [#62793](https://github.com/saltstack/salt/issues/62793) +- Fixing a bug when listing the running schedule if "schedule.enable" and/or "schedule.disable" has been run, where the "enabled" item is being treated as a schedule item. [#62795](https://github.com/saltstack/salt/issues/62795) +- Prevent annoying RuntimeWarning message about line buffering (buffering=1) not being supported in binary mode [#62817](https://github.com/saltstack/salt/issues/62817) +- Include UID and GID checks in modules.file.check_perms as well as comparing + ownership by username and group name.
[#62818](https://github.com/saltstack/salt/issues/62818) +- Fix presence events on TCP transport by removing a client's presence when minion disconnects from publish channel correctly [#62826](https://github.com/saltstack/salt/issues/62826) +- Remove Azure deprecation messages from functions that always run w/ salt-cloud [#62845](https://github.com/saltstack/salt/issues/62845) +- Use select instead of iterating over entrypoints as a dictionary for importlib_metadata>=5.0.0 [#62854](https://github.com/saltstack/salt/issues/62854) +- Fixed master job scheduler using when [#62858](https://github.com/saltstack/salt/issues/62858) +- LGPO: Added support for missing domain controller policies: VulnerableChannelAllowList and LdapEnforceChannelBinding [#62873](https://github.com/saltstack/salt/issues/62873) +- Fix unnecessarily complex gce metadata grains code to use Google's metadata service more effectively. [#62878](https://github.com/saltstack/salt/issues/62878) +- Fixed dockermod version_info function for docker-py 6.0.0+ [#62882](https://github.com/saltstack/salt/issues/62882) +- Moving setting the LOAD_BALANCING_POLICY_MAP dictionary into the try except block that determines if the cassandra_cql module should be made available. [#62886](https://github.com/saltstack/salt/issues/62886) +- Updating various MongoDB module functions to work with latest version of pymongo. [#62900](https://github.com/saltstack/salt/issues/62900) +- Restored channel for Syndic minions to send job returns to the Salt master. [#62933](https://github.com/saltstack/salt/issues/62933) +- Removed _resolve_deps as it required a library that is not generally available, and switched to apt-get for everything as that can auto resolve dependencies.
[#62934](https://github.com/saltstack/salt/issues/62934) +- Updated pyzmq to version 22.0.3 on Windows builds because the old version was causing salt-minion/salt-call to hang [#62937](https://github.com/saltstack/salt/issues/62937) +- Allow root user to modify crontab lines for non-root users (except AIX and Solaris). Align crontab line changes with the file ones and also with listing crontab. [#62940](https://github.com/saltstack/salt/issues/62940) +- Fix systemd_service.* functions hard code relative command name [#62942](https://github.com/saltstack/salt/issues/62942) +- Fix file.symlink backupname operation can copy remote contents to local disk [#62953](https://github.com/saltstack/salt/issues/62953) +- Issue #62968: Fix issue where cloud deployments were putting the keys in the wrong location on Windows hosts [#62968](https://github.com/saltstack/salt/issues/62968) +- Fixed gpg_passphrase issue with gpg decrypt/encrypt functions [#62977](https://github.com/saltstack/salt/issues/62977) +- Fix file.tidied FileNotFoundError [#62986](https://github.com/saltstack/salt/issues/62986) +- Fixed bug where module.wait states were detected as running legacy module.run syntax [#62988](https://github.com/saltstack/salt/issues/62988) +- Fixed issue with win_wua module where it wouldn't load if the CryptSvc was set to Manual start [#62993](https://github.com/saltstack/salt/issues/62993) +- The `__opts__` dunder dictionary is now added to the loader's `pack` if not + already present, which makes it accessible via the + `salt.loader.context.NamedLoaderContext` class. 
[#63013](https://github.com/saltstack/salt/issues/63013) +- Issue #63024: Fix issue where grains and config data were being placed in the wrong location on Windows hosts [#63024](https://github.com/saltstack/salt/issues/63024) +- Fix btrfs.subvolume_snapshot command failing [#63025](https://github.com/saltstack/salt/issues/63025) +- Fix file.retention_schedule always reports changes [#63033](https://github.com/saltstack/salt/issues/63033) +- Fix mongo authentication for mongo ext_pillar and mongo returner + + This fix also includes the ability to use the mongo connection string for mongo ext_pillar [#63058](https://github.com/saltstack/salt/issues/63058) +- Fixed x509.create_csr creates invalid CSR by default in the new cryptography x509 module. [#63103](https://github.com/saltstack/salt/issues/63103) +- TCP transport documentation now contains proper master/minion-side filtering information [#63120](https://github.com/saltstack/salt/issues/63120) +- Fixed gpg.verify does not respect gnupghome [#63145](https://github.com/saltstack/salt/issues/63145) +- Made pillar cache pass extra minion data as well [#63208](https://github.com/saltstack/salt/issues/63208) +- Fix serious performance issues with the file.tidied module [#63231](https://github.com/saltstack/salt/issues/63231) +- Fix rpm_lowpkg version comparison logic when using rpm-vercmp and only one version has a release number. [#63317](https://github.com/saltstack/salt/issues/63317) +- Import StrictVersion and LooseVersion from setuptools.distutils.version or setuptools._distutils.version, if the first is not available [#63350](https://github.com/saltstack/salt/issues/63350) +- When the shell is passed as powershell or pwsh, only wrap the shell in quotes if cmd.run is running on Windows. When quoted on Linux hosts, this results in an error when the keyword arguments are appended.
[#63590](https://github.com/saltstack/salt/issues/63590) +- LGPO: Added support for "Relax minimum password length limits" [#63596](https://github.com/saltstack/salt/issues/63596) +- When a job is disabled only increase its _next_fire_time value if the job would have run at the current time, e.g. the current _next_fire_time == now. [#63699](https://github.com/saltstack/salt/issues/63699) +- Check file is not empty before attempting to read pillar disk cache file [#63729](https://github.com/saltstack/salt/issues/63729) +- Fix cherrypy 400 error output to be less generic. [#63835](https://github.com/saltstack/salt/issues/63835) +- Remove eval and update logging to be more informative on bad config [#63879](https://github.com/saltstack/salt/issues/63879) + +# Added + +- Introduce a `LIB_STATE_DIR` syspaths variable which defaults to `CONFIG_DIR`, + but can be individually customized during installation by specifying + `--salt-lib-state-dir` during installation. Change the default `pki_dir` to + `/pki/master` (for the master) and `/pki/minion` + (for the minion). [#3396](https://github.com/saltstack/salt/issues/3396) +- Allow users to enable 'queue=True' for all state runs via config file [#31468](https://github.com/saltstack/salt/issues/31468) +- Added pillar templating to vault policies [#43287](https://github.com/saltstack/salt/issues/43287) +- Add support for NVMeF as a transport protocol for hosts in a Pure Storage FlashArray [#51088](https://github.com/saltstack/salt/issues/51088) +- A new salt-ssh roster that generates a roster by parsing a known_hosts file.
[#54679](https://github.com/saltstack/salt/issues/54679) +- Added Windows Event Viewer support [#54713](https://github.com/saltstack/salt/issues/54713) +- Added the win_lgpo_reg state and execution modules which will allow registry based group policy to be set directly in the Registry.pol file [#56013](https://github.com/saltstack/salt/issues/56013) +- Added resource tagging functions to boto_dynamodb execution module [#57500](https://github.com/saltstack/salt/issues/57500) +- Added `openvswitch_db` state module and functions `bridge_to_parent`, + `bridge_to_vlan`, `db_get`, and `db_set` to the `openvswitch` execution module. + Also added optional `parent` and `vlan` parameters to the + `openvswitch_bridge.present` state module function and the + `openvswitch.bridge_create` execution module function. [#58986](https://github.com/saltstack/salt/issues/58986) +- State module to manage SysFS attributes [#60154](https://github.com/saltstack/salt/issues/60154) +- Added ability for `salt.wait_for_event` to handle `event_id`s that have a list value. [#60430](https://github.com/saltstack/salt/issues/60430) +- Added support for Linux ppc64le core grains (cpu_model, virtual, productname, manufacturer, serialnumber) and arm core grains (serialnumber, productname) [#60518](https://github.com/saltstack/salt/issues/60518) +- Added autostart option to virt.defined and virt.running states, along with virt.update execution modules. [#60700](https://github.com/saltstack/salt/issues/60700) +- Added .0 back to our versioning scheme for future versions (e.g. 3006.0) [#60722](https://github.com/saltstack/salt/issues/60722) +- Initial work to allow parallel startup of proxy minions when used as sub proxies with Deltaproxy. [#61153](https://github.com/saltstack/salt/issues/61153) +- Added node label support for GCE [#61245](https://github.com/saltstack/salt/issues/61245) +- Support the --priority flag when adding sources to Chocolatey.
[#61319](https://github.com/saltstack/salt/issues/61319) +- Add namespace option to ext_pillar.http_json [#61335](https://github.com/saltstack/salt/issues/61335) +- Added a filter function to ps module to get a list of processes on a minion according to their state. [#61420](https://github.com/saltstack/salt/issues/61420) +- Add postgres.timeout option to postgres module for limiting postgres query times [#61433](https://github.com/saltstack/salt/issues/61433) +- Added new optional vault option, ``config_location``. This can be either ``master`` or ``local`` and defines where vault will look for connection details, either requesting them from the master or using the local config. [#61857](https://github.com/saltstack/salt/issues/61857) +- Add ipwrap() jinja filter to wrap IPv6 addresses with brackets. [#61931](https://github.com/saltstack/salt/issues/61931) +- 'tcp' transport is now available in ipv6-only network [#62009](https://github.com/saltstack/salt/issues/62009) +- Add `diff_attr` parameter to pkg.upgrade() (zypper/yum). [#62031](https://github.com/saltstack/salt/issues/62031) +- Config option pass_variable_prefix allows to distinguish variables that contain paths to pass secrets. + Config option pass_strict_fetch allows to error out when a secret cannot be fetched from pass. + Config option pass_dir allows setting the PASSWORD_STORE_DIR env for pass. + Config option pass_gnupghome allows setting the $GNUPGHOME env for pass. 
[#62120](https://github.com/saltstack/salt/issues/62120) +- Add file.pruned state and expanded file.rmdir exec module functionality [#62178](https://github.com/saltstack/salt/issues/62178) +- Added "dig.PTR" function to resolve PTR records for IPs, as well as tests and documentation [#62275](https://github.com/saltstack/salt/issues/62275) +- Added the ability to remove a KB using the DISM state/execution modules [#62366](https://github.com/saltstack/salt/issues/62366) +- Add "python" subcommand to allow execution of arbitrary scripts via bundled Python runtime [#62381](https://github.com/saltstack/salt/issues/62381) +- Add ability to provide conditions which convert normal state actions to no-op when true [#62446](https://github.com/saltstack/salt/issues/62446) +- Added debug log messages displaying the command being run when installing packages on Windows [#62480](https://github.com/saltstack/salt/issues/62480) +- Add biosvendor grain [#62496](https://github.com/saltstack/salt/issues/62496) +- Add ifelse Jinja function as found in CFEngine [#62508](https://github.com/saltstack/salt/issues/62508) +- Implementation of Amazon EC2 instance detection and setting `virtual_subtype` grain accordingly including the product if possible to identify. [#62539](https://github.com/saltstack/salt/issues/62539) +- Adds __env__ substitution to ext_pillar.stack; followup of #61531, improved exception handling for stacked template (jinja) template rendering and yaml parsing in ext_pillar.stack [#62578](https://github.com/saltstack/salt/issues/62578) +- Increase file.tidied flexibility with regard to age and size [#62678](https://github.com/saltstack/salt/issues/62678) +- Added "connected_devices" feature to netbox pillar module.
It contains extra information about devices connected to the minion [#62761](https://github.com/saltstack/salt/issues/62761) +- Add atomic file operation for symlink changes [#62768](https://github.com/saltstack/salt/issues/62768) +- Add password/account locking/unlocking in user.present state on supported operating systems [#62856](https://github.com/saltstack/salt/issues/62856) +- Added onchange configuration for script engine [#62867](https://github.com/saltstack/salt/issues/62867) +- Added output and bare functionality to export_key gpg module function [#62978](https://github.com/saltstack/salt/issues/62978) +- Add keyvalue serializer for environment files [#62983](https://github.com/saltstack/salt/issues/62983) +- Add ability to ignore symlinks in file.tidied [#63042](https://github.com/saltstack/salt/issues/63042) +- salt-cloud support IMDSv2 tokens when using 'use-instance-role-credentials' [#63067](https://github.com/saltstack/salt/issues/63067) +- Add ability for file.symlink to not set ownership on existing links [#63093](https://github.com/saltstack/salt/issues/63093) +- Restore the previous slack engine and deprecate it, rename replace the slack engine to slack_bolt until deprecation [#63095](https://github.com/saltstack/salt/issues/63095) +- Add functions that will return the underlying block device, mount point, and filesystem type for a given path [#63098](https://github.com/saltstack/salt/issues/63098) +- Add ethtool execution and state module functions for pause [#63128](https://github.com/saltstack/salt/issues/63128) +- Add boardname grain [#63131](https://github.com/saltstack/salt/issues/63131) +- Added management of ECDSA/EdDSA private keys with x509 modules in the new cryptography x509 module. Please migrate to the new cryptography x509 module for this improvement. [#63248](https://github.com/saltstack/salt/issues/63248) +- Added x509 modules support for different output formats in the new cryptography x509 module. 
Please migrate to the new cryptography x509 module for this improvement. [#63249](https://github.com/saltstack/salt/issues/63249) +- Added deprecation_warning test state for ensuring that deprecation warnings are correctly emitted. [#63315](https://github.com/saltstack/salt/issues/63315) +- Adds a state_events option to state.highstate, state.apply, state.sls, state.sls_id. + This allows users to enable state_events on a per use basis rather than having to + enable them globally for all state runs. [#63316](https://github.com/saltstack/salt/issues/63316) +- Allow max queue size setting for state runs to prevent performance problems from queue growth [#63356](https://github.com/saltstack/salt/issues/63356) +- Add support of exposing meta_server_grains for Azure VMs [#63606](https://github.com/saltstack/salt/issues/63606) +- Include the version of `relenv` in the versions report. [#63827](https://github.com/saltstack/salt/issues/63827) +- Added debug log messages displaying the command being run when removing packages on Windows [#63866](https://github.com/saltstack/salt/issues/63866) + + * Wed Mar 01 2023 Salt Project Packaging - 3006.0~rc1 # Removed diff --git a/pkg/tests/conftest.py b/pkg/tests/conftest.py deleted file mode 100644 index 321945adeb3c..000000000000 --- a/pkg/tests/conftest.py +++ /dev/null @@ -1,391 +0,0 @@ -import logging -import pathlib -import re -import shutil - -import pytest -from pytestskipmarkers.utils import platform -from saltfactories.utils import cli_scripts, random_string -from saltfactories.utils.tempfiles import SaltPillarTree, SaltStateTree - -from tests.support.helpers import ( - ARTIFACTS_DIR, - CODE_DIR, - TESTS_DIR, - ApiRequest, - SaltMaster, - SaltMasterWindows, - SaltPkgInstall, - TestUser, -) - -log = logging.getLogger(__name__) - - -@pytest.fixture(scope="session") -def version(install_salt): - """ - get version number from artifact - """ - return install_salt.get_version(version_only=True) - - -def pytest_addoption(parser): 
- """ - register argparse-style options and ini-style config values. - """ - test_selection_group = parser.getgroup("Tests Runtime Selection") - test_selection_group.addoption( - "--system-service", - default=False, - action="store_true", - help="Run the daemons as system services", - ) - test_selection_group.addoption( - "--upgrade", - default=False, - action="store_true", - help="Install previous version and then upgrade then run tests", - ) - test_selection_group.addoption( - "--no-install", - default=False, - action="store_true", - help="Do not install salt and use a previous install Salt package", - ) - test_selection_group.addoption( - "--no-uninstall", - default=False, - action="store_true", - help="Do not uninstall salt packages after test run is complete", - ) - test_selection_group.addoption( - "--classic", - default=False, - action="store_true", - help="Test an upgrade from the classic packages.", - ) - test_selection_group.addoption( - "--prev-version", - action="store", - help="Test an upgrade from the version specified.", - ) - - -@pytest.fixture(scope="session") -def salt_factories_root_dir(request, tmp_path_factory): - root_dir = SaltPkgInstall.salt_factories_root_dir( - request.config.getoption("--system-service") - ) - if root_dir is not None: - yield root_dir - else: - if platform.is_darwin(): - root_dir = pathlib.Path("/tmp/salt-tests-tmpdir") - root_dir.mkdir(mode=0o777, parents=True, exist_ok=True) - else: - root_dir = tmp_path_factory.mktemp("salt-tests") - try: - yield root_dir - finally: - shutil.rmtree(str(root_dir), ignore_errors=True) - - -@pytest.fixture(scope="session") -def salt_factories_config(salt_factories_root_dir): - return { - "code_dir": CODE_DIR, - "root_dir": salt_factories_root_dir, - "system_install": True, - } - - -@pytest.fixture(scope="session") -def install_salt(request, salt_factories_root_dir): - with SaltPkgInstall( - conf_dir=salt_factories_root_dir / "etc" / "salt", - 
system_service=request.config.getoption("--system-service"), - upgrade=request.config.getoption("--upgrade"), - no_uninstall=request.config.getoption("--no-uninstall"), - no_install=request.config.getoption("--no-install"), - classic=request.config.getoption("--classic"), - prev_version=request.config.getoption("--prev-version"), - ) as fixture: - yield fixture - - -@pytest.fixture(scope="session") -def salt_factories(salt_factories, salt_factories_root_dir): - salt_factories.root_dir = salt_factories_root_dir - return salt_factories - - -@pytest.fixture(scope="session") -def state_tree(): - if platform.is_windows(): - file_root = pathlib.Path("C:/salt/srv/salt") - elif platform.is_darwin(): - file_root = pathlib.Path("/opt/srv/salt") - else: - file_root = pathlib.Path("/srv/salt") - envs = { - "base": [ - str(file_root), - str(TESTS_DIR / "files"), - ], - } - tree = SaltStateTree(envs=envs) - test_sls_contents = """ - test_foo: - test.succeed_with_changes: - - name: foo - """ - states_sls_contents = """ - update: - pkg.installed: - - name: bash - salt_dude: - user.present: - - name: dude - - fullname: Salt Dude - """ - win_states_sls_contents = """ - create_empty_file: - file.managed: - - name: C://salt/test/txt - salt_dude: - user.present: - - name: dude - - fullname: Salt Dude - """ - with tree.base.temp_file("test.sls", test_sls_contents), tree.base.temp_file( - "states.sls", states_sls_contents - ), tree.base.temp_file("win_states.sls", win_states_sls_contents): - yield tree - - -@pytest.fixture(scope="session") -def pillar_tree(): - """ - Add pillar files - """ - if platform.is_windows(): - pillar_root = pathlib.Path("C:/salt/srv/pillar") - elif platform.is_darwin(): - pillar_root = pathlib.Path("/opt/srv/pillar") - else: - pillar_root = pathlib.Path("/srv/pillar") - pillar_root.mkdir(mode=0o777, parents=True, exist_ok=True) - tree = SaltPillarTree( - envs={ - "base": [ - str(pillar_root), - ] - }, - ) - top_file_contents = """ - base: - '*': - - test - """ - 
test_file_contents = """ - info: test - """ - with tree.base.temp_file("top.sls", top_file_contents), tree.base.temp_file( - "test.sls", test_file_contents - ): - yield tree - - -@pytest.fixture(scope="module") -def sls(state_tree): - """ - Add an sls file - """ - test_sls_contents = """ - test_foo: - test.succeed_with_changes: - - name: foo - """ - states_sls_contents = """ - update: - pkg.installed: - - name: bash - salt_dude: - user.present: - - name: dude - - fullname: Salt Dude - """ - win_states_sls_contents = """ - create_empty_file: - file.managed: - - name: C://salt/test/txt - salt_dude: - user.present: - - name: dude - - fullname: Salt Dude - """ - with state_tree.base.temp_file( - "tests.sls", test_sls_contents - ), state_tree.base.temp_file( - "states.sls", states_sls_contents - ), state_tree.base.temp_file( - "win_states.sls", win_states_sls_contents - ): - yield - - -@pytest.fixture(scope="session") -def salt_master(salt_factories, install_salt, state_tree, pillar_tree): - """ - Start up a master - """ - start_timeout = None - # Since the daemons are "packaged" with tiamat, the salt plugins provided - # by salt-factories won't be discovered. Provide the required `*_dirs` on - # the configuration so that they can still be used. - config_defaults = { - "engines_dirs": [ - str(salt_factories.get_salt_engines_path()), - ], - "log_handlers_dirs": [ - str(salt_factories.get_salt_log_handlers_path()), - ], - } - if platform.is_darwin(): - config_defaults["enable_fqdns_grains"] = False - config_overrides = { - "timeout": 30, - "file_roots": state_tree.as_dict(), - "pillar_roots": pillar_tree.as_dict(), - "rest_cherrypy": {"port": 8000, "disable_ssl": True}, - "netapi_enable_clients": ["local"], - "external_auth": {"auto": {"saltdev": [".*"]}}, - } - if (platform.is_windows() or platform.is_darwin()) and install_salt.singlebin: - start_timeout = 240 - # For every minion started we have to accept it's key. 
- # On windows, using single binary, it has to decompress it and run the command. Too slow. - # So, just in this scenario, use open mode - config_overrides["open_mode"] = True - master_script = False - if platform.is_windows(): - if install_salt.classic: - master_script = True - # this check will need to be changed to install_salt.relenv - # once the package version returns 3006 and not 3005 on master - elif not install_salt.upgrade: - master_script = True - - if master_script: - salt_factories.system_install = False - scripts_dir = salt_factories.root_dir / "Scripts" - scripts_dir.mkdir(exist_ok=True) - salt_factories.scripts_dir = scripts_dir - config_overrides["open_mode"] = True - python_executable = install_salt.bin_dir / "Scripts" / "python.exe" - if install_salt.classic: - python_executable = install_salt.bin_dir / "python.exe" - factory = salt_factories.salt_master_daemon( - random_string("master-"), - defaults=config_defaults, - overrides=config_overrides, - factory_class=SaltMasterWindows, - salt_pkg_install=install_salt, - python_executable=python_executable, - ) - salt_factories.system_install = True - else: - factory = salt_factories.salt_master_daemon( - random_string("master-"), - defaults=config_defaults, - overrides=config_overrides, - factory_class=SaltMaster, - salt_pkg_install=install_salt, - ) - factory.after_terminate(pytest.helpers.remove_stale_master_key, factory) - with factory.started(start_timeout=start_timeout): - yield factory - - -@pytest.fixture(scope="session") -def salt_minion(salt_factories, salt_master, install_salt): - """ - Start up a minion - """ - start_timeout = None - if (platform.is_windows() or platform.is_darwin()) and install_salt.singlebin: - start_timeout = 240 - minion_id = random_string("minion-") - # Since the daemons are "packaged" with tiamat, the salt plugins provided - # by salt-factories won't be discovered. Provide the required `*_dirs` on - # the configuration so that they can still be used. 
- config_defaults = { - "engines_dirs": salt_master.config["engines_dirs"].copy(), - "log_handlers_dirs": salt_master.config["log_handlers_dirs"].copy(), - } - if platform.is_darwin(): - config_defaults["enable_fqdns_grains"] = False - config_overrides = { - "id": minion_id, - "file_roots": salt_master.config["file_roots"].copy(), - "pillar_roots": salt_master.config["pillar_roots"].copy(), - } - if platform.is_windows(): - config_overrides[ - "winrepo_dir" - ] = rf"{salt_factories.root_dir}\srv\salt\win\repo" - config_overrides[ - "winrepo_dir_ng" - ] = rf"{salt_factories.root_dir}\srv\salt\win\repo_ng" - config_overrides["winrepo_source_dir"] = r"salt://win/repo_ng" - factory = salt_master.salt_minion_daemon( - minion_id, - overrides=config_overrides, - defaults=config_defaults, - ) - factory.after_terminate( - pytest.helpers.remove_stale_minion_key, salt_master, factory.id - ) - with factory.started(start_timeout=start_timeout): - yield factory - - -@pytest.fixture(scope="module") -def salt_cli(salt_master): - return salt_master.salt_cli() - - -@pytest.fixture(scope="module") -def salt_key_cli(salt_master): - return salt_master.salt_key_cli() - - -@pytest.fixture(scope="module") -def salt_call_cli(salt_minion): - return salt_minion.salt_call_cli() - - -@pytest.fixture(scope="module") -def test_account(salt_call_cli): - with TestUser(salt_call_cli=salt_call_cli) as account: - yield account - - -@pytest.fixture(scope="module") -def salt_api(salt_master, install_salt): - """ - start up and configure salt_api - """ - start_timeout = None - if platform.is_windows() and install_salt.singlebin: - start_timeout = 240 - factory = salt_master.salt_api_daemon() - with factory.started(start_timeout=start_timeout): - yield factory - - -@pytest.fixture(scope="module") -def api_request(test_account, salt_api): - with ApiRequest(salt_api=salt_api, test_account=test_account) as session: - yield session diff --git a/pkg/tests/files/check_imports.sls 
b/pkg/tests/files/check_imports.sls deleted file mode 100644 index 0dde9d6ad332..000000000000 --- a/pkg/tests/files/check_imports.sls +++ /dev/null @@ -1,53 +0,0 @@ -#!py -import importlib - -def run(): - config = {} - for test_import in [ - 'templates', 'platform', 'cli', 'executors', 'config', 'wheel', 'netapi', - 'cache', 'proxy', 'transport', 'metaproxy', 'modules', 'tokens', 'matchers', - 'acl', 'auth', 'log', 'engines', 'client', 'returners', 'runners', 'tops', - 'output', 'daemons', 'thorium', 'renderers', 'states', 'cloud', 'roster', - 'beacons', 'pillar', 'spm', 'utils', 'sdb', 'fileserver', 'defaults', - 'ext', 'queues', 'grains', 'serializers' - ]: - try: - import_name = "salt.{}".format(test_import) - importlib.import_module(import_name) - config['test_imports_succeeded'] = { - 'test.succeed_without_changes': [ - { - 'name': import_name - }, - ], - } - except ModuleNotFoundError as err: - config['test_imports_failed'] = { - 'test.fail_without_changes': [ - { - 'name': import_name, - 'comment': "The imports test failed. The error was: {}".format(err) - }, - ], - } - - for stdlib_import in ["telnetlib"]: - try: - importlib.import_module(stdlib_import) - config['stdlib_imports_succeeded'] = { - 'test.succeed_without_changes': [ - { - 'name': stdlib_import - }, - ], - } - except ModuleNotFoundError as err: - config['stdlib_imports_failed'] = { - 'test.fail_without_changes': [ - { - 'name': stdlib_import, - 'comment': "The stdlib imports test failed. 
The error was: {}".format(err) - }, - ], - } - return config diff --git a/pkg/tests/files/check_python.py b/pkg/tests/files/check_python.py deleted file mode 100644 index f1d46b76df7b..000000000000 --- a/pkg/tests/files/check_python.py +++ /dev/null @@ -1,13 +0,0 @@ -import sys - -import salt.utils.data - -user_arg = sys.argv - -if user_arg[1] == "raise": - raise Exception("test") - -if salt.utils.data.is_true(user_arg[1]): - sys.exit(0) -else: - sys.exit(1) diff --git a/pkg/tests/integration/test_check_imports.py b/pkg/tests/integration/test_check_imports.py deleted file mode 100644 index 9a0f1c5fbe48..000000000000 --- a/pkg/tests/integration/test_check_imports.py +++ /dev/null @@ -1,22 +0,0 @@ -import logging - -import pytest -from saltfactories.utils.functional import MultiStateResult - -pytestmark = [ - pytest.mark.skip_on_windows, -] - -log = logging.getLogger(__name__) - - -def test_check_imports(salt_cli, salt_minion): - """ - Test imports - """ - ret = salt_cli.run("state.sls", "check_imports", minion_tgt=salt_minion.id) - assert ret.returncode == 0 - assert ret.data - result = MultiStateResult(raw=ret.data) - for state_ret in result: - assert state_ret.result is True diff --git a/pkg/tests/integration/test_help.py b/pkg/tests/integration/test_help.py deleted file mode 100644 index 2f701c624943..000000000000 --- a/pkg/tests/integration/test_help.py +++ /dev/null @@ -1,14 +0,0 @@ -def test_help(install_salt): - """ - Test --help works for all salt cmds - """ - for cmd in install_salt.binary_paths.values(): - # TODO: add back salt-cloud and salt-ssh when its fixed - cmd = [str(x) for x in cmd] - if "python" in cmd[0]: - ret = install_salt.proc.run(*cmd, "--version") - assert "Python" in ret.stdout - else: - ret = install_salt.proc.run(*cmd, "--help") - assert "Usage" in ret.stdout - assert ret.returncode == 0 diff --git a/pkg/tests/integration/test_pip.py b/pkg/tests/integration/test_pip.py deleted file mode 100644 index 29fa1a87bc6c..000000000000 --- 
a/pkg/tests/integration/test_pip.py +++ /dev/null @@ -1,135 +0,0 @@ -import os -import pathlib -import subprocess - -import pytest -from pytestskipmarkers.utils import platform - - -@pytest.fixture -def pypath(): - if platform.is_windows(): - return pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt") - elif platform.is_darwin(): - return pathlib.Path(f"{os.sep}opt", "salt", "bin") - else: - return pathlib.Path(f"{os.sep}opt", "saltstack", "salt", "bin") - - -@pytest.fixture(autouse=True) -def wipe_pydeps(pypath, install_salt): - try: - yield - finally: - for dep in ["pep8", "PyGithub"]: - subprocess.run( - install_salt.binary_paths["pip"] + ["uninstall", "-y", dep], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - check=False, - universal_newlines=True, - ) - - -def test_pip_install(salt_call_cli): - """ - Test pip.install and ensure module can use installed library - """ - dep = "PyGithub" - repo = "https://github.com/saltstack/salt.git" - - try: - install = salt_call_cli.run("--local", "pip.install", dep) - assert install.returncode == 0 - - use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) - assert "Authentication information could" in use_lib.stderr - finally: - ret = salt_call_cli.run("--local", "pip.uninstall", dep) - assert ret.returncode == 0 - use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) - assert "The github execution module cannot be loaded" in use_lib.stderr - - -def demote(user_uid, user_gid): - def result(): - os.setgid(user_gid) - os.setuid(user_uid) - - return result - - -@pytest.mark.skip_on_windows(reason="We can't easily demote users on Windows") -def test_pip_non_root(install_salt, test_account, pypath): - check_path = pypath / "pep8" - # Lets make sure pep8 is not currently installed - subprocess.run( - install_salt.binary_paths["pip"] + ["uninstall", "-y", "pep8"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - check=False, - universal_newlines=True, - ) - - assert not 
check_path.exists() - # We should be able to issue a --help without being root - ret = subprocess.run( - install_salt.binary_paths["salt"] + ["--help"], - preexec_fn=demote(test_account.uid, test_account.gid), - env=test_account.env, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - check=False, - universal_newlines=True, - ) - assert ret.returncode == 0, ret.stderr - assert "Usage" in ret.stdout - assert not check_path.exists() - - # Try to pip install something, should fail - ret = subprocess.run( - install_salt.binary_paths["pip"] + ["install", "pep8"], - preexec_fn=demote(test_account.uid, test_account.gid), - env=test_account.env, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - check=False, - universal_newlines=True, - ) - assert ret.returncode == 1, ret.stderr - assert "Could not install packages due to an OSError" in ret.stderr - assert not check_path.exists() - - # Let tiamat-pip create the pypath directory for us - ret = subprocess.run( - install_salt.binary_paths["pip"] + ["install", "-h"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - check=False, - universal_newlines=True, - ) - assert ret.returncode == 0, ret.stderr - - # Now, we should still not be able to install as non-root - ret = subprocess.run( - install_salt.binary_paths["pip"] + ["install", "pep8"], - preexec_fn=demote(test_account.uid, test_account.gid), - env=test_account.env, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - check=False, - universal_newlines=True, - ) - assert ret.returncode != 0, ret.stderr - # But we should be able to install as root - ret = subprocess.run( - install_salt.binary_paths["pip"] + ["install", "pep8"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - check=False, - universal_newlines=True, - ) - - assert check_path.exists() - - assert ret.returncode == 0, ret.stderr diff --git a/pkg/tests/integration/test_python.py b/pkg/tests/integration/test_python.py deleted file mode 100644 index e6ed5c2c34f7..000000000000 --- 
a/pkg/tests/integration/test_python.py +++ /dev/null @@ -1,31 +0,0 @@ -import subprocess - -import pytest - -from tests.support.helpers import TESTS_DIR - - -@pytest.mark.parametrize("exp_ret,user_arg", [(1, "false"), (0, "true")]) -def test_python_script(install_salt, exp_ret, user_arg): - ret = subprocess.run( - install_salt.binary_paths["python"] - + [str(TESTS_DIR / "files" / "check_python.py"), user_arg], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - check=False, - universal_newlines=True, - ) - - assert ret.returncode == exp_ret, ret.stderr - - -def test_python_script_exception(install_salt): - ret = subprocess.run( - install_salt.binary_paths["python"] - + [str(TESTS_DIR / "files" / "check_python.py"), "raise"], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - check=False, - universal_newlines=True, - ) - assert "Exception: test" in ret.stderr diff --git a/pkg/tests/integration/test_salt_pillar.py b/pkg/tests/integration/test_salt_pillar.py deleted file mode 100644 index 007bae21f965..000000000000 --- a/pkg/tests/integration/test_salt_pillar.py +++ /dev/null @@ -1,13 +0,0 @@ -import pytest - -pytestmark = [ - pytest.mark.skip_on_windows, -] - - -def test_salt_pillar(salt_cli, salt_minion): - """ - Test pillar.items - """ - ret = salt_cli.run("pillar.items", minion_tgt=salt_minion.id) - assert "info" in ret.data diff --git a/pkg/tests/integration/test_salt_state_file.py b/pkg/tests/integration/test_salt_state_file.py deleted file mode 100644 index b951b48bde66..000000000000 --- a/pkg/tests/integration/test_salt_state_file.py +++ /dev/null @@ -1,24 +0,0 @@ -import sys - -import pytest - -pytestmark = [ - pytest.mark.skip_on_windows, -] - - -def test_salt_state_file(salt_cli, salt_minion): - """ - Test state file - """ - if sys.platform.startswith("win"): - ret = salt_cli.run("state.apply", "win_states", minion_tgt=salt_minion.id) - else: - ret = salt_cli.run("state.apply", "states", minion_tgt=salt_minion.id) - - assert ret.data, ret - if 
ret.stdout and "Minion did not return" in ret.stdout: - pytest.skip("Skipping test, state took too long to apply") - sls_ret = ret.data[next(iter(ret.data))] - assert "changes" in sls_ret - assert "name" in sls_ret diff --git a/pkg/tests/integration/test_systemd_config.py b/pkg/tests/integration/test_systemd_config.py deleted file mode 100644 index b57a018d1ab7..000000000000 --- a/pkg/tests/integration/test_systemd_config.py +++ /dev/null @@ -1,46 +0,0 @@ -import subprocess - -import pytest - -pytestmark = [ - pytest.mark.skip_on_windows(reason="Linux test only"), -] - - -def test_system_config(salt_cli, salt_minion): - """ - Test system config - """ - get_family = salt_cli.run("grains.get", "os_family", minion_tgt=salt_minion.id) - assert get_family.returncode == 0 - get_finger = salt_cli.run("grains.get", "osfinger", minion_tgt=salt_minion.id) - assert get_finger.returncode == 0 - - if get_family.data == "RedHat": - if get_finger.data in ( - "CentOS Stream-8", - "CentOS Linux-8", - "CentOS Stream-9", - "Fedora Linux-36", - ): - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 0 - else: - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 1 - - elif "Debian" in get_family.stdout: - if "Debian-9" in get_finger.stdout: - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 1 - else: - ret = subprocess.call( - "systemctl show -p ${config} salt-minion.service", shell=True - ) - assert ret == 0 diff --git a/pkg/tests/support/coverage/sitecustomize.py b/pkg/tests/support/coverage/sitecustomize.py deleted file mode 100644 index bee2ff80f2f5..000000000000 --- a/pkg/tests/support/coverage/sitecustomize.py +++ /dev/null @@ -1,11 +0,0 @@ -""" -Python will always try to import sitecustomize. 
-We use that fact to try and support code coverage for sub-processes -""" - -try: - import coverage - - coverage.process_startup() -except ImportError: - pass diff --git a/pkg/tests/upgrade/test_salt_upgrade.py b/pkg/tests/upgrade/test_salt_upgrade.py deleted file mode 100644 index c141ed6904fa..000000000000 --- a/pkg/tests/upgrade/test_salt_upgrade.py +++ /dev/null @@ -1,36 +0,0 @@ -import pytest - - -def test_salt_upgrade(salt_call_cli, salt_minion, install_salt): - """ - Test upgrade of Salt - """ - if not install_salt.upgrade: - pytest.skip("Not testing an upgrade, do not run") - # verify previous install version is setup correctly and works - ret = salt_call_cli.run("test.ping") - assert ret.returncode == 0 - assert ret.data - - # test pip install before an upgrade - dep = "PyGithub" - repo = "https://github.com/saltstack/salt.git" - install = salt_call_cli.run("--local", "pip.install", dep) - assert install.returncode == 0 - use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) - assert "Authentication information could" in use_lib.stderr - # upgrade Salt from previous version and test - install_salt.install(upgrade=True) - ret = salt_call_cli.run("test.ping") - assert ret.returncode == 0 - assert ret.data - - # install dep following upgrade - # TODO: Remove this once we figure out how to - # preserve things installed via PIP between upgrades. 
- install = salt_call_cli.run("--local", "pip.install", dep) - assert install.returncode == 0 - - # test pip install after an upgrade - use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) - assert "Authentication information could" in use_lib.stderr diff --git a/pkg/windows/build.ps1 b/pkg/windows/build.ps1 index 0d87d6040079..ecd188327869 100644 --- a/pkg/windows/build.ps1 +++ b/pkg/windows/build.ps1 @@ -38,17 +38,15 @@ param( [Parameter(Mandatory=$false)] [ValidatePattern("^\d{1,2}.\d{1,2}.\d{1,2}$")] - [ValidateSet( - "3.11.2", - "3.10.10" - )] [Alias("p")] - # The version of Python to be built. Pythonnet only supports up to Python - # 3.8 for now. Pycurl stopped building wheel files after 7.43.0.5 which - # supported up to 3.8. So we're pinned to the latest version of Python 3.8. - # We may have to drop support for pycurl. - # Default is: 3.8.16 - [String] $PythonVersion = "3.10.10", + # The version of Python to build/fetch. This is tied to the version of + # Relenv + [String] $PythonVersion, + + [Parameter(Mandatory=$false)] + [Alias("r")] + # The version of Relenv to install + [String] $RelenvVersion, [Parameter(Mandatory=$false)] [Alias("b")] @@ -62,7 +60,7 @@ param( [Switch] $CICD, [Parameter(Mandatory=$false)] - # Don't install. It should already be installed + # Don't install/build python. 
It should already be installed [Switch] $SkipInstall ) @@ -103,6 +101,33 @@ if ( [String]::IsNullOrEmpty($Version) ) { } } +#------------------------------------------------------------------------------- +# Verify Python and Relenv Versions +#------------------------------------------------------------------------------- + +$yaml = Get-Content -Path "$PROJECT_DIR\cicd\shared-gh-workflows-context.yml" +$dict_versions = @{} +$yaml | ForEach-Object { + $val1, $val2 = $_ -split ": " + $dict_versions[$val1] = $val2.Trim("""") +} + +if ( [String]::IsNullOrEmpty($PythonVersion) ) { + $PythonVersion = $dict_versions["python_version"] + if ( [String]::IsNullOrEmpty($PythonVersion) ) { + Write-Host "Failed to load Python Version" + exit 1 + } +} + +if ( [String]::IsNullOrEmpty($RelenvVersion) ) { + $RelenvVersion = $dict_versions["relenv_version"] + if ( [String]::IsNullOrEmpty($RelenvVersion) ) { + Write-Host "Failed to load Relenv Version" + exit 1 + } +} + #------------------------------------------------------------------------------- # Start the Script #------------------------------------------------------------------------------- @@ -111,6 +136,7 @@ Write-Host $("#" * 80) Write-Host "Build Salt Installer Packages" -ForegroundColor Cyan Write-Host "- Salt Version: $Version" Write-Host "- Python Version: $PythonVersion" +Write-Host "- Relenv Version: $RelenvVersion" Write-Host "- Architecture: $Architecture" Write-Host $("v" * 80) @@ -165,9 +191,10 @@ if ( ! 
$SkipInstall ) { $KeywordArguments = @{ Version = $PythonVersion Architecture = $Architecture + RelenvVersion = $RelenvVersion } if ( $Build ) { - $KeywordArguments["Build"] = $true + $KeywordArguments["Build"] = $false } if ( $CICD ) { $KeywordArguments["CICD"] = $true diff --git a/pkg/windows/build_python.ps1 b/pkg/windows/build_python.ps1 index 8b0d4bb240a4..28aee58fbd2a 100644 --- a/pkg/windows/build_python.ps1 +++ b/pkg/windows/build_python.ps1 @@ -17,17 +17,15 @@ build_python.ps1 -Version 3.10.9 -Architecture x86 param( [Parameter(Mandatory=$false)] [ValidatePattern("^\d{1,2}.\d{1,2}.\d{1,2}$")] - [ValidateSet( - "3.11.2", - "3.10.10" - )] [Alias("v")] - # The version of Python to be built. Pythonnet only supports up to Python - # 3.8 for now. Pycurl stopped building wheel files after 7.43.0.5 which - # supported up to 3.8. So we're pinned to the latest version of Python 3.8. - # We may have to drop support for pycurl or build it ourselves. - # Default is: 3.8.16 - [String] $Version = "3.10.10", + # The version of python to build/fetch. 
This is tied to the version of + # Relenv + [String] $Version, + + [Parameter(Mandatory=$false)] + [Alias("r")] + # The version of Relenv to install + [String] $RelenvVersion, [Parameter(Mandatory=$false)] [ValidateSet("x64", "x86", "amd64")] @@ -73,6 +71,33 @@ function Write-Result($result, $ForegroundColor="Green") { Write-Host -ForegroundColor $ForegroundColor ("{0,$position}$result" -f "") }} +#------------------------------------------------------------------------------- +# Verify Python and Relenv Versions +#------------------------------------------------------------------------------- + +$yaml = Get-Content -Path "$PROJECT_DIR\cicd\shared-gh-workflows-context.yml" +$dict_versions = @{} +$yaml | ForEach-Object { + $val1, $val2 = $_ -split ": " + $dict_versions[$val1] = $val2.Trim("""") +} + +if ( [String]::IsNullOrEmpty($Version) ) { + $Version = $dict_versions["python_version"] + if ( [String]::IsNullOrEmpty($Version) ) { + Write-Host "Failed to load Python Version" + exit 1 + } +} + +if ( [String]::IsNullOrEmpty($RelenvVersion) ) { + $RelenvVersion = $dict_versions["relenv_version"] + if ( [String]::IsNullOrEmpty($RelenvVersion) ) { + Write-Host "Failed to load Relenv Version" + exit 1 + } +} + #------------------------------------------------------------------------------- # Start the Script #------------------------------------------------------------------------------- @@ -85,6 +110,7 @@ if ( $Build ) { } Write-Host "$SCRIPT_MSG" -ForegroundColor Cyan Write-Host "- Python Version: $Version" +Write-Host "- Relenv Version: $RelenvVersion" Write-Host "- Architecture: $Architecture" Write-Host "- Build: $Build" Write-Host $("-" * 80) @@ -149,7 +175,6 @@ if ( $env:VIRTUAL_ENV ) { #------------------------------------------------------------------------------- $SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName $BUILD_DIR = "$SCRIPT_DIR\buildenv" -$SCRIPTS_DIR = "$BUILD_DIR\Scripts" $RELENV_DIR = "${env:LOCALAPPDATA}\relenv" 
$SYS_PY_BIN = (python -c "import sys; print(sys.executable)") $BLD_PY_BIN = "$BUILD_DIR\Scripts\python.exe" @@ -227,7 +252,7 @@ if ( $env:VIRTUAL_ENV ) { # Installing Relenv #------------------------------------------------------------------------------- Write-Host "Installing Relenv: " -NoNewLine -pip install relenv --disable-pip-version-check | Out-Null +pip install relenv==$RelenvVersion --disable-pip-version-check | Out-Null $output = pip list --disable-pip-version-check if ("relenv" -in $output.split()) { Write-Result "Success" -ForegroundColor Green @@ -235,22 +260,23 @@ if ("relenv" -in $output.split()) { Write-Result "Failed" -ForegroundColor Red exit 1 } +$env:RELENV_FETCH_VERSION=$RelenvVersion #------------------------------------------------------------------------------- # Building Python with Relenv #------------------------------------------------------------------------------- if ( $Build ) { Write-Host "Building Python with Relenv (long-running): " -NoNewLine - $output = relenv build --clean --arch $ARCH + $output = relenv build --clean --python $Version --arch $ARCH } else { Write-Host "Fetching Python with Relenv: " -NoNewLine relenv fetch --python $Version --arch $ARCH | Out-Null -} -if ( Test-Path -Path "$RELENV_DIR\build\$Version-$ARCH-win.tar.xz") { - Write-Result "Success" -ForegroundColor Green -} else { - Write-Result "Failed" -ForegroundColor Red - exit 1 + if ( Test-Path -Path "$RELENV_DIR\build\$Version-$ARCH-win.tar.xz") { + Write-Result "Success" -ForegroundColor Green + } else { + Write-Result "Failed" -ForegroundColor Red + exit 1 + } } #------------------------------------------------------------------------------- @@ -265,24 +291,6 @@ If ( Test-Path -Path "$BLD_PY_BIN" ) { exit 1 } -#------------------------------------------------------------------------------- -# Retrieving SSL Libraries -#------------------------------------------------------------------------------- -$ssllibs = "libeay32.dll", - "ssleay32.dll" -$ssllibs | 
ForEach-Object { - $url = "$SALT_DEP_URL/openssl/1.1.1s/$_" - $file = "$SCRIPTS_DIR\$_" - Write-Host "Retrieving $_`: " -NoNewline - Invoke-WebRequest -Uri "$url" -OutFile "$file" | Out-Null - if ( Test-Path -Path "$file" ) { - Write-Result "Success" -ForegroundColor Green - } else { - Write-Result "Failed" -ForegroundColor Red - exit 1 - } -} - #------------------------------------------------------------------------------- # Removing Unneeded files from Python #------------------------------------------------------------------------------- diff --git a/pkg/windows/clean.ps1 b/pkg/windows/clean.ps1 index 7d2234ad6b8d..466cf812dcc9 100644 --- a/pkg/windows/clean.ps1 +++ b/pkg/windows/clean.ps1 @@ -140,6 +140,33 @@ if ( Test-Path -Path "$RELENV_DIR" ) { } } +#------------------------------------------------------------------------------- +# Remove MSI build files +#------------------------------------------------------------------------------- +$files = @( + "msi/CustomAction01/CustomAction01.CA.dll", + "msi/CustomAction01/CustomAction01.dll", + "msi/CustomAction01/CustomAction01.pdb", + "msi/Product-discovered-files-config.wixobj", + "msi/Product-discovered-files-config.wxs", + "msi/Product-discovered-files-x64.wixobj", + "msi/Product-discovered-files-x64.wxs", + "msi/Product.wixobj" +) +$files | ForEach-Object { + if ( Test-Path -Path "$SCRIPT_DIR\$_" ) { + # Use .net, the powershell function is asynchronous + Write-Host "Removing $_`: " -NoNewline + [System.IO.File]::Delete("$SCRIPT_DIR\$_") + if ( ! 
(Test-Path -Path "$SCRIPT_DIR\$_") ) { + Write-Result "Success" -ForegroundColor Green + } else { + Write-Result "Failed" -ForegroundColor Red + exit 1 + } + } +} + #------------------------------------------------------------------------------- # Script Completed #------------------------------------------------------------------------------- diff --git a/pkg/windows/install_salt.ps1 b/pkg/windows/install_salt.ps1 index 34fcdf927ddf..3be6e6f18da1 100644 --- a/pkg/windows/install_salt.ps1 +++ b/pkg/windows/install_salt.ps1 @@ -97,7 +97,7 @@ if ( ! $SkipInstall ) { Write-Host $("-" * 80) #------------------------------------------------------------------------------- - # Installing Salt + # Preparing to Install Salt #------------------------------------------------------------------------------- # We don't want to use an existing salt installation because we don't know what # it is diff --git a/pkg/windows/msi/README.md b/pkg/windows/msi/README.md index 9e2aba66f001..f2564089d19f 100644 --- a/pkg/windows/msi/README.md +++ b/pkg/windows/msi/README.md @@ -4,66 +4,71 @@ The installer offers properties for unattended/silent installations. Example: install silently, set the master, don't start the service: +In cmd: > msiexec /i *.msi MASTER=salt2 START_MINION="" -Example: uninstall and remove configuration +In powershell (you have to escape the quotes to disable starting the minion service): +> msiexec /i *.msi MASTER=salt2 START_MINION=\`"\`" +Example: uninstall and remove configuration > MsiExec.exe /X *.msi REMOVE_CONFIG=1 ## Notes - The installer requires a privileged user - Properties must be upper case -- Values of properties are case sensitve -- Values must be quoted when they contain whitespace, or to unset a property, as in `START_MINION=""` -- Creates a verbose log file, by default `%TEMP%\MSIxxxxx.LOG`, where xxxxx is random. 
The name of the log can be specified with `msiexec /log example.log` -- extends the system `PATH` environment variable +- Values of properties are case sensitive +- Values must be quoted when they contain whitespace, or to unset a property, as in ``START_MINION=""`` +- In powershell, you must escape the quotes with a back tick for an empty string, ie: ``START_MINION=`"`"`` +- ``/l*v`` Creates a verbose log file, by default ``%TEMP%\MSIxxxxx.LOG``, where xxxxx is random. The name of the log can be specified with ``msiexec /l*v example.log`` +- ``/qn`` or ``/quiet`` installs quietly, suppressing all dialog boxes +- ``/qb`` or ``/passive`` installs quietly but displays a simple progress bar ## Properties - Property | Default value | Comment - ---------------------- | ----------------------- | ------ - `MASTER` | `salt` | The master (name or IP). Separate multiple masters by comma. - `MASTER_KEY` | | The master public key. See below. - `MINION_ID` | Hostname | The minion id. - `MINION_CONFIG` | | Content to be written to the `minion` config file. See below. - `START_MINION` | `1` | Set to `""` to prevent the start of the `salt-minion` service. - `MOVE_CONF` | | Set to `1` to move configuration from `C:\salt` to `%ProgramData%`. - `REMOVE_CONFIG` | | Set to `1` to remove configuration on uninstall. Implied by `MINION_CONFIG`. - `CLEAN_INSTALL` | | Set to `1` to remove configuration and cache before install or upgrade. - `CONFIG_TYPE` | `Existing` | Set to `Custom` or `Default` for scenarios below. - `CUSTOM_CONFIG` | | Name of a custom config file in the same path as the installer or full path. Requires `CONFIG_TYPE=Custom`. __ONLY FROM COMMANDLINE__ - `INSTALLDIR` | Windows default | Where to install binaries. - `ROOTDIR` | `C:\ProgramData\Salt Project\Salt` | Where to install configuration. - `ARPSYSTEMCOMPONENT` | | Set to `1` to hide "Salt Minion" in "Programs and Features". 
+ Property | Default value | Comment + ------------------------ | ----------------------- | ------ + ``MASTER`` | ``salt`` | The master (name or IP). Separate multiple masters by comma. + ``MASTER_KEY`` | | The master public key. See below. + ``MINION_ID`` | Hostname | The minion id. + ``MINION_CONFIG`` | | Content to be written to the `minion` config file. See below. + ``START_MINION`` | ``1`` | Set to ``""`` to prevent the start of the ``salt-minion`` service. In powershell you must excape each quotation mark with a back tick (`` `"`" ``) + ``MOVE_CONF`` | | Set to ``1`` to move configuration from ``C:\salt`` to ``%ProgramData%``. + ``REMOVE_CONFIG`` | | Set to ``1`` to remove configuration on uninstall. Implied by ``MINION_CONFIG``. + ``CLEAN_INSTALL`` | | Set to ``1`` to remove configuration and cache before install or upgrade. + ``CONFIG_TYPE`` | ``Existing`` | Set to ``Custom`` or ``Default`` for scenarios below. + ``CUSTOM_CONFIG`` | | Name of a custom config file in the same path as the installer or full path. Requires ``CONFIG_TYPE=Custom``. __ONLY FROM COMMANDLINE__ + ``INSTALLDIR`` | Windows default | Where to install binaries. + ``ROOTDIR`` | ``C:\ProgramData\Salt Project\Salt`` | Where to install configuration. + ``ARPSYSTEMCOMPONENT`` | | Set to ``1`` to hide "Salt Minion" in "Programs and Features". -Master and id are read from file `conf\minion` +Master and id are read from file ``conf\minion`` -You can set a master with `MASTER`. +You can set a master with ``MASTER``. -You can set a master public key with `MASTER_KEY`, after you converted it into one line like so: +You can set a master public key with ``MASTER_KEY``, after you converted it into one line like so: -- Remove the first and the last line (`-----BEGIN PUBLIC KEY-----` and `-----END PUBLIC KEY-----`). +- Remove the first and the last line (``-----BEGIN PUBLIC KEY-----`` and ``-----END PUBLIC KEY-----``). - Remove linebreaks. 
-### Property `MINION_CONFIG` +### Property ``MINION_CONFIG`` -If `MINION_CONFIG` is set: +If ``MINION_CONFIG`` is set: -- Its content is written to configuraton file `conf\minion`, with `^` replaced by line breaks +- Its content is written to configuration file ``conf\minion``, with ``^`` replaced by line breaks - All prior configuration is deleted: - - all `minion.d\*.conf` files - - the `minion_id` file -- Implies `REMOVE_CONFIG=1`: uninstall will remove all configuration. + - all ``minion.d\*.conf`` files + - the ``minion_id`` file +- Implies ``REMOVE_CONFIG=1``: uninstall will remove all configuration. -Example `MINION_CONFIG="master: Anna^id: Bob"` results in: +Example ``MINION_CONFIG="master: Anna^id: Bob"`` results in: master: Anna id: Bob -### Property `CONFIG_TYPE` +### Property ``CONFIG_TYPE`` There are 3 scenarios the installer tries to account for: @@ -75,30 +80,30 @@ Existing This setting makes no changes to the existing config and just upgrades/downgrades salt. Makes for easy upgrades. Just run the installer with a silent option. -If there is no existing config, then the default is used and `master` and `minion id` are applied if passed. +If there is no existing config, then the default is used and ``master`` and ``minion id`` are applied if passed. Custom This setting will lay down a custom config passed via the command line. Since we want to make sure the custom config is applied correctly, we'll need to back up any existing config. -1. `minion` config renamed to `minion-.bak` -2. `minion_id` file renamed to `minion_id-.bak` -3. `minion.d` directory renamed to `minion.d-.bak` -Then the custom config is laid down by the installer... and `master` and `minion id` should be applied to the custom config if passed. +1. ``minion`` config renamed to ``minion-.bak`` +2. ``minion_id`` file renamed to ``minion_id-.bak`` +3. ``minion.d`` directory renamed to ``minion.d-.bak`` +Then the custom config is laid down by the installer... 
and ``master`` and ``minion id`` should be applied to the custom config if passed. Default This setting will reset config to be the default config contained in the pkg. Therefore, all existing config files should be backed up -1. `minion` config renamed to `minion-.bak` -2. `minion_id` file renamed to `minion_id-.bak` -3. `minion.d` directory renamed to `minion.d-.bak` -Then the default config file is laid down by the installer... settings for `master` and `minion id` should be applied to the default config if passed +1. ``minion`` config renamed to ``minion-.bak`` +2. ``minion_id`` file renamed to ``minion_id-.bak`` +3. ``minion.d`` directory renamed to ``minion.d-.bak`` +Then the default config file is laid down by the installer... settings for ``master`` and ``minion id`` should be applied to the default config if passed ### Previous installation in C:\salt and how to install into C:\salt -A previous installation or configuration in `C:\salt` causes an upgrade into `C:\salt`, unless you set `MOVE_CONF=1`. -Set the two properties `INSTALLDIR=c:\salt ROOTDIR=c:\salt` to install binaries and configuration into `C:\salt`. +A previous installation or configuration in ``C:\salt`` causes an upgrade into ``C:\salt``, unless you set ``MOVE_CONF=1``. +Set the two properties ``INSTALLDIR=c:\salt ROOTDIR=c:\salt`` to install binaries and configuration into ``C:\salt``. 
## Client requirements diff --git a/pkg/windows/msi/build_pkg.ps1 b/pkg/windows/msi/build_pkg.ps1 index 37fdd5f34834..6a6176a2d4e3 100644 --- a/pkg/windows/msi/build_pkg.ps1 +++ b/pkg/windows/msi/build_pkg.ps1 @@ -73,17 +73,19 @@ function VerifyOrDownload ($local_file, $URL, $SHA256) { # Script Variables #------------------------------------------------------------------------------- -$WEBCACHE_DIR = "$env:TEMP\msi_build_cache_dir" -$DEPS_URL = "http://repo.saltproject.io/windows/dependencies" -$PROJECT_DIR = $(git rev-parse --show-toplevel) -$BUILD_DIR = "$PROJECT_DIR\pkg\windows\build" -$BUILDENV_DIR = "$PROJECT_DIR\pkg\windows\buildenv" -$SCRIPTS_DIR = "$BUILDENV_DIR\Scripts" -$PYTHON_BIN = "$SCRIPTS_DIR\python.exe" -$BUILD_ARCH = $(. $PYTHON_BIN -c "import platform; print(platform.architecture()[0])") -$SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName -$RUNTIME_DIR = [System.Runtime.InteropServices.RuntimeEnvironment]::GetRuntimeDirectory() -$CSC_BIN = "$RUNTIME_DIR\csc.exe" +$WEBCACHE_DIR = "$env:TEMP\msi_build_cache_dir" +$DEPS_URL = "https://repo.saltproject.io/windows/dependencies" +$PROJECT_DIR = $(git rev-parse --show-toplevel) +$BUILD_DIR = "$PROJECT_DIR\pkg\windows\build" +$BUILDENV_DIR = "$PROJECT_DIR\pkg\windows\buildenv" +$SCRIPTS_DIR = "$BUILDENV_DIR\Scripts" +$SITE_PKGS_DIR = "$BUILDENV_DIR\Lib\site-packages" +$BUILD_SALT_DIR = "$SITE_PKGS_DIR\salt" +$PYTHON_BIN = "$SCRIPTS_DIR\python.exe" +$BUILD_ARCH = $(. 
$PYTHON_BIN -c "import platform; print(platform.architecture()[0])") +$SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName +$RUNTIME_DIR = [System.Runtime.InteropServices.RuntimeEnvironment]::GetRuntimeDirectory() +$CSC_BIN = "$RUNTIME_DIR\csc.exe" if ( $BUILD_ARCH -eq "64bit" ) { $BUILD_ARCH = "AMD64" @@ -263,6 +265,214 @@ Write-Host "Packaging *.dll's to *.CA.dll: " -NoNewline "$SCRIPT_DIR\CustomAction01\CustomAction.config" > build.tmp CheckExitCode +#------------------------------------------------------------------------------- +# Remove Non-Windows Execution Modules +#------------------------------------------------------------------------------- +Write-Host "Removing Non-Windows Execution Modules: " -NoNewline +$modules = "acme", + "aix", + "alternatives", + "apcups", + "apf", + "apt", + "arista", + "at", + "bcache", + "blockdev", + "bluez", + "bridge", + "bsd", + "btrfs", + "ceph", + "container_resource", + "cron", + "csf", + "daemontools", + "deb*", + "devmap", + "dpkg", + "ebuild", + "eix", + "eselect", + "ethtool", + "extfs", + "firewalld", + "freebsd", + "genesis", + "gentoo", + "glusterfs", + "gnomedesktop", + "groupadd", + "grub_legacy", + "guestfs", + "htpasswd", + "ilo", + "img", + "incron", + "inspector", + "ipset", + "iptables", + "iwtools", + "k8s", + "kapacitor", + "keyboard", + "keystone", + "kmod", + "layman", + "linux", + "localemod", + "locate", + "logadm", + "logrotate", + "lvs", + "lxc", + "mac", + "makeconf", + "mdadm", + "mdata", + "monit", + "moosefs", + "mount", + "napalm", + "netbsd", + "netscaler", + "neutron", + "nfs3", + "nftables", + "nova", + "nspawn", + "openbsd", + "openstack", + "openvswitch", + "opkg", + "pacman", + "parallels", + "parted", + "pcs", + "pkgin", + "pkgng", + "pkgutil", + "portage_config", + "postfix", + "poudriere", + "powerpath", + "pw_", + "qemu_", + "quota", + "redismod", + "restartcheck", + "rh_", + "riak", + "rpm", + "runit", + "s6", + "scsi", + "sensors", + "service", + 
"shadow", + "smartos", + "smf", + "snapper", + "solaris", + "solr", + "ssh_", + "supervisord", + "sysbench", + "sysfs", + "sysrc", + "system", + "test_virtual", + "timezone", + "trafficserver", + "tuned", + "udev", + "upstart", + "useradd", + "uswgi", + "varnish", + "vbox", + "virt", + "xapi", + "xbpspkg", + "xfs", + "yum*", + "zfs", + "znc", + "zpool", + "zypper" +$modules | ForEach-Object { + Remove-Item -Path "$BUILD_SALT_DIR\modules\$_*" -Recurse + if ( Test-Path -Path "$BUILD_SALT_DIR\modules\$_*" ) { + Write-Result "Failed" -ForegroundColor Red + Write-Host "Failed to remove: $BUILD_SALT_DIR\modules\$_" + exit 1 + } +} +Write-Result "Success" -ForegroundColor Green + +#------------------------------------------------------------------------------- +# Remove Non-Windows State Modules +#------------------------------------------------------------------------------- +Write-Host "Removing Non-Windows State Modules: " -NoNewline +$states = "acme", + "alternatives", + "apt", + "at", + "blockdev", + "ceph", + "cron", + "csf", + "deb", + "eselect", + "ethtool", + "firewalld", + "glusterfs", + "gnome", + "htpasswd", + "incron", + "ipset", + "iptables", + "k8s", + "kapacitor", + "keyboard", + "keystone", + "kmod", + "layman", + "linux", + "lxc", + "mac", + "makeconf", + "mdadm", + "monit", + "mount", + "nftables", + "pcs", + "pkgng", + "portage", + "powerpath", + "quota", + "redismod", + "smartos", + "snapper", + "ssh", + "supervisord", + "sysrc", + "trafficserver", + "tuned", + "vbox", + "virt.py", + "zfs", + "zpool" +$states | ForEach-Object { + Remove-Item -Path "$BUILD_SALT_DIR\states\$_*" -Recurse + if ( Test-Path -Path "$BUILD_SALT_DIR\states\$_*" ) { + Write-Result "Failed" -ForegroundColor Red + Write-Host "Failed to remove: $BUILD_SALT_DIR\states\$_" + exit 1 + } +} +Write-Result "Success" -ForegroundColor Green + # move conf folder up one dir because it must not be discovered twice and xslt is difficult Write-Host "Remove configs from discovery: " -NoNewline 
Move-Item -Path "$DISCOVER_CONFDIR" ` diff --git a/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python.exe b/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python.exe index 9b23b62b0159..014864b41e75 100644 Binary files a/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python.exe and b/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python.exe differ diff --git a/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python3.dll b/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python3.dll index f94b2d838b93..e67fb8181c49 100644 Binary files a/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python3.dll and b/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python3.dll differ diff --git a/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python310.dll b/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python310.dll new file mode 100644 index 000000000000..ac92b83a2000 Binary files /dev/null and b/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python310.dll differ diff --git a/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python38.dll b/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python38.dll deleted file mode 100644 index 53e7b10e6bb2..000000000000 Binary files a/pkg/windows/msi/tests/_mock_files/buildenv/Scripts/python38.dll and /dev/null differ diff --git a/pkg/windows/msi/tests/config_tests/default_config_master.test b/pkg/windows/msi/tests/config_tests/default_config_master.test index 1a5c9ba08fb4..6288dae5af6d 100644 --- a/pkg/windows/msi/tests/config_tests/default_config_master.test +++ b/pkg/windows/msi/tests/config_tests/default_config_master.test @@ -1,2 +1,2 @@ -properties CONFIG_TYPE=Default START_MINION="" MASTER=cli.master +properties START_MINION="" CONFIG_TYPE=Default MASTER=cli.master dormant diff --git a/pkg/windows/msi/tests/config_tests/default_config_minion.test b/pkg/windows/msi/tests/config_tests/default_config_minion.test index f0bb41292881..5ffbbf80d0cd 100644 --- 
a/pkg/windows/msi/tests/config_tests/default_config_minion.test +++ b/pkg/windows/msi/tests/config_tests/default_config_minion.test @@ -1,2 +1,2 @@ -properties CONFIG_TYPE=Default START_MINION="" MINION_ID=cli.minion +properties START_MINION="" CONFIG_TYPE=Default MINION_ID=cli.minion dormant diff --git a/pkg/windows/msi/tests/config_tests/default_config_multi_master.test b/pkg/windows/msi/tests/config_tests/default_config_multi_master.test index d788ac44d7ec..655a9e8da684 100644 --- a/pkg/windows/msi/tests/config_tests/default_config_multi_master.test +++ b/pkg/windows/msi/tests/config_tests/default_config_multi_master.test @@ -1,2 +1,2 @@ -properties CONFIG_TYPE=Default START_MINION="" MASTER=cli.master1,cli.master2 +properties START_MINION="" CONFIG_TYPE=Default MASTER=cli.master1,cli.master2 dormant diff --git a/pkg/windows/msi/tests/config_tests/default_config_multi_master_spaces.test b/pkg/windows/msi/tests/config_tests/default_config_multi_master_spaces.test index a7c851c6f084..4a4296321767 100644 --- a/pkg/windows/msi/tests/config_tests/default_config_multi_master_spaces.test +++ b/pkg/windows/msi/tests/config_tests/default_config_multi_master_spaces.test @@ -1,2 +1,2 @@ -properties CONFIG_TYPE=Default START_MINION="" MASTER="cli.master1 cli.master2" +properties START_MINION="" CONFIG_TYPE=Default MASTER="cli.master1 cli.master2" dormant diff --git a/pkg/windows/msi/tests/config_tests/remove_config_custom_config.test b/pkg/windows/msi/tests/config_tests/remove_config_custom_config.test index 483ce5d54817..32a73b4b661e 100644 --- a/pkg/windows/msi/tests/config_tests/remove_config_custom_config.test +++ b/pkg/windows/msi/tests/config_tests/remove_config_custom_config.test @@ -1 +1 @@ -properties CONFIG_TYPE=Custom CUSTOM_CONFIG=tests\config_tests\remove_config_custom_config.conf START_MINION="" REMOVE_CONFIG=1 +properties START_MINION="" CONFIG_TYPE=Custom CUSTOM_CONFIG=tests\config_tests\remove_config_custom_config.conf REMOVE_CONFIG=1 diff --git 
a/pkg/windows/msi/tests/config_tests/remove_config_default_config.test b/pkg/windows/msi/tests/config_tests/remove_config_default_config.test index bf598da37c7f..9b856a4a8538 100644 --- a/pkg/windows/msi/tests/config_tests/remove_config_default_config.test +++ b/pkg/windows/msi/tests/config_tests/remove_config_default_config.test @@ -1 +1 @@ -properties CONFIG_TYPE=Default START_MINION="" REMOVE_CONFIG=1 +properties START_MINION="" CONFIG_TYPE=Default REMOVE_CONFIG=1 diff --git a/pkg/windows/multi-minion.cmd b/pkg/windows/multi-minion.cmd new file mode 100644 index 000000000000..3142158b469b --- /dev/null +++ b/pkg/windows/multi-minion.cmd @@ -0,0 +1,5 @@ +:: This is a helper script for multi-minion.ps1. +:: See multi-minion.ps1 for documentation +@ echo off +Set "CurDir=%~dp0" +PowerShell -ExecutionPolicy RemoteSigned -File "%CurDir%\multi-minion.ps1" %* diff --git a/pkg/windows/multi-minion.ps1 b/pkg/windows/multi-minion.ps1 new file mode 100644 index 000000000000..8ad709c04ccc --- /dev/null +++ b/pkg/windows/multi-minion.ps1 @@ -0,0 +1,363 @@ +<# +.SYNOPSIS +Script for setting up an additional salt-minion on a machine with Salt installed + +.DESCRIPTION +This script configures an additional minion on a machine that already has a Salt +installation using one of the Salt packages. It sets up the directory structure +required by Salt. It also lays down a minion config to be used +by the Salt minion. Additionally, this script can start the new minion in a +hidden window. + +You can also remove the multi-minion setup with this script. + +This script does not need to be run with Administrator privileges + +If a minion that was configured with this script is already running, the script +will exit. + +The following example sets up a minion for the current logged in account.
It +configures the minion to connect to the master at 192.168.0.10 + +.EXAMPLE +PS>multi-minion.ps1 -Master 192.168.0.10 +PS>multi-minion.ps1 -m 192.168.0.10 + +The following example sets up a minion for the current logged in account. It +configures the minion to connect to the master at 192.168.0.10. It also prefixes +the minion id with `spongebob` + +.EXAMPLE +PS>multi-minion.ps1 -Master 192.168.0.10 -Prefix spongebob +PS>multi-minion.ps1 -m 192.168.0.10 -p spongebob + +The following example sets up a minion for the current logged in account. It +configures the minion to connect to the master at 192.168.0.10. It also starts +the minion in a hidden window: + +.EXAMPLE +PS>multi-minion.ps1 -Master 192.168.0.10 -Start +PS>multi-minion.ps1 -m 192.168.0.10 -s + +The following example removes a multi-minion for the current running account: + +.EXAMPLE +PS>multi-minion.ps1 -Remove +PS>multi-minion.ps1 -d + +#> + +[CmdletBinding()] +param( + + [Parameter(Mandatory=$false)] + [Alias("m")] + # The master to connect to. This can be an ip address or an fqdn. Default + # is salt + [String] $Master = "salt", + + [Parameter(Mandatory=$false)] + [Alias("p")] + # The prefix to the minion id to differentiate it from the installed system + # minion. The default is $env:COMPUTERNAME. It might be helpful to use the + # minion id of the system minion if you know it + [String] $Prefix = "$env:COMPUTERNAME", + + [Parameter(Mandatory=$false)] + [Alias("s")] + # Start the minion in the background + [Switch] $Start, + + [Parameter(Mandatory=$false)] + [Alias("l")] + [ValidateSet( + "all", + "garbage", + "trace", + "debug", + "profile", + "info", + "warning", + "error", + "critical", + "quiet" + )] + # Set the log level for log file. Default is `warning` + [String] $LogLevel = "warning", + + [Parameter(Mandatory=$false)] + [Alias("d")] + # Remove the multi-minion in the current account.
All other parameters are + # ignored + [Switch] $Remove +) + +########################### Script Variables ############################# +$user_name = [System.Security.Principal.WindowsIdentity]::GetCurrent().Name.Split("\")[-1].ToLower() +$salt_bin = "$env:ProgramFiles\Salt Project\Salt\salt-minion.exe" +$root_dir = "$env:LocalAppData\Salt Project\Salt" +$cache_dir = "$root_dir\var\cache\salt\minion" +$minion_id = "$Prefix-$user_name" + +########################### Script Functions ############################# +function Test-FileLock { + param ( + [parameter(Mandatory=$true)] + # The path to the file to check + [string]$Path + ) + if ((Test-Path -Path $Path) -eq $false) { + return $false + } + $oFile = New-Object System.IO.FileInfo $Path + try { + $oStream = $oFile.Open([System.IO.FileMode]::Open, [System.IO.FileAccess]::ReadWrite, [System.IO.FileShare]::None) + if ($oStream) { + $oStream.Close() + } + return $false + } catch { + # file is locked by a process. + return $true + } +} + +################################ Remove ################################## +if ( $Remove ) { + Write-Host "######################################################################" -ForegroundColor Cyan + Write-Host "Removing multi-minion" + Write-Host "Root Dir: $root_dir" + Write-Host "######################################################################" -ForegroundColor Cyan + + # Stop salt-minion service if running + $processes = Get-WmiObject win32_process -filter "name like '%salt-minion%'" | Select-Object commandline,handle + $processes | ForEach-Object { + if ( $_.commandline -like "*$root_dir*" ) { + Write-Host "Killing process: " -NoNewline + $process = Get-Process -Id $_.handle + $process.Kill() + if ( $process.HasExited ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } + } + } + + # Check for locked log file + # The log file will be locked until the running process releases it + while (Test-FileLock -Path 
"$root_dir\var\log\salt\minion") { + Start-Sleep -Seconds 1 + } + + # Remove Directory + if ( Test-Path -Path $root_dir) { + Write-Host "Removing Root Dir: " -NoNewline + Remove-Item -Path $root_dir -Force -Recurse + + if ( !(Test-Path -Path $root_dir) ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } + } + # Remind to delete keys from master + Write-Host "######################################################################" -ForegroundColor Cyan + Write-Host "Multi-Minion successfully removed" + Write-Host ">>>>> Don't forget to remove keys from the master <<<<<" + Write-Host "######################################################################" -ForegroundColor Cyan + exit 0 +} + +################################ EXISTING CHECK ################################ + +# See there is already a running minion +$running = $false +$processes = Get-WmiObject win32_process -filter "name like '%salt-minion%'" | Select-Object commandline,handle +$processes | ForEach-Object { + if ( $_.commandline -like "*$root_dir*" ) { + $running = $true + } +} +if ( $running ) { + Write-Host "######################################################################" -ForegroundColor Cyan + Write-Host "Multi-Minion" + Write-Host "A minion is already running for this user" + Write-Host "######################################################################" -ForegroundColor Cyan + exit 0 +} + +################################### INSTALL #################################### + +Write-Host "######################################################################" -ForegroundColor Cyan +Write-Host "Installing Multi-Minion" +Write-Host "Master: $Master" +Write-Host "Minion ID: $minion_id" +Write-Host "Root Directory: $root_dir" +Write-Host "######################################################################" -ForegroundColor Cyan + +# Create Root Directory Structure +if ( !( Test-Path -path "$root_dir" ) ) { + Write-Host 
"Creating Root Dir: " -NoNewline + New-Item -Path "$root_dir" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Config dir +if ( !( Test-Path -path "$root_dir\conf" ) ) { + Write-Host "Creating config dir: " -NoNewline + New-Item -Path "$root_dir\conf" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir\conf" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Minion.d dir +if ( !( Test-Path -path "$root_dir\conf\minion.d" ) ) { + Write-Host "Creating minion.d dir: " -NoNewline + New-Item -Path "$root_dir\conf\minion.d" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir\conf\minion.d" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# PKI dir +if ( !( Test-Path -path "$root_dir\conf\pki" ) ) { + Write-Host "Creating pki dir: " -NoNewline + New-Item -Path "$root_dir\conf\pki" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir\conf\pki" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Log dir +if ( !( Test-Path -path "$root_dir\var\log\salt" ) ) { + Write-Host "Creating log dir: " -NoNewline + New-Item -Path "$root_dir\var\log\salt" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir\var\log\salt" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Run dir +if ( !( Test-Path -path "$root_dir\var\run" ) ) { + Write-Host "Creating run dir: " -NoNewline + New-Item -Path "$root_dir\var\run" -Type Directory | Out-Null + if ( Test-Path -path "$root_dir\var\run" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Extmods grains dir +if ( !( 
Test-Path -path "$cache_dir\extmods\grains" ) ) { + Write-Host "Creating extmods grains dir: " -NoNewline + New-Item -Path "$cache_dir\extmods\grains" -Type Directory | Out-Null + if ( Test-Path -path "$cache_dir\extmods\grains" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Proc dir +if ( !( Test-Path -path "$cache_dir\proc" ) ) { + Write-Host "Creating proc dir: " -NoNewline + New-Item -Path "$cache_dir\proc" -Type Directory | Out-Null + if ( Test-Path -path "$cache_dir\proc" ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +# Write minion config +Write-Host "Writing minion config: " -NoNewline +Set-Content -Force -Path "$root_dir\conf\minion" -Value "master: $Master" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "id: $minion_id" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "root_dir: $root_dir" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "log_file: $root_dir\var\log\salt\minion" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "log_level_logfile: $LogLevel" + +Add-Content -Force -Path "$root_dir\conf\minion" -Value "utils_dirs:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\var\cache\salt\minion\extmods\utils" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "winrepo_dir: $root_dir\srv\salt\win\repo" +Add-Content -Force -Path "$root_dir\conf\minion" -Value "winrepo_dir_ng: $root_dir\srv\salt\win\repo-ng" + +Add-Content -Force -Path "$root_dir\conf\minion" -Value "file_roots:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " base:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\srv\salt" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\srv\spm\salt" + +Add-Content -Force -Path "$root_dir\conf\minion" -Value "pillar_roots:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " 
base:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\srv\pillar" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\srv\spm\pillar" + +Add-Content -Force -Path "$root_dir\conf\minion" -Value "thorium_roots:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " base:" +Add-Content -Force -Path "$root_dir\conf\minion" -Value " - $root_dir\srv\thorium" + +if ( Test-Path -path "$root_dir\conf\minion" ) { + Write-Host "Success" -ForegroundColor Green +} else { + Write-Host "Failed" -ForegroundColor Red + exit 1 +} + +# Start the minion +if ( $Start ) { + Write-Host "Starting minion process: " -NoNewline + Start-Process -FilePath "`"$salt_bin`"" ` + -ArgumentList "-c","`"$root_dir\conf`"" ` + -WindowStyle Hidden + # Verify running minion + $running = $false + $processes = Get-WmiObject win32_process -filter "name like '%salt-minion%'" | Select-Object commandline,handle + $processes | ForEach-Object { + if ( $_.commandline -like "*$root_dir*" ) { + $running = $true + } + } + if ( $running ) { + Write-Host "Success" -ForegroundColor Green + } else { + Write-Host "Failed" -ForegroundColor Red + exit 1 + } +} + +Write-Host "######################################################################" -ForegroundColor Cyan +Write-Host "Multi-Minion installed successfully" +if ( ! 
$Start ) { + Write-Host "" + Write-Host "To start the minion, run the following command:" + Write-Host "salt-minion -c `"$root_dir\conf`"" + Write-Host "" + Write-Host "To start the minion in the background, run the following command:" + Write-Host "Start-Process -FilePath salt-minion.exe -ArgumentList `"-c`",'`"$root_dir\conf`"' -WindowStyle Hidden" +} +Write-Host "######################################################################" -ForegroundColor Cyan diff --git a/pkg/windows/nsis/build_pkg.ps1 b/pkg/windows/nsis/build_pkg.ps1 index 8c964014e7f1..2573e20998b6 100644 --- a/pkg/windows/nsis/build_pkg.ps1 +++ b/pkg/windows/nsis/build_pkg.ps1 @@ -51,17 +51,19 @@ function Write-Result($result, $ForegroundColor="Green") { # Script Variables #------------------------------------------------------------------------------- -$PROJECT_DIR = $(git rev-parse --show-toplevel) -$SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName -$BUILD_DIR = "$PROJECT_DIR\pkg\windows\build" -$BUILDENV_DIR = "$PROJECT_DIR\pkg\windows\buildenv" -$INSTALLER_DIR = "$SCRIPT_DIR\installer" -$SCRIPTS_DIR = "$BUILDENV_DIR\Scripts" -$PYTHON_BIN = "$SCRIPTS_DIR\python.exe" -$PY_VERSION = [Version]((Get-Command $PYTHON_BIN).FileVersionInfo.ProductVersion) -$PY_VERSION = "$($PY_VERSION.Major).$($PY_VERSION.Minor)" -$NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe" -$ARCH = $(. 
$PYTHON_BIN -c "import platform; print(platform.architecture()[0])") +$PROJECT_DIR = $(git rev-parse --show-toplevel) +$SCRIPT_DIR = (Get-ChildItem "$($myInvocation.MyCommand.Definition)").DirectoryName +$BUILD_DIR = "$PROJECT_DIR\pkg\windows\build" +$BUILDENV_DIR = "$PROJECT_DIR\pkg\windows\buildenv" +$INSTALLER_DIR = "$SCRIPT_DIR\installer" +$SCRIPTS_DIR = "$BUILDENV_DIR\Scripts" +$SITE_PKGS_DIR = "$BUILDENV_DIR\Lib\site-packages" +$BUILD_SALT_DIR = "$SITE_PKGS_DIR\salt" +$PYTHON_BIN = "$SCRIPTS_DIR\python.exe" +$PY_VERSION = [Version]((Get-Command $PYTHON_BIN).FileVersionInfo.ProductVersion) +$PY_VERSION = "$($PY_VERSION.Major).$($PY_VERSION.Minor)" +$NSIS_BIN = "$( ${env:ProgramFiles(x86)} )\NSIS\makensis.exe" +$ARCH = $(. $PYTHON_BIN -c "import platform; print(platform.architecture()[0])") if ( $ARCH -eq "64bit" ) { $ARCH = "AMD64" @@ -133,6 +135,214 @@ if ( Test-Path -Path "$INSTALLER_DIR\salt.ico" ) { exit 1 } +#------------------------------------------------------------------------------- +# Remove Non-Windows Execution Modules +#------------------------------------------------------------------------------- +Write-Host "Removing Non-Windows Execution Modules: " -NoNewline +$modules = "acme", + "aix", + "alternatives", + "apcups", + "apf", + "apt", + "arista", + "at", + "bcache", + "blockdev", + "bluez", + "bridge", + "bsd", + "btrfs", + "ceph", + "container_resource", + "cron", + "csf", + "daemontools", + "deb*", + "devmap", + "dpkg", + "ebuild", + "eix", + "eselect", + "ethtool", + "extfs", + "firewalld", + "freebsd", + "genesis", + "gentoo", + "glusterfs", + "gnomedesktop", + "groupadd", + "grub_legacy", + "guestfs", + "htpasswd", + "ilo", + "img", + "incron", + "inspector", + "ipset", + "iptables", + "iwtools", + "k8s", + "kapacitor", + "keyboard", + "keystone", + "kmod", + "layman", + "linux", + "localemod", + "locate", + "logadm", + "logrotate", + "lvs", + "lxc", + "mac", + "makeconf", + "mdadm", + "mdata", + "monit", + "moosefs", + "mount", + 
"napalm", + "netbsd", + "netscaler", + "neutron", + "nfs3", + "nftables", + "nova", + "nspawn", + "openbsd", + "openstack", + "openvswitch", + "opkg", + "pacman", + "parallels", + "parted", + "pcs", + "pkgin", + "pkgng", + "pkgutil", + "portage_config", + "postfix", + "poudriere", + "powerpath", + "pw_", + "qemu_", + "quota", + "redismod", + "restartcheck", + "rh_", + "riak", + "rpm", + "runit", + "s6", + "scsi", + "sensors", + "service", + "shadow", + "smartos", + "smf", + "snapper", + "solaris", + "solr", + "ssh_", + "supervisord", + "sysbench", + "sysfs", + "sysrc", + "system", + "test_virtual", + "timezone", + "trafficserver", + "tuned", + "udev", + "upstart", + "useradd", + "uswgi", + "varnish", + "vbox", + "virt", + "xapi", + "xbpspkg", + "xfs", + "yum*", + "zfs", + "znc", + "zpool", + "zypper" +$modules | ForEach-Object { + Remove-Item -Path "$BUILD_SALT_DIR\modules\$_*" -Recurse + if ( Test-Path -Path "$BUILD_SALT_DIR\modules\$_*" ) { + Write-Result "Failed" -ForegroundColor Red + Write-Host "Failed to remove: $BUILD_SALT_DIR\modules\$_" + exit 1 + } +} +Write-Result "Success" -ForegroundColor Green + +#------------------------------------------------------------------------------- +# Remove Non-Windows State Modules +#------------------------------------------------------------------------------- +Write-Host "Removing Non-Windows State Modules: " -NoNewline +$states = "acme", + "alternatives", + "apt", + "at", + "blockdev", + "ceph", + "cron", + "csf", + "deb", + "eselect", + "ethtool", + "firewalld", + "glusterfs", + "gnome", + "htpasswd", + "incron", + "ipset", + "iptables", + "k8s", + "kapacitor", + "keyboard", + "keystone", + "kmod", + "layman", + "linux", + "lxc", + "mac", + "makeconf", + "mdadm", + "monit", + "mount", + "nftables", + "pcs", + "pkgng", + "portage", + "powerpath", + "quota", + "redismod", + "smartos", + "snapper", + "ssh", + "supervisord", + "sysrc", + "trafficserver", + "tuned", + "vbox", + "virt.py", + "zfs", + "zpool" +$states | 
ForEach-Object { + Remove-Item -Path "$BUILD_SALT_DIR\states\$_*" -Recurse + if ( Test-Path -Path "$BUILD_SALT_DIR\states\$_*" ) { + Write-Result "Failed" -ForegroundColor Red + Write-Host "Failed to remove: $BUILD_SALT_DIR\states\$_" + exit 1 + } +} +Write-Result "Success" -ForegroundColor Green + #------------------------------------------------------------------------------- # Remove compiled files #------------------------------------------------------------------------------- diff --git a/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi b/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi index 2377a2194677..2a957056f514 100644 --- a/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi +++ b/pkg/windows/nsis/installer/Salt-Minion-Setup.nsi @@ -1109,9 +1109,10 @@ Function ${un}uninstallSalt ${EndIf} # Remove files - Delete "$INSTDIR\uninst.exe" - Delete "$INSTDIR\ssm.exe" + Delete "$INSTDIR\multi-minion*" Delete "$INSTDIR\salt*" + Delete "$INSTDIR\ssm.exe" + Delete "$INSTDIR\uninst.exe" Delete "$INSTDIR\vcredist.exe" RMDir /r "$INSTDIR\DLLs" RMDir /r "$INSTDIR\Include" @@ -1189,6 +1190,20 @@ Function ${un}uninstallSalt ${Else} + # Prompt for the removal of the Installation Directory which contains + # the extras directory and the Root Directory which contains the config + # and pki directories. These directories will not be removed during + # an upgrade. + ${IfNot} $DeleteRootDir == 1 + MessageBox MB_YESNO|MB_DEFBUTTON2|MB_USERICON \ + "Would you like to completely remove the entire Salt \ + Installation? 
This includes the following:$\n\ + - Extra Pip Packages ($INSTDIR\extras-3.##)$\n\ + - Minion Config ($RootDir\conf)$\n\ + - Minion PKIs ($RootDir\conf\pki)"\ + /SD IDNO IDNO finished + ${EndIf} + # New Method Installation # This makes the $APPDATA variable point to the ProgramData folder instead # of the current user's roaming AppData folder @@ -1214,8 +1229,8 @@ Function ${un}uninstallSalt # Only delete Salt Project directory if it's in Program Files # Otherwise, we can't guess where the user may have installed salt ${GetParent} $INSTDIR $0 # Get parent directory (Salt Project) - ${If} $0 == "$ProgramFiles\Salt Project" # Make sure it's not ProgramFiles - ${OrIf} $0 == "$ProgramFiles64\Salt Project" # Make sure it's not Program Files (x86) + ${If} $0 == "$ProgramFiles\Salt Project" # Make sure it's ProgramFiles + ${OrIf} $0 == "$ProgramFiles64\Salt Project" # Make sure it's Program Files (x86) SetOutPath "$SysDrive" # Can't remove CWD RMDir /r $0 ${EndIf} @@ -1228,15 +1243,6 @@ Function ${un}uninstallSalt # Expand any environment variables ExpandEnvStrings $RootDir $RootDir - # Prompt for the removal of the Root Directory which contains the config - # and pki directories - ${IfNot} $DeleteRootDir == 1 - MessageBox MB_YESNO|MB_DEFBUTTON2|MB_USERICON \ - "Would you like to completely remove the Root Directory \ - ($RootDir) and all of its contents?" \ - /SD IDNO IDNO finished - ${EndIf} - # Remove the Salt Project directory in ProgramData # The Salt Project directory will only ever be in ProgramData # It is not user selectable @@ -1837,8 +1843,8 @@ Function un.parseUninstallerCommandLineSwitches $\n$\t$\tare the same (C:\salt)\ $\n\ $\n/delete-root-dir$\tDelete the root directory that contains the config\ - $\n$\t$\tand pki directories. Default is to not delete the root\ - $\n$\t$\tdirectory\ + $\n$\t$\tand pki directories. Also removes the installation directory\ + $\n$\t$\tincluding the extras directory. 
Default is to not delete\ $\n\ $\n$\t$\tThis applies to new method installations where the\ $\n$\t$\troot directory is in ProgramData and the installation\ diff --git a/pkg/windows/prep_salt.ps1 b/pkg/windows/prep_salt.ps1 index c9de14aa554b..74497648482d 100644 --- a/pkg/windows/prep_salt.ps1 +++ b/pkg/windows/prep_salt.ps1 @@ -153,7 +153,8 @@ if ( $PKG ) { } } -if ( $PKG ) { +# Make sure ssm.exe is present. This is needed for VMtools +if ( ! (Test-Path -Path "$BUILD_DIR\ssm.exe") ) { Write-Host "Copying SSM to Root: " -NoNewline Invoke-WebRequest -Uri "$SALT_DEP_URL/ssm-2.24-103-gdee49fc.exe" -OutFile "$BUILD_DIR\ssm.exe" if ( Test-Path -Path "$BUILD_DIR\ssm.exe" ) { @@ -164,6 +165,25 @@ if ( $PKG ) { } } +# Copy the multiminion scripts to the Build directory +$scripts = @( + "multi-minion.cmd", + "multi-minion.ps1" +) +$scripts | ForEach-Object { + if (!(Test-Path -Path "$BUILD_DIR\$_")) { + Write-Host "Copying $_ to the Build directory: " -NoNewline + Copy-Item -Path "$SCRIPT_DIR\$_" -Destination "$BUILD_DIR\$_" + if (Test-Path -Path "$BUILD_DIR\$_") { + Write-Result "Success" -ForegroundColor Green + } else { + Write-Result "Failed" -ForegroundColor Red + exit 1 + } + } +} + +# Copy VCRedist 2013 to the prereqs directory New-Item -Path $PREREQ_DIR -ItemType Directory | Out-Null Write-Host "Copying VCRedist 2013 $ARCH_X to prereqs: " -NoNewline $file = "vcredist_$ARCH_X`_2013.exe" @@ -175,6 +195,7 @@ if ( Test-Path -Path "$PREREQ_DIR\$file" ) { exit 1 } +# Copy Universal C Runtimes to the prereqs directory Write-Host "Copying Universal C Runtimes $ARCH_X to prereqs: " -NoNewline $file = "ucrt_$ARCH_X.zip" Invoke-WebRequest -Uri "$SALT_DEP_URL/$file" -OutFile "$PREREQ_DIR\$file" @@ -257,215 +278,6 @@ $directories | ForEach-Object { } } -#------------------------------------------------------------------------------- -# Remove Non-Windows Execution Modules -#------------------------------------------------------------------------------- -Write-Host "Removing 
Non-Windows Execution Modules: " -NoNewline -$modules = "acme", - "aix", - "alternatives", - "apcups", - "apf", - "apt", - "arista", - "at", - "bcache", - "blockdev", - "bluez", - "bridge", - "bsd", - "btrfs", - "ceph", - "container_resource", - "cron", - "csf", - "daemontools", - "deb*", - "devmap", - "dpkg", - "ebuild", - "eix", - "eselect", - "ethtool", - "extfs", - "firewalld", - "freebsd", - "genesis", - "gentoo", - "glusterfs", - "gnomedesktop", - "groupadd", - "grub_legacy", - "guestfs", - "htpasswd", - "ilo", - "img", - "incron", - "inspector", - "ipset", - "iptables", - "iwtools", - "k8s", - "kapacitor", - "keyboard", - "keystone", - "kmod", - "layman", - "linux", - "localemod", - "locate", - "logadm", - "logrotate", - "lvs", - "lxc", - "mac", - "makeconf", - "mdadm", - "mdata", - "monit", - "moosefs", - "mount", - "napalm", - "netbsd", - "netscaler", - "neutron", - "nfs3", - "nftables", - "nova", - "nspawn", - "openbsd", - "openstack", - "openvswitch", - "opkg", - "pacman", - "parallels", - "parted", - "pcs", - "pkgin", - "pkgng", - "pkgutil", - "portage_config", - "postfix", - "poudriere", - "powerpath", - "pw_", - "qemu_", - "quota", - "redismod", - "restartcheck", - "rh_", - "riak", - "rpm", - "runit", - "s6", - "scsi", - "seed", - "sensors", - "service", - "shadow", - "smartos", - "smf", - "snapper", - "solaris", - "solr", - "ssh_", - "supervisord", - "sysbench", - "sysfs", - "sysrc", - "system", - "test_virtual", - "timezone", - "trafficserver", - "tuned", - "udev", - "upstart", - "useradd", - "uswgi", - "varnish", - "vbox", - "virt", - "xapi", - "xbpspkg", - "xfs", - "yum*", - "zfs", - "znc", - "zpool", - "zypper" -$modules | ForEach-Object { - Remove-Item -Path "$BUILD_SALT_DIR\modules\$_*" -Recurse - if ( Test-Path -Path "$BUILD_SALT_DIR\modules\$_*" ) { - Write-Result "Failed" -ForegroundColor Red - Write-Host "Failed to remove: $BUILD_SALT_DIR\modules\$_" - exit 1 - } -} -Write-Result "Success" -ForegroundColor Green - 
-#------------------------------------------------------------------------------- -# Remove Non-Windows State Modules -#------------------------------------------------------------------------------- -Write-Host "Removing Non-Windows State Modules: " -NoNewline -$states = "acme", - "alternatives", - "apt", - "at", - "blockdev", - "ceph", - "cron", - "csf", - "deb", - "eselect", - "ethtool", - "firewalld", - "glusterfs", - "gnome", - "htpasswd", - "incron", - "ipset", - "iptables", - "k8s", - "kapacitor", - "keyboard", - "keystone", - "kmod", - "layman", - "linux", - "lxc", - "mac", - "makeconf", - "mdadm", - "monit", - "mount", - "nftables", - "pcs", - "pkgng", - "portage", - "powerpath", - "quota", - "redismod", - "smartos", - "snapper", - "ssh", - "supervisord", - "sysrc", - "trafficserver", - "tuned", - "vbox", - "virt.py", - "zfs", - "zpool" -$states | ForEach-Object { - Remove-Item -Path "$BUILD_SALT_DIR\states\$_*" -Recurse - if ( Test-Path -Path "$BUILD_SALT_DIR\states\$_*" ) { - Write-Result "Failed" -ForegroundColor Red - Write-Host "Failed to remove: $BUILD_SALT_DIR\states\$_" - exit 1 - } -} -Write-Result "Success" -ForegroundColor Green - Write-Host "Removing unneeded files (.pyc, .chm): " -NoNewline $remove = "__pycache__", "*.pyc", diff --git a/requirements/base.txt b/requirements/base.txt index c19d8804a2b1..6ed44eadddce 100644 --- a/requirements/base.txt +++ b/requirements/base.txt @@ -1,3 +1,5 @@ +--constraint=constraints.txt + Jinja2 jmespath msgpack>=0.5,!=0.5.5 diff --git a/requirements/constraints.txt b/requirements/constraints.txt new file mode 100644 index 000000000000..4406e011a336 --- /dev/null +++ b/requirements/constraints.txt @@ -0,0 +1,3 @@ +setuptools >= 65.6.3,< 69.0 +setuptools-scm < 8.0.0 +pip >= 22.3.1,< 23.0 diff --git a/requirements/darwin.txt b/requirements/darwin.txt index ebd6e174696a..fc689ebb13b8 100644 --- a/requirements/darwin.txt +++ b/requirements/darwin.txt @@ -3,16 +3,14 @@ -r zeromq.txt apache-libcloud>=2.4.0 
-backports.ssl_match_hostname>=3.7.0.1; python_version < '3.7' cherrypy>=17.4.1 -cryptography>=2.6.1 -gitpython>=3.1.30; python_version >= '3.7' +cryptography>=41.0.3 +gitpython>=3.1.37 idna>=2.8 linode-python>=1.1.1 -mako>=1.0.7 pyasn1>=0.4.8 pycparser>=2.21 -pyopenssl>=19.0.0 +pyopenssl>=23.2.0 python-dateutil>=2.8.0 python-gnupg>=0.4.4 setproctitle>=1.2.3 diff --git a/requirements/pytest.txt b/requirements/pytest.txt index 5b67583a3d70..209db530ab15 100644 --- a/requirements/pytest.txt +++ b/requirements/pytest.txt @@ -1,13 +1,13 @@ mock >= 3.0.0 # PyTest -pytest >= 7.0.1; python_version <= "3.6" -pytest >= 7.2.0; python_version > "3.6" -pytest-salt-factories >= 1.0.0rc21; sys_platform == 'win32' -pytest-salt-factories[docker] >= 1.0.0rc21; sys_platform != 'win32' -pytest-tempdir >= 2019.10.12 +docker +pytest >= 7.2.0 +pytest-salt-factories >= 1.0.0rc28 pytest-helpers-namespace >= 2019.1.8 pytest-subtests pytest-timeout pytest-httpserver pytest-custom-exit-code >= 0.3 flaky +more-itertools +pyfakefs diff --git a/requirements/static/ci/changelog.in b/requirements/static/ci/changelog.in index 5f923d27d45a..ffff2d999168 100644 --- a/requirements/static/ci/changelog.in +++ b/requirements/static/ci/changelog.in @@ -1,3 +1,5 @@ +--constraint=./py{py_version}/{platform}.txt + towncrier==22.12.0 looseversion packaging diff --git a/requirements/static/ci/cloud.in b/requirements/static/ci/cloud.in index 54477748df28..29675cccf05d 100644 --- a/requirements/static/ci/cloud.in +++ b/requirements/static/ci/cloud.in @@ -1,6 +1,10 @@ # Cloud tests requirements +--constraint=./py{py_version}/{platform}.txt + apache-libcloud>=2.5.0 netaddr profitbricks pypsexec pywinrm +pyspnego==0.8.0 +smbprotocol==1.10.1 diff --git a/requirements/static/ci/common.in b/requirements/static/ci/common.in index 590410cb3d06..6e1af3c6a4e0 100644 --- a/requirements/static/ci/common.in +++ b/requirements/static/ci/common.in @@ -1,21 +1,20 @@ # Requirements in this file apply to all platforms. 
# We can also exclude platforms from the requirements using markers, but if a requirement only applies # to a particular platform, please add it to the corresponding `.in` file in this directory. -azure==4.0.0; sys_platform != 'win32' +--constraint=../pkg/py{py_version}/{platform}.txt + apache-libcloud>=1.5.0; sys_platform != 'win32' -boto3>=1.16.0,<1.17.0; python_version < '3.6' -boto3>=1.17.67; python_version >= '3.6' +boto3>=1.17.67 boto>=2.46.0 cassandra-driver>=2.0 certifi>=2022.12.07 -cffi>=1.12.2 +cffi>=1.14.6 cherrypy>=17.4.1 clustershell croniter>=0.3.0,!=0.3.22"; sys_platform != 'win32' dnspython -docker -etcd3-py==0.1.6 ; python_version >= '3.6' -gitpython>=3.1.30; python_version >= '3.7' +etcd3-py==0.1.6 +gitpython>=3.1.37 jmespath jsonschema junos-eznc; sys_platform != 'win32' and python_version <= '3.10' @@ -24,19 +23,22 @@ kazoo; sys_platform != 'win32' and sys_platform != 'darwin' keyring==5.7.1 kubernetes<4.0 libnacl>=1.7.1; sys_platform != 'win32' and sys_platform != 'darwin' -moto>=1.3.14 ; python_version <= '3.5' -moto>=2.0.0 ; python_version >= '3.6' +moto>=2.0.0 # Napalm pulls in pyeapi which does not work on Py3.10 -napalm; sys_platform != 'win32' and python_version > '3.6' and python_version < '3.10' +napalm; sys_platform != 'win32' and python_version < '3.10' paramiko>=2.10.1; sys_platform != 'win32' and sys_platform != 'darwin' -passlib[bcrypt]>=1.7.4 +# bcrypt is an extra requirement for passlib, and we shouldn't use extras, like, passlib[bcrypt] +# since that will break using the compiled static requirements files as constraints file +bcrypt +passlib>=1.7.4 +pynacl>=1.5.0 pycparser>=2.21; python_version >= '3.9' pyinotify>=0.9.6; sys_platform != 'win32' and sys_platform != 'darwin' and platform_system != "openbsd" python-etcd>0.4.2 pyvmomi requests rfc3987 -sqlparse>=0.4.2 +sqlparse>=0.4.4 strict_rfc3339>=0.7 toml vcert~=0.7.0; sys_platform != 'win32' @@ -46,3 +48,4 @@ watchdog>=0.9.0 genshi>=0.7.3 cheetah3>=3.2.2 mako +wempy diff --git 
a/requirements/static/ci/crypto.in b/requirements/static/ci/crypto.in index 83e67bcfc7e0..0724745aad2e 100644 --- a/requirements/static/ci/crypto.in +++ b/requirements/static/ci/crypto.in @@ -1,2 +1,4 @@ +--constraint=./py{py_version}/{platform}.txt + m2crypto pycryptodome diff --git a/requirements/static/ci/darwin.in b/requirements/static/ci/darwin.in index d7b8f010e0ff..bd95f1f41b73 100644 --- a/requirements/static/ci/darwin.in +++ b/requirements/static/ci/darwin.in @@ -2,8 +2,10 @@ # XXX: Temporarily do not install pylxd. # pylxd(or likely ws4py) will cause the test suite to hang at the finish line under runtests.py # pylxd>=2.2.5 +--constraint=../pkg/py{py_version}/{platform}.txt + yamlordereddictloader -pygit2>=1.2.0; python_version >= '3.7' +pygit2>=1.10.1 yamllint mercurial hglib diff --git a/requirements/static/ci/freebsd.in b/requirements/static/ci/freebsd.in index 20f88c8724a7..1fad585d0c9b 100644 --- a/requirements/static/ci/freebsd.in +++ b/requirements/static/ci/freebsd.in @@ -1,5 +1,7 @@ # FreeBSD static CI requirements -pygit2==1.8.0; python_version >= '3.7' +--constraint=../pkg/py{py_version}/{platform}.txt + +pygit2>=1.10.1 yamllint mercurial hglib diff --git a/requirements/static/ci/invoke.in b/requirements/static/ci/invoke.in deleted file mode 100644 index 87633def6566..000000000000 --- a/requirements/static/ci/invoke.in +++ /dev/null @@ -1,3 +0,0 @@ -invoke -blessings -pyyaml diff --git a/requirements/static/ci/lint.in b/requirements/static/ci/lint.in index 7b568d61d385..86ed3a61c288 100644 --- a/requirements/static/ci/lint.in +++ b/requirements/static/ci/lint.in @@ -1,4 +1,7 @@ # Lint requirements +--constraint=./py{py_version}/{platform}.txt + +docker pylint==2.4.4 -SaltPyLint>=v2020.9.28 +SaltPyLint>=2023.3.8 toml diff --git a/requirements/static/ci/linux.in b/requirements/static/ci/linux.in index c05c78de1e5b..203d66ce71f2 100644 --- a/requirements/static/ci/linux.in +++ b/requirements/static/ci/linux.in @@ -1,18 +1,16 @@ # Linux static CI 
requirements +--constraint=../pkg/py{py_version}/{platform}.txt + pyiface -pygit2<1.1.0; python_version <= '3.8' -pygit2>=1.4.0; python_version > '3.8' -pygit2==1.9.1; python_version >= '3.10' -pymysql==0.9.3; python_version <= '3.5' -pymysql>=1.0.2; python_version > '3.5' +pygit2>=1.10.1 +pymysql>=1.0.2 ansible>=4.4.0; python_version < '3.9' ansible>=7.0.0; python_version >= '3.9' twilio -python-telegram-bot==12.8; python_version <= '3.5' -python-telegram-bot>=13.7; python_version > '3.5' +python-telegram-bot>=13.7 yamllint mercurial hglib redis-py-cluster python-consul -slack_bolt +slack-bolt diff --git a/requirements/static/ci/pkgtests-windows.in b/requirements/static/ci/pkgtests-windows.in deleted file mode 100644 index f34edda6f043..000000000000 --- a/requirements/static/ci/pkgtests-windows.in +++ /dev/null @@ -1,5 +0,0 @@ -cherrypy -pytest-salt-factories==1.0.0rc17 -pythonnet==3.0.1; python_version >= '3.7' -pythonnet==2.5.2; python_version < '3.7' -wmi==1.5.1; sys_platform == 'win32' diff --git a/requirements/static/ci/pkgtests.in b/requirements/static/ci/pkgtests.in deleted file mode 100644 index e40f7d075e23..000000000000 --- a/requirements/static/ci/pkgtests.in +++ /dev/null @@ -1,2 +0,0 @@ -cherrypy -pytest-salt-factories==1.0.0rc17 diff --git a/requirements/static/ci/py3.10/changelog.txt b/requirements/static/ci/py3.10/changelog.txt index 0865f49c94b0..00114bce9491 100644 --- a/requirements/static/ci/py3.10/changelog.txt +++ b/requirements/static/ci/py3.10/changelog.txt @@ -2,26 +2,37 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/changelog.txt --pip-args='--constraint=requirements/static/ci/py3.10/linux.txt' requirements/static/ci/changelog.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/changelog.txt requirements/static/ci/changelog.in # click-default-group==1.2.2 # via towncrier -click==7.1.2 +click==7.1.1 # via + # -c 
requirements/static/ci/py3.10/linux.txt # click-default-group # towncrier incremental==17.5.0 # via towncrier jinja2==3.1.2 - # via towncrier -looseversion==1.0.3 - # via -r requirements/static/ci/changelog.in -markupsafe==2.0.1 - # via jinja2 -packaging==23.0 - # via -r requirements/static/ci/changelog.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # towncrier +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/changelog.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # jinja2 +packaging==22.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/changelog.in tomli==2.0.1 - # via towncrier + # via + # -c requirements/static/ci/py3.10/linux.txt + # towncrier towncrier==22.12.0 # via -r requirements/static/ci/changelog.in diff --git a/requirements/static/ci/py3.10/cloud.txt b/requirements/static/ci/py3.10/cloud.txt index aa15dd90e2b0..2b962a7a2ed2 100644 --- a/requirements/static/ci/py3.10/cloud.txt +++ b/requirements/static/ci/py3.10/cloud.txt @@ -2,28 +2,33 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/cloud.txt --pip-args='--constraint=requirements/static/ci/py3.10/linux.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.7 +aiohttp==3.9.0 # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 - # via etcd3-py + # -c requirements/static/ci/py3.10/linux.txt + # etcd3-py aiosignal==1.2.0 - # via aiohttp + # via + # -c requirements/static/ci/py3.10/linux.txt + # aiohttp 
apache-libcloud==2.5.0 ; sys_platform != "win32" # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/cloud.in # -r requirements/static/ci/common.in -asn1crypto==1.4.0 +asn1crypto==1.3.0 # via + # -c requirements/static/ci/py3.10/linux.txt # certvalidator # oscrypto async-timeout==4.0.2 - # via aiohttp -attrs==21.2.0 # via + # -c requirements/static/ci/py3.10/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.10/linux.txt # aiohttp # jsonschema # pytest @@ -31,689 +36,469 @@ attrs==21.2.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.27 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - 
# azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.52 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt 
-azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # 
azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 
; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 - # via virtualenv -bcrypt==3.2.0 # via + # -c requirements/static/ci/py3.10/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # moto boto==2.49.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in botocore==1.24.46 # via + # -c requirements/static/ci/py3.10/linux.txt # boto3 # moto # s3transfer cachetools==4.2.2 - # via google-auth -cassandra-driver==3.25.0 - # via -r requirements/static/ci/common.in -certifi==2022.12.7 # via + # -c requirements/static/ci/py3.10/linux.txt + # google-auth +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # requests certvalidator==0.11.1 - # via vcert + # via + # -c requirements/static/ci/py3.10/linux.txt + # vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp -cheetah3==3.2.6.post1 - # via -r requirements/static/ci/common.in +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r 
requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in -click==8.0.1 - # via geomet +click==7.1.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # geomet clustershell==1.8.3 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in contextvars==2.4 - # via -r requirements/base.txt -croniter==1.0.15 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -cryptography==3.3.2 - # via - # adal - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in # etcd3-py # moto # paramiko # pyopenssl + # pyspnego # requests-ntlm # smbprotocol # vcert distlib==0.3.2 - # via virtualenv -distro==1.6.0 # via + # -c requirements/static/ci/py3.10/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # pytest-skip-markers -dnspython==2.1.0 +dnspython==1.16.0 # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # python-etcd -docker==5.0.2 +docker==6.1.3 # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/pytest.txt 
+etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 - # via pytest + # via + # -c requirements/static/ci/py3.10/linux.txt + # pytest filelock==3.0.12 - # via virtualenv + # via + # -c requirements/static/ci/py3.10/linux.txt + # virtualenv flaky==3.7.0 - # via -r requirements/pytest.txt + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/pytest.txt frozenlist==1.3.0 # via + # -c requirements/static/ci/py3.10/linux.txt # aiohttp # aiosignal genshi==0.7.5 - # via -r requirements/static/ci/common.in -geomet==0.2.1.post1 - # via cassandra-driver + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in google-auth==2.1.0 - # via kubernetes -idna==2.8 # via + # -c requirements/static/ci/py3.10/linux.txt + # kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # etcd3-py # requests # yarl -immutables==0.16 - # via contextvars +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/static/pkg/linux.in -iniconfig==1.1.1 - # via pytest -ipaddress==1.0.23 - # via kubernetes -isodate==0.6.0 - # via msrest + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r 
requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # pytest +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.10/linux.txt + # kubernetes jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy -jaraco.functools==3.3.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # junos-eznc # moto -jmespath==0.10.0 +jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 # botocore jsonschema==3.2.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in junos-eznc==2.6.0 ; sys_platform != "win32" and python_version <= "3.10" - # via -r requirements/static/ci/common.in -jxmlease==1.0.3 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -kazoo==2.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r 
requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in keyring==5.7.1 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in kubernetes==3.0.0 - # via -r requirements/static/ci/common.in -libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via + # -c requirements/static/ci/py3.10/linux.txt # junos-eznc # ncclient mako==1.2.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # jinja2 # mako # moto # werkzeug -mock==4.0.3 - # via -r requirements/pytest.txt -more-itertools==8.8.0 +mock==5.1.0 # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +moto==3.0.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in msgpack==1.0.2 
# via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.21 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # 
azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via + # -c requirements/static/ci/py3.10/linux.txt # aiohttp # yarl ncclient==0.6.9 - # via junos-eznc + # via + # -c requirements/static/ci/py3.10/linux.txt + # junos-eznc netaddr==0.7.19 # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/cloud.in # junos-eznc ntlm-auth==1.3.0 + # via requests-ntlm +oscrypto==1.2.0 # via - # requests-ntlm - # smbprotocol -oauthlib==3.2.2 - # via requests-oauthlib -oscrypto==1.2.1 - # via certvalidator -packaging==21.3 + # -c requirements/static/ci/py3.10/linux.txt + # certvalidator +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # junos-eznc # ncclient # scp -passlib[bcrypt]==1.7.4 - # via -r requirements/static/ci/common.in -platformdirs==2.3.0 - # via virtualenv -pluggy==1.0.0 - # via pytest -portend==2.7.1 - # via cherrypy +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # virtualenv +pluggy==0.13.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # pytest +portend==2.4 + # via + # -c 
requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # cherrypy profitbricks==4.1.3 # via -r requirements/static/ci/cloud.in psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pyasn1-modules==0.2.8 - # via google-auth +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.10/linux.txt + # google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/py3.10/linux.txt # pyasn1-modules # rsa - # smbprotocol pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in # cffi -pycryptodomex==3.10.1 - # via -r requirements/crypto.txt +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/pytest.txt pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" - # via -r requirements/static/ci/common.in -pyjwt==2.4.0 - # via adal -pynacl==1.4.0 - # via paramiko -pyopenssl==20.0.1 # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 # via + # -c requirements/static/ci/py3.10/linux.txt # junos-eznc - # packaging pypsexec==0.1.0 # via -r requirements/static/ci/cloud.in -pyrsistent==0.18.0 - # via 
jsonschema -pyserial==3.5 - # via junos-eznc +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.10/linux.txt + # jsonschema +pyserial==3.4 + # via + # -c requirements/static/ci/py3.10/linux.txt + # junos-eznc +pyspnego==0.8.0 + # via + # -r requirements/static/ci/cloud.in + # smbprotocol pytest-custom-exit-code==0.3.0 - # via -r requirements/pytest.txt + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/pytest.txt pytest-helpers-namespace==2021.4.29 # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 - # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" - # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 - # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-httpserver==1.0.8 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # pytest-salt-factories +pytest-skip-markers==1.5.0 # via + # -c requirements/static/ci/py3.10/linux.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.5.0 - # via -r requirements/pytest.txt -pytest-system-statistics==1.0.2 - # via pytest-salt-factories -pytest-tempdir==2019.10.12 +pytest-subtests==0.4.0 # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via + # -c requirements/static/ci/py3.10/linux.txt # pytest-salt-factories -pytest-timeout==2.0.2 - # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest-timeout==1.4.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/pytest.txt +pytest==7.2.0 # via + # -c 
requirements/static/ci/py3.10/linux.txt # -r requirements/pytest.txt # pytest-custom-exit-code # pytest-helpers-namespace @@ -722,173 +507,236 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout -python-dateutil==2.8.2 +python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes # moto # vcert python-etcd==0.4.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # moto # tempora -pyvmomi==7.0.2 - # via -r requirements/static/ci/common.in +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # clustershell # junos-eznc # kubernetes + # pytest-salt-factories # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/zeromq.txt # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r 
requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # profitbricks # pyvmomi # pywinrm # requests-ntlm - # requests-oauthlib # responses # vcert -responses==0.14.0 - # via moto +responses==0.10.6 + # via + # -c requirements/static/ci/py3.10/linux.txt + # moto rfc3987==1.3.8 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 - # via google-auth -s3transfer==0.5.0 - # via boto3 -scp==0.14.1 - # via junos-eznc + # via + # -c requirements/static/ci/py3.10/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # boto3 +scp==0.13.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # junos-eznc semantic-version==2.9.0 - # via etcd3-py + # via + # -c requirements/static/ci/py3.10/linux.txt + # etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # msrestazure + # more-itertools # ncclient # paramiko # profitbricks - # pynacl - # pyopenssl # pypsexec # python-dateutil # pyvmomi # pywinrm # responses - # smbprotocol # transitions # vcert # virtualenv # 
websocket-client -smbprotocol==0.1.1 - # via pypsexec +smbprotocol==1.10.1 + # via + # -r requirements/static/ci/cloud.in + # pypsexec smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in strict-rfc3339==0.7 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # portend timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in tomli==2.0.1 - # via pytest + # via + # -c requirements/static/ci/py3.10/linux.txt + # pytest transitions==0.8.9 - # via junos-eznc -typing-extensions==4.2.0 # via + # -c requirements/static/ci/py3.10/linux.txt + # junos-eznc +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.10/linux.txt # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # botocore + # docker # kubernetes # python-etcd # requests - # responses vcert==0.7.4 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -virtualenv==20.8.0 # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # pytest-salt-factories -watchdog==2.1.5 - # 
via -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in websocket-client==0.40.0 # via + # -c requirements/static/ci/py3.10/linux.txt # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 # via + # -c requirements/static/ci/py3.10/linux.txt # moto # pytest-httpserver xmltodict==0.12.0 # via + # -c requirements/static/ci/py3.10/linux.txt # moto # pywinrm yamlordereddictloader==0.4.0 - # via junos-eznc + # via + # -c requirements/static/ci/py3.10/linux.txt + # junos-eznc yarl==1.7.2 - # via aiohttp -zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/py3.10/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # cherrypy zipp==3.6.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.10/darwin-crypto.txt b/requirements/static/ci/py3.10/darwin-crypto.txt index 73a0145e1dc2..6b855b4848e0 100644 --- a/requirements/static/ci/py3.10/darwin-crypto.txt +++ b/requirements/static/ci/py3.10/darwin-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/darwin-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.10/darwin.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/darwin-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.10/darwin.txt 
b/requirements/static/ci/py3.10/darwin.txt index 75d7a117c1b5..f27842ae66be 100644 --- a/requirements/static/ci/py3.10/darwin.txt +++ b/requirements/static/ci/py3.10/darwin.txt @@ -2,18 +2,15 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/darwin.txt --pip-args='--constraint=requirements/static/pkg/py3.10/darwin.txt' requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in # -adal==1.2.5 - # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp apache-libcloud==2.5.0 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/darwin.txt # -r requirements/static/ci/common.in asn1crypto==1.3.0 @@ -22,7 +19,7 @@ asn1crypto==1.3.0 # oscrypto async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -31,311 +28,13 @@ attrs==20.3.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.26 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # 
azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.51 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - 
# via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # 
azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt 
-azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==3.1.6 # via + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -350,32 +49,35 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # requests certvalidator==0.11.1 # via vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # pygit2 # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # cherrypy cherrypy==18.6.1 # via + # -c 
requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/darwin.txt # -r requirements/static/ci/common.in click==7.0 @@ -383,16 +85,15 @@ click==7.0 clustershell==1.8.1 # via -r requirements/static/ci/common.in contextvars==2.4 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==3.3.2 +cryptography==41.0.7 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/darwin.txt - # adal - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common # etcd3-py # moto # paramiko @@ -402,17 +103,16 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt # pytest-skip-markers dnspython==1.16.0 # via # -r requirements/static/ci/common.in # python-etcd -docker==5.0.3 - # via - # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -428,50 +128,65 @@ genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 # via cassandra-driver -gitdb==4.0.5 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitdb==4.0.7 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/darwin.txt # -r requirements/static/ci/common.in -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes hglib==2.6.1 # via -r requirements/static/ci/darwin.in -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/darwin.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt 
+ # contextvars importlib-metadata==6.0.0 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/darwin.txt iniconfig==1.0.1 # via pytest ipaddress==1.0.22 # via kubernetes -isodate==0.6.0 - # via msrest jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt # junos-eznc # moto jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -487,19 +202,22 @@ keyring==5.7.1 kubernetes==3.0.0 # via -r requirements/static/ci/common.in linode-python==1.1.1 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/darwin.txt looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/base.txt lxml==4.9.1 # via # junos-eznc # ncclient mako==1.2.2 - # via - # -r requirements/darwin.txt - # -r requirements/static/ci/common.in + # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt # jinja2 # mako @@ -507,112 +225,23 @@ markupsafe==2.1.2 # werkzeug mercurial==6.0.1 # via -r requirements/static/ci/darwin.in -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==8.2.0 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r 
requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.19 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # 
azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via # aiohttp @@ -621,20 +250,20 @@ ncclient==0.6.9 # via junos-eznc netaddr==0.7.19 # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib oscrypto==1.2.0 # via certvalidator -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt + # docker # pytest paramiko==2.10.1 # via # junos-eznc # ncclient # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.9.0 # via yamllint @@ -645,9 +274,12 @@ platformdirs==2.2.0 pluggy==0.13.1 # via pytest portend==2.6 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -656,30 +288,35 @@ pyasn1-modules==0.2.4 # via google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/darwin.txt # pyasn1-modules # rsa pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/darwin.txt # -r requirements/static/ci/common.in # cffi pycryptodomex==3.9.8 - # via -r requirements/crypto.txt -pygit2==1.9.1 ; python_version >= "3.7" + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt 
+ # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/darwin.in -pyjwt==2.4.0 - # via adal -pynacl==1.3.0 - # via paramiko -pyopenssl==19.0.0 +pynacl==1.5.0 # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/darwin.txt # etcd3-py pyparsing==3.0.9 - # via - # junos-eznc - # packaging + # via junos-eznc pyrsistent==0.17.3 # via jsonschema pyserial==3.4 @@ -691,13 +328,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -706,13 +343,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -722,14 +355,11 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-dateutil==2.8.0 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/darwin.txt - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes @@ -738,46 +368,42 @@ python-dateutil==2.8.0 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # 
via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/darwin.txt pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # moto # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt # clustershell # junos-eznc # kubernetes + # pytest-salt-factories # yamllint # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # pyvmomi - # requests-oauthlib # responses # vcert # vultr @@ -794,27 +420,23 @@ scp==0.13.2 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/darwin.txt six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # google-auth - # isodate # jsonschema # junos-eznc # kubernetes - # mock - # msrestazure # ncclient # paramiko - # pynacl - # pyopenssl # python-dateutil # pyvmomi # responses @@ -822,29 +444,37 @@ six==1.16.0 # vcert # virtualenv # websocket-client -smmap==3.0.2 - # via gitdb -sqlparse==0.4.2 +smmap==4.0.0 + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # gitdb +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r 
requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # portend timelib==0.2.5 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/darwin.txt toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest -transitions==0.8.1 +transitions==0.8.9 # via junos-eznc typing-extensions==4.2.0 # via # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -855,14 +485,18 @@ virtualenv==20.7.2 # -r requirements/static/ci/common.in # pytest-salt-factories vultr==1.0.1 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # -r requirements/darwin.txt watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver @@ -877,9 +511,13 @@ yamlordereddictloader==0.4.0 yarl==1.7.2 # via aiohttp zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # cherrypy zipp==3.12.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.10/darwin.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.10/docs.txt b/requirements/static/ci/py3.10/docs.txt index 5bdd38ff930f..75bd763edc07 100644 --- a/requirements/static/ci/py3.10/docs.txt +++ b/requirements/static/ci/py3.10/docs.txt @@ -2,17 +2,17 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt +# 
pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt # alabaster==0.7.12 # via sphinx babel==2.9.1 # via sphinx -certifi==2022.12.7 +certifi==2023.07.22 # via # -c requirements/static/ci/py3.10/linux.txt # requests -chardet==3.0.4 +charset-normalizer==3.2.0 # via # -c requirements/static/ci/py3.10/linux.txt # requests @@ -34,7 +34,7 @@ distro==1.5.0 # -r requirements/base.txt docutils==0.19 # via sphinx -idna==2.8 +idna==3.2 # via # -c requirements/static/ci/py3.10/linux.txt # requests @@ -78,7 +78,7 @@ looseversion==1.0.2 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt -markdown-it-py==2.1.0 +markdown-it-py==2.2.0 # via # mdit-py-plugins # myst-docutils @@ -130,7 +130,7 @@ pytz==2022.1 # -c requirements/static/ci/py3.10/linux.txt # babel # tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt @@ -139,7 +139,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt @@ -177,13 +177,13 @@ tempora==4.1.1 # via # -c requirements/static/ci/py3.10/linux.txt # portend -typing-extensions==4.2.0 +typing-extensions==4.8.0 # via # -c requirements/static/ci/py3.10/linux.txt # myst-docutils uc-micro-py==1.0.1 # via linkify-it-py -urllib3==1.26.6 +urllib3==1.26.18 # via # -c requirements/static/ci/py3.10/linux.txt # requests diff --git a/requirements/static/ci/py3.10/freebsd-crypto.txt b/requirements/static/ci/py3.10/freebsd-crypto.txt index 845b2de521bd..da243ac9caf3 100644 --- a/requirements/static/ci/py3.10/freebsd-crypto.txt +++ b/requirements/static/ci/py3.10/freebsd-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile 
--output-file=requirements/static/ci/py3.10/freebsd-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.10/freebsd.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/freebsd-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.10/freebsd.txt b/requirements/static/ci/py3.10/freebsd.txt index f67c0c60268d..95a14846949a 100644 --- a/requirements/static/ci/py3.10/freebsd.txt +++ b/requirements/static/ci/py3.10/freebsd.txt @@ -2,13 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/freebsd.txt --pip-args='--constraint=requirements/static/pkg/py3.10/freebsd.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -adal==1.2.5 - # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -20,7 +16,7 @@ asn1crypto==1.3.0 # oscrypto async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -29,311 +25,13 @@ attrs==20.3.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.26 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # 
azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.51 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure 
-azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # 
via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt 
-azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==3.1.6 # via + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -348,32 +46,35 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.24.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # requests certvalidator==0.11.1 # via vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # pygit2 # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp +charset-normalizer==3.2.0 + # via + # -c 
requirements/static/ci/../pkg/py3.10/freebsd.txt + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/freebsd.in click==7.1.2 @@ -381,15 +82,15 @@ click==7.1.2 clustershell==1.8.3 # via -r requirements/static/ci/common.in contextvars==2.4 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==3.3.2 +cryptography==41.0.7 # via - # adal - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/static/pkg/freebsd.in # etcd3-py # moto # paramiko @@ -399,6 +100,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt # -r requirements/static/pkg/freebsd.in # pytest-skip-markers @@ -406,11 +108,9 @@ dnspython==1.16.0 # via # -r requirements/static/ci/common.in # python-etcd -docker==5.0.3 - # via - # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -426,47 +126,59 @@ genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.2.1.post1 # via cassandra-driver -gitdb==4.0.5 +gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/static/ci/common.in -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes hglib==2.6.1 # via -r requirements/static/ci/freebsd.in -idna==2.8 +idna==3.2 # via + # -c 
requirements/static/ci/../pkg/py3.10/freebsd.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/static/pkg/freebsd.in iniconfig==1.0.1 # via pytest ipaddress==1.0.22 # via kubernetes -isodate==0.6.0 - # via msrest jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt # junos-eznc # moto jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -486,7 +198,9 @@ kubernetes==3.0.0 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/base.txt lxml==4.9.1 # via # junos-eznc @@ -495,6 +209,7 @@ mako==1.2.2 # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt # jinja2 # mako @@ -502,112 +217,23 @@ markupsafe==2.1.2 # werkzeug mercurial==6.0.1 # via -r requirements/static/ci/freebsd.in -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==5.0.0 # via + # -c 
requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.19 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # 
azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via # aiohttp @@ -616,13 +242,13 @@ ncclient==0.6.9 # via junos-eznc netaddr==0.7.19 # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib oscrypto==1.2.0 # via certvalidator -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -630,7 +256,7 @@ paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # junos-eznc # ncclient # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.9.0 # via yamllint @@ -641,9 +267,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -656,27 +285,31 @@ pyasn1==0.4.8 # rsa pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/freebsd.in # cffi pycryptodomex==3.9.8 - # via -r requirements/crypto.txt -pygit2==1.8.0 ; python_version >= 
"3.7" + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/freebsd.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" # via -r requirements/static/ci/common.in -pyjwt==2.4.0 - # via adal -pynacl==1.3.0 - # via paramiko -pyopenssl==19.1.0 +pynacl==1.5.0 # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/static/pkg/freebsd.in # etcd3-py pyparsing==3.0.9 - # via - # junos-eznc - # packaging + # via junos-eznc pyrsistent==0.17.3 # via jsonschema pyserial==3.4 @@ -688,13 +321,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -703,13 +336,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -719,14 +348,11 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r 
requirements/static/pkg/freebsd.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes @@ -735,46 +361,42 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/static/pkg/freebsd.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # moto # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt # clustershell # junos-eznc # kubernetes + # pytest-salt-factories # yamllint # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # pyvmomi - # requests-oauthlib # responses # vcert responses==0.10.6 @@ -790,29 +412,25 @@ scp==0.13.2 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/static/pkg/freebsd.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # google-auth - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # mock # more-itertools - # msrestazure # ncclient # paramiko - # pynacl - # pyopenssl # 
python-dateutil # pyvmomi # responses @@ -820,29 +438,35 @@ six==1.16.0 # vcert # virtualenv # websocket-client -smmap==3.0.4 +smmap==4.0.0 # via gitdb -sqlparse==0.4.2 +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # portend timelib==0.2.5 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # -r requirements/static/pkg/freebsd.in toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest -transitions==0.8.1 +transitions==0.8.9 # via junos-eznc -typing-extensions==4.2.0 +typing-extensions==4.8.0 # via # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -858,7 +482,9 @@ websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver @@ -871,9 +497,13 @@ yamlordereddictloader==0.4.0 yarl==1.7.2 # via aiohttp zc.lockfile==1.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # cherrypy zipp==3.12.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.10/freebsd.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.10/invoke.txt b/requirements/static/ci/py3.10/invoke.txt deleted file mode 100644 index 591666c84684..000000000000 --- a/requirements/static/ci/py3.10/invoke.txt +++ /dev/null @@ -1,14 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=requirements/static/ci/py3.10/invoke.txt 
--pip-args='--constraint=requirements/static/ci/py3.10/linux.txt' requirements/static/ci/invoke.in -# -blessings==1.7 - # via -r requirements/static/ci/invoke.in -invoke==1.4.1 - # via -r requirements/static/ci/invoke.in -pyyaml==5.4.1 - # via -r requirements/static/ci/invoke.in -six==1.16.0 - # via blessings diff --git a/requirements/static/ci/py3.10/lint.txt b/requirements/static/ci/py3.10/lint.txt index a564ae9b00ad..1d62be50e88a 100644 --- a/requirements/static/ci/py3.10/lint.txt +++ b/requirements/static/ci/py3.10/lint.txt @@ -2,500 +2,314 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/lint.txt --pip-args='--constraint=requirements/static/ci/py3.10/linux.txt' requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.7 +aiohttp==3.9.0 # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 - # via etcd3-py + # -c requirements/static/ci/py3.10/linux.txt + # etcd3-py aiosignal==1.2.0 - # via aiohttp + # via + # -c requirements/static/ci/py3.10/linux.txt + # aiohttp ansible-core==2.14.1 - # via ansible + # via + # -c requirements/static/ci/py3.10/linux.txt + # ansible ansible==7.1.0 ; python_version >= "3.9" - # via -r requirements/static/ci/linux.in -apache-libcloud==3.3.1 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in 
apscheduler==3.6.3 - # via python-telegram-bot -asn1crypto==1.4.0 # via + # -c requirements/static/ci/py3.10/linux.txt + # python-telegram-bot +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.10/linux.txt # certvalidator # oscrypto astroid==2.3.3 # via pylint async-timeout==4.0.2 - # via aiohttp -attrs==21.2.0 # via + # -c requirements/static/ci/py3.10/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.10/linux.txt # aiohttp # jsonschema -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.27 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # 
azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.52 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via 
azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # 
azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 - # via virtualenv -bcrypt==3.2.0 # via 
+ # -c requirements/static/ci/py3.10/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # moto boto==2.49.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in botocore==1.24.46 # via + # -c requirements/static/ci/py3.10/linux.txt # boto3 # moto # s3transfer cachetools==4.2.2 # via + # -c requirements/static/ci/py3.10/linux.txt # google-auth # python-telegram-bot -cassandra-driver==3.25.0 - # via -r requirements/static/ci/common.in -certifi==2022.12.7 +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # python-telegram-bot # requests certvalidator==0.11.1 - # via vcert + # via + # -c requirements/static/ci/py3.10/linux.txt + # vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # pygit2 # pynacl -charset-normalizer==2.0.4 +charset-normalizer==3.2.0 # via - # aiohttp + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # requests -cheetah3==3.2.6.post1 - # via -r requirements/static/ci/common.in +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # cherrypy 
cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in -click==8.0.1 - # via geomet +click==7.1.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # geomet clustershell==1.8.3 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in contextvars==2.4 - # via -r requirements/base.txt -croniter==1.0.15 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -cryptography==3.4.7 # via - # adal + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in # ansible-core - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common # etcd3-py # moto # paramiko # pyopenssl # vcert distlib==0.3.2 - # via virtualenv -distro==1.6.0 - # via -r requirements/base.txt -dnspython==2.1.0 # via + # -c requirements/static/ci/py3.10/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/base.txt +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # python-etcd -docker==5.0.0 - # via -r requirements/static/ci/common.in -etcd3-py==0.1.6 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +docker==6.1.3 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/lint.in +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r 
requirements/static/ci/common.in filelock==3.0.12 - # via virtualenv + # via + # -c requirements/static/ci/py3.10/linux.txt + # virtualenv frozenlist==1.3.0 # via + # -c requirements/static/ci/py3.10/linux.txt # aiohttp # aiosignal genshi==0.7.5 - # via -r requirements/static/ci/common.in -geomet==0.2.1.post1 - # via cassandra-driver + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" - # via -r requirements/static/ci/common.in -google-auth==2.0.1 - # via kubernetes + # via + # -c requirements/static/ci/py3.10/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # kubernetes hglib==2.6.1 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # etcd3-py # requests # yarl -immutables==0.16 - # via contextvars +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/static/pkg/linux.in -ipaddress==1.0.23 - # via kubernetes -isodate==0.6.0 - # via msrest + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.10/linux.txt + # kubernetes isort==4.3.21 # via pylint jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # 
jaraco.collections jaraco.collections==3.4.0 - # via cherrypy -jaraco.functools==3.3.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # ansible-core # junos-eznc # moto -jmespath==0.10.0 +jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 # botocore jsonschema==3.2.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in junos-eznc==2.6.0 ; sys_platform != "win32" and python_version <= "3.10" - # via -r requirements/static/ci/common.in -jxmlease==1.0.3 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -kazoo==2.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in keyring==5.7.1 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in 
kubernetes==3.0.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in lazy-object-proxy==1.4.3 # via astroid -libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via + # -c requirements/static/ci/py3.10/linux.txt # junos-eznc # ncclient mako==1.2.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # jinja2 # mako @@ -504,218 +318,199 @@ markupsafe==2.1.2 mccabe==0.6.1 # via pylint mercurial==6.0.1 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in modernize==0.5 # via saltpylint -more-itertools==8.8.0 +more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +moto==3.0.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in msgpack==1.0.2 - # via -r requirements/base.txt -msrest==0.6.21 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # 
azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # 
azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/base.txt multidict==6.0.2 # via + # -c requirements/static/ci/py3.10/linux.txt # aiohttp # yarl ncclient==0.6.9 - # via junos-eznc -netaddr==0.8.0 - # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib -oscrypto==1.2.1 - # via certvalidator -packaging==21.3 # via + # -c requirements/static/ci/py3.10/linux.txt + # junos-eznc +netaddr==0.7.19 + # via + # -c requirements/static/ci/py3.10/linux.txt + # junos-eznc +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # ansible-core + # docker paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # junos-eznc # ncclient # scp -passlib[bcrypt]==1.7.4 - # via -r requirements/static/ci/common.in +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in pathspec==0.9.0 - # via yamllint + # via + # -c requirements/static/ci/py3.10/linux.txt + # yamllint +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # watchdog platformdirs==2.2.0 - # via virtualenv -portend==2.7.1 - # via cherrypy + # via + # -c requirements/static/ci/py3.10/linux.txt + # virtualenv +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # cherrypy psutil==5.8.0 - # via -r requirements/base.txt -pyasn1-modules==0.2.8 - # via google-auth + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r 
requirements/base.txt +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.10/linux.txt + # google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/py3.10/linux.txt # pyasn1-modules # rsa pycodestyle==2.5.0 # via saltpylint pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in # cffi -pycryptodomex==3.10.1 - # via -r requirements/crypto.txt -pygit2==1.9.1 ; python_version >= "3.10" - # via -r requirements/static/ci/linux.in +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in pyiface==0.0.11 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in pyjwt==2.4.0 # via - # adal + # -c requirements/static/ci/py3.10/linux.txt # twilio pylint==2.4.4 # via # -r requirements/static/ci/lint.in # saltpylint -pymysql==1.0.2 ; python_version > "3.5" - # via -r requirements/static/ci/linux.in -pynacl==1.4.0 - # via paramiko -pyopenssl==20.0.1 +pymysql==1.0.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 
# via + # -c requirements/static/ci/py3.10/linux.txt + # junos-eznc +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.10/linux.txt + # jsonschema +pyserial==3.4 + # via + # -c requirements/static/ci/py3.10/linux.txt # junos-eznc - # packaging -pyrsistent==0.18.0 - # via jsonschema -pyserial==3.5 - # via junos-eznc python-consul==1.1.0 - # via -r requirements/static/ci/linux.in -python-dateutil==2.8.2 # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes # moto # vcert python-etcd==0.4.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/linux.in -python-telegram-bot==13.7 ; python_version > "3.5" - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # apscheduler # moto # python-telegram-bot # tempora # twilio -pyvmomi==7.0.2 - # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyvmomi==6.7.1.2018.12 # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # ansible-core # clustershell @@ -724,58 +519,78 @@ pyyaml==5.4.1 # yamllint # 
yamlordereddictloader pyzmq==23.2.0 - # via -r requirements/zeromq.txt + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/zeromq.txt redis-py-cluster==2.1.3 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in redis==3.5.3 - # via redis-py-cluster -requests-oauthlib==1.3.0 - # via msrest -requests==2.26.0 # via + # -c requirements/static/ci/py3.10/linux.txt + # redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # python-consul # pyvmomi - # requests-oauthlib # responses # twilio # vcert resolvelib==0.5.4 - # via ansible-core -responses==0.13.4 - # via moto + # via + # -c requirements/static/ci/py3.10/linux.txt + # ansible-core +responses==0.10.6 + # via + # -c requirements/static/ci/py3.10/linux.txt + # moto rfc3987==1.3.8 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 - # via google-auth -s3transfer==0.5.0 - # via boto3 -saltpylint==2020.9.28 + # via + # -c requirements/static/ci/py3.10/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.10/linux.txt + # boto3 +saltpylint==2023.8.3 # via -r requirements/static/ci/lint.in -scp==0.13.6 - # via junos-eznc +scp==0.13.2 + # via + # -c 
requirements/static/ci/py3.10/linux.txt + # junos-eznc semantic-version==2.9.0 - # via etcd3-py + # via + # -c requirements/static/ci/py3.10/linux.txt + # etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # apscheduler # astroid # bcrypt @@ -784,16 +599,13 @@ six==1.16.0 # etcd3-py # genshi # geomet - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # msrestazure + # more-itertools # ncclient # paramiko - # pynacl - # pyopenssl # python-consul # python-dateutil # pyvmomi @@ -803,64 +615,118 @@ six==1.16.0 # virtualenv # websocket-client slack-bolt==1.15.5 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in slack-sdk==3.19.5 - # via slack-bolt + # via + # -c requirements/static/ci/py3.10/linux.txt + # slack-bolt smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in strict-rfc3339==0.7 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # portend timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 # via + # -c requirements/static/ci/py3.10/linux.txt # -r requirements/static/ci/common.in # -r 
requirements/static/ci/lint.in tornado==6.1 - # via python-telegram-bot -transitions==0.8.8 - # via junos-eznc + # via + # -c requirements/static/ci/py3.10/linux.txt + # python-telegram-bot +transitions==0.8.9 + # via + # -c requirements/static/ci/py3.10/linux.txt + # junos-eznc twilio==7.9.2 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in tzlocal==3.0 - # via apscheduler -urllib3==1.26.6 # via + # -c requirements/static/ci/py3.10/linux.txt + # apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt # botocore + # docker # kubernetes # python-etcd # requests - # responses vcert==0.7.4 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in virtualenv==20.7.2 - # via -r requirements/static/ci/common.in -watchdog==2.1.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in websocket-client==0.40.0 # via + # -c requirements/static/ci/py3.10/linux.txt # docker # kubernetes -werkzeug==2.2.3 - # via moto +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.10/linux.txt + # moto wrapt==1.11.1 # via astroid xmltodict==0.12.0 - # via moto + # via + # -c requirements/static/ci/py3.10/linux.txt + # moto yamllint==1.26.3 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.10/linux.txt + # -r requirements/static/ci/linux.in yamlordereddictloader==0.4.0 - # via junos-eznc + # via + # -c requirements/static/ci/py3.10/linux.txt + # junos-eznc yarl==1.7.2 - # via 
aiohttp -zc.lockfile==2.0 - # via cherrypy -zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/py3.10/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -c requirements/static/ci/py3.10/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.10/linux-crypto.txt b/requirements/static/ci/py3.10/linux-crypto.txt index 76ea98e30f50..f824107616f4 100644 --- a/requirements/static/ci/py3.10/linux-crypto.txt +++ b/requirements/static/ci/py3.10/linux-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/linux-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.10/linux.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/linux-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.10/linux.txt b/requirements/static/ci/py3.10/linux.txt index d1ab1dee4177..2cdc0f15e6a6 100644 --- a/requirements/static/ci/py3.10/linux.txt +++ b/requirements/static/ci/py3.10/linux.txt @@ -2,13 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/linux.txt --pip-args='--constraint=requirements/static/pkg/py3.10/linux.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/linux.txt requirements/base.txt requirements/pytest.txt 
requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.3 - # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -26,7 +22,7 @@ asn1crypto==1.3.0 # oscrypto async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -35,316 +31,13 @@ attrs==20.3.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.18 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # 
azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.5 - # via azure -azure-datalake-store==0.0.44 - # via azure -azure-eventgrid==1.2.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.0 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.4.1 - # via azure-mgmt -azure-mgmt-containerregistry==2.7.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt 
-azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.5.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.0 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.6.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.0.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # 
azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.8.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.1.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.0.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.6 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.0 - # via - # azure-cosmosdb-table - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure 
-azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==3.1.6 # via + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -361,33 +54,36 @@ cachetools==4.2.2 # python-telegram-bot cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # python-telegram-bot # requests certvalidator==0.11.1 # via vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # pygit2 # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in click==7.1.1 @@ -395,16 +91,16 @@ click==7.1.1 clustershell==1.8.3 # via -r requirements/static/ci/common.in contextvars==2.4 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==3.3.2 +cryptography==41.0.7 # via - # adal + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/pkg/linux.in # ansible-core - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common # etcd3-py # 
moto # paramiko @@ -414,17 +110,16 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt # pytest-skip-markers dnspython==1.16.0 # via # -r requirements/static/ci/common.in # python-etcd -docker==5.0.3 - # via - # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -440,48 +135,60 @@ genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 # via cassandra-driver -gitdb==4.0.5 +gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/static/ci/common.in -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes hglib==2.6.1 # via -r requirements/static/ci/linux.in -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/pkg/linux.in iniconfig==1.0.1 # via pytest ipaddress==1.0.22 # via kubernetes -isodate==0.6.0 - # via msrest jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r 
requirements/base.txt # ansible-core # junos-eznc # moto jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -501,7 +208,9 @@ kubernetes==3.0.0 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via # junos-eznc @@ -510,6 +219,7 @@ mako==1.2.2 # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt # jinja2 # mako @@ -517,109 +227,23 @@ markupsafe==2.1.2 # werkzeug mercurial==6.0.1 # via -r requirements/static/ci/linux.in -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.14 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.3 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # 
azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via # aiohttp @@ -628,14 +252,14 @@ ncclient==0.6.9 # via junos-eznc netaddr==0.7.19 # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib oscrypto==1.2.0 # via certvalidator packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt # ansible-core + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -643,7 +267,7 @@ paramiko==2.10.1 ; 
sys_platform != "win32" and sys_platform != "darwin" # junos-eznc # ncclient # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via -r requirements/static/ci/common.in pathspec==0.9.0 # via yamllint @@ -654,9 +278,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -669,27 +296,33 @@ pyasn1==0.4.8 # rsa pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in # cffi pycryptodomex==3.9.8 - # via -r requirements/crypto.txt -pygit2==1.9.1 ; python_version >= "3.10" + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/linux.in pyiface==0.0.11 # via -r requirements/static/ci/linux.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" # via -r requirements/static/ci/common.in pyjwt==2.4.0 - # via - # adal - # twilio -pymysql==1.0.2 ; python_version > "3.5" + # via twilio +pymysql==1.0.2 # via -r requirements/static/ci/linux.in -pynacl==1.3.0 - # via paramiko -pyopenssl==19.1.0 +pynacl==1.5.0 # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 @@ -705,13 +338,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" +pytest-salt-factories==1.0.0rc28 
# via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -720,13 +353,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -736,16 +365,13 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-consul==1.1.0 # via -r requirements/static/ci/linux.in python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes @@ -754,58 +380,53 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/linux.in -python-telegram-bot==13.7 ; python_version > "3.5" + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 # via -r requirements/static/ci/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # apscheduler # moto # python-telegram-bot # tempora # twilio - # tzlocal pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt # ansible-core # clustershell # junos-eznc # kubernetes + # pytest-salt-factories # yamllint # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r 
requirements/zeromq.txt # pytest-salt-factories redis-py-cluster==2.1.3 # via -r requirements/static/ci/linux.in redis==3.5.3 # via redis-py-cluster -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # python-consul # pyvmomi - # requests-oauthlib # responses # twilio # vcert @@ -816,7 +437,9 @@ responses==0.10.6 rfc3987==1.3.8 # via -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 # via google-auth s3transfer==0.5.2 @@ -826,29 +449,26 @@ scp==0.13.2 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # apscheduler # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # google-auth - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # mock # more-itertools # ncclient # paramiko - # pynacl - # pyopenssl # python-consul # python-dateutil # pyvmomi @@ -861,35 +481,41 @@ slack-bolt==1.15.5 # via -r requirements/static/ci/linux.in slack-sdk==3.19.5 # via slack-bolt -smmap==3.0.4 +smmap==4.0.0 # via gitdb -sqlparse==0.4.2 +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # portend timelib==0.2.5 - # via -r 
requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest tornado==6.1 # via python-telegram-bot -transitions==0.8.1 +transitions==0.8.9 # via junos-eznc twilio==7.9.2 # via -r requirements/static/ci/linux.in -typing-extensions==4.2.0 +typing-extensions==4.8.0 # via # pytest-shell-utilities # pytest-system-statistics -tzlocal==2.1 +tzlocal==3.0 # via apscheduler -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -905,7 +531,9 @@ websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver @@ -918,9 +546,13 @@ yamlordereddictloader==0.4.0 yarl==1.7.2 # via aiohttp zc.lockfile==1.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # cherrypy zipp==3.6.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.10/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.10/pkgtests-windows.txt b/requirements/static/ci/py3.10/pkgtests-windows.txt deleted file mode 100644 index 2c2244b00323..000000000000 --- a/requirements/static/ci/py3.10/pkgtests-windows.txt +++ /dev/null @@ -1,168 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=requirements/static/ci/py3.10/pkgtests-windows.txt requirements/base.txt requirements/static/ci/pkgtests-windows.in requirements/zeromq.txt -# -attrs==22.2.0 - # via - # pytest - # pytest-salt-factories - # pytest-shell-utilities - # pytest-skip-markers - # pytest-system-statistics -autocommand==2.2.2 - # via jaraco.text -certifi==2022.12.7 - # via requests 
-cffi==1.15.1 - # via clr-loader -charset-normalizer==3.0.1 - # via requests -cheroot==9.0.0 - # via cherrypy -cherrypy==18.8.0 - # via -r requirements/static/ci/pkgtests-windows.in -clr-loader==0.2.5 - # via pythonnet -colorama==0.4.6 - # via pytest -contextvars==2.4 - # via -r requirements/base.txt -distlib==0.3.6 - # via virtualenv -distro==1.8.0 - # via - # -r requirements/base.txt - # pytest-skip-markers -exceptiongroup==1.1.0 - # via pytest -filelock==3.9.0 - # via virtualenv -idna==3.4 - # via requests -immutables==0.19 - # via contextvars -inflect==6.0.2 - # via jaraco.text -iniconfig==2.0.0 - # via pytest -jaraco.classes==3.2.3 - # via jaraco.collections -jaraco.collections==3.8.0 - # via cherrypy -jaraco.context==4.3.0 - # via jaraco.text -jaraco.functools==3.5.2 - # via - # cheroot - # jaraco.text - # tempora -jaraco.text==3.11.1 - # via jaraco.collections -jinja2==3.1.2 - # via -r requirements/base.txt -jmespath==1.0.1 - # via -r requirements/base.txt -looseversion==1.0.3 - # via -r requirements/base.txt -markupsafe==2.1.2 - # via - # -r requirements/base.txt - # jinja2 -more-itertools==9.0.0 - # via - # cheroot - # cherrypy - # jaraco.classes - # jaraco.functools - # jaraco.text -msgpack==1.0.4 - # via - # -r requirements/base.txt - # pytest-salt-factories -packaging==23.0 - # via - # -r requirements/base.txt - # pytest -platformdirs==2.6.2 - # via virtualenv -pluggy==1.0.0 - # via pytest -portend==3.1.0 - # via cherrypy -psutil==5.9.4 - # via - # -r requirements/base.txt - # pytest-salt-factories - # pytest-shell-utilities - # pytest-system-statistics -pycparser==2.21 - # via cffi -pycryptodomex==3.17 - # via -r requirements/crypto.txt -pydantic==1.10.4 - # via inflect -pytest-helpers-namespace==2021.12.29 - # via - # pytest-salt-factories - # pytest-shell-utilities -pytest-salt-factories==1.0.0rc17 - # via -r requirements/static/ci/pkgtests-windows.in -pytest-shell-utilities==1.7.0 - # via pytest-salt-factories -pytest-skip-markers==1.4.0 - # via - # 
pytest-salt-factories - # pytest-shell-utilities - # pytest-system-statistics -pytest-system-statistics==1.0.2 - # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via pytest-salt-factories -pytest==7.2.1 - # via - # pytest-helpers-namespace - # pytest-salt-factories - # pytest-shell-utilities - # pytest-skip-markers - # pytest-system-statistics - # pytest-tempdir -pythonnet==3.0.1 ; python_version >= "3.7" - # via -r requirements/static/ci/pkgtests-windows.in -pytz==2022.7.1 - # via tempora -pywin32==305 - # via - # pytest-skip-markers - # wmi -pyyaml==6.0 - # via -r requirements/base.txt -pyzmq==25.0.0 ; sys_platform == "win32" - # via - # -r requirements/zeromq.txt - # pytest-salt-factories -requests==2.28.2 - # via -r requirements/base.txt -six==1.16.0 - # via cheroot -tempora==5.2.1 - # via portend -tomli==2.0.1 - # via pytest -typing-extensions==4.4.0 - # via - # pydantic - # pytest-shell-utilities - # pytest-system-statistics -urllib3==1.26.14 - # via requests -virtualenv==20.18.0 - # via pytest-salt-factories -wmi==1.5.1 ; sys_platform == "win32" - # via -r requirements/static/ci/pkgtests-windows.in -zc.lockfile==2.0 - # via cherrypy - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/requirements/static/ci/py3.10/pkgtests.txt b/requirements/static/ci/py3.10/pkgtests.txt deleted file mode 100644 index cf73559bfdc3..000000000000 --- a/requirements/static/ci/py3.10/pkgtests.txt +++ /dev/null @@ -1,151 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=requirements/static/ci/py3.10/pkgtests.txt requirements/base.txt requirements/static/ci/pkgtests.in requirements/zeromq.txt -# -attrs==22.2.0 - # via - # pytest - # pytest-salt-factories - # pytest-shell-utilities - # pytest-skip-markers - # pytest-system-statistics -autocommand==2.2.2 - # via jaraco.text -certifi==2022.12.7 - # via requests -charset-normalizer==3.0.1 - # via requests 
-cheroot==9.0.0 - # via cherrypy -cherrypy==18.8.0 - # via -r requirements/static/ci/pkgtests.in -contextvars==2.4 - # via -r requirements/base.txt -distlib==0.3.6 - # via virtualenv -distro==1.8.0 - # via - # -r requirements/base.txt - # pytest-skip-markers -exceptiongroup==1.1.0 - # via pytest -filelock==3.9.0 - # via virtualenv -idna==3.4 - # via requests -immutables==0.19 - # via contextvars -inflect==6.0.2 - # via jaraco.text -iniconfig==2.0.0 - # via pytest -jaraco.classes==3.2.3 - # via jaraco.collections -jaraco.collections==3.8.0 - # via cherrypy -jaraco.context==4.2.0 - # via jaraco.text -jaraco.functools==3.5.2 - # via - # cheroot - # jaraco.text -jaraco.text==3.11.0 - # via jaraco.collections -jinja2==3.1.2 - # via -r requirements/base.txt -jmespath==1.0.1 - # via -r requirements/base.txt -looseversion==1.0.3 - # via -r requirements/base.txt -markupsafe==2.1.1 - # via - # -r requirements/base.txt - # jinja2 -more-itertools==9.0.0 - # via - # cheroot - # cherrypy - # jaraco.classes - # jaraco.functools - # jaraco.text -msgpack==1.0.4 - # via - # -r requirements/base.txt - # pytest-salt-factories -packaging==23.0 - # via - # -r requirements/base.txt - # pytest -platformdirs==2.6.2 - # via virtualenv -pluggy==1.0.0 - # via pytest -portend==3.1.0 - # via cherrypy -psutil==5.9.4 - # via - # -r requirements/base.txt - # pytest-salt-factories - # pytest-shell-utilities - # pytest-system-statistics -pycryptodomex==3.16.0 - # via -r requirements/crypto.txt -pydantic==1.10.4 - # via inflect -pytest-helpers-namespace==2021.12.29 - # via - # pytest-salt-factories - # pytest-shell-utilities -pytest-salt-factories==1.0.0rc17 - # via -r requirements/static/ci/pkgtests.in -pytest-shell-utilities==1.7.0 - # via pytest-salt-factories -pytest-skip-markers==1.4.0 - # via - # pytest-salt-factories - # pytest-shell-utilities - # pytest-system-statistics -pytest-system-statistics==1.0.2 - # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via pytest-salt-factories 
-pytest==7.2.1 - # via - # pytest-helpers-namespace - # pytest-salt-factories - # pytest-shell-utilities - # pytest-skip-markers - # pytest-system-statistics - # pytest-tempdir -pytz==2022.7.1 - # via tempora -pyyaml==6.0 - # via -r requirements/base.txt -pyzmq==25.0.0 - # via - # -r requirements/zeromq.txt - # pytest-salt-factories -requests==2.28.2 - # via -r requirements/base.txt -six==1.16.0 - # via cheroot -tempora==5.2.0 - # via portend -tomli==2.0.1 - # via pytest -typing-extensions==4.4.0 - # via - # pydantic - # pytest-shell-utilities - # pytest-system-statistics -urllib3==1.26.14 - # via requests -virtualenv==20.17.1 - # via pytest-salt-factories -zc.lockfile==2.0 - # via cherrypy - -# The following packages are considered to be unsafe in a requirements file: -# setuptools diff --git a/requirements/static/ci/py3.10/tools-virustotal.txt b/requirements/static/ci/py3.10/tools-virustotal.txt new file mode 100644 index 000000000000..e190ae385378 --- /dev/null +++ b/requirements/static/ci/py3.10/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.10/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../ci/py3.10/tools.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../ci/py3.10/tools.txt + # requests +idna==3.2 + # via + # -c requirements/static/ci/../ci/py3.10/tools.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../ci/py3.10/tools.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../ci/py3.10/tools.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git a/requirements/static/ci/py3.10/tools.txt b/requirements/static/ci/py3.10/tools.txt index ae921eb91f10..e390aabbffdd 100644 --- a/requirements/static/ci/py3.10/tools.txt +++ 
b/requirements/static/ci/py3.10/tools.txt @@ -2,25 +2,23 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/tools.txt --pip-args='--constraint=requirements/static/ci/py3.10/linux.txt' requirements/static/ci/tools.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/tools.txt requirements/static/ci/tools.in # -attrs==22.1.0 +attrs==20.3.0 # via # -r requirements/static/ci/tools.in # python-tools-scripts -boto3==1.21.46 +boto3==1.26.147 # via -r requirements/static/ci/tools.in -botocore==1.24.46 +botocore==1.29.147 # via # boto3 # s3transfer -certifi==2022.12.7 +certifi==2023.7.22 # via requests -charset-normalizer==3.0.1 +charset-normalizer==3.2.0 # via requests -commonmark==0.9.1 - # via rich -idna==3.4 +idna==3.2 # via requests jinja2==3.1.2 # via -r requirements/static/ci/tools.in @@ -28,29 +26,33 @@ jmespath==1.0.1 # via # boto3 # botocore +markdown-it-py==2.2.0 + # via rich markupsafe==2.1.2 # via jinja2 -packaging==23.0 +mdurl==0.1.2 + # via markdown-it-py +packaging==22.0 # via -r requirements/static/ci/tools.in pygments==2.13.0 # via rich -python-dateutil==2.8.2 +python-dateutil==2.8.1 # via botocore -python-tools-scripts==0.11.1 +python-tools-scripts==0.18.6 # via -r requirements/static/ci/tools.in -pyyaml==6.0 +pyyaml==6.0.1 # via -r requirements/static/ci/tools.in -requests==2.28.2 +requests==2.31.0 # via python-tools-scripts -rich==12.5.1 +rich==13.3.5 # via python-tools-scripts -s3transfer==0.5.2 +s3transfer==0.6.1 # via boto3 six==1.16.0 # via python-dateutil -typing-extensions==4.4.0 +typing-extensions==4.2.0 # via python-tools-scripts -urllib3==1.26.12 +urllib3==1.26.18 # via # botocore # requests diff --git a/requirements/static/ci/py3.10/windows-crypto.txt b/requirements/static/ci/py3.10/windows-crypto.txt index ad36c32ac360..18248fac6f10 100644 --- a/requirements/static/ci/py3.10/windows-crypto.txt +++ 
b/requirements/static/ci/py3.10/windows-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/windows-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.10/windows.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/windows-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.37.1 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.10/windows.txt b/requirements/static/ci/py3.10/windows.txt index 265b2dfb66da..08d00aea31b4 100644 --- a/requirements/static/ci/py3.10/windows.txt +++ b/requirements/static/ci/py3.10/windows.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.10/windows.txt --pip-args='--constraint=requirements/static/pkg/py3.10/windows.txt' requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.10/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.3 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.3.1 # via aiohttp async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -20,8 +20,8 @@ attrs==20.3.0 # pytest-skip-markers # pytest-system-statistics bcrypt==4.0.1 - # via passlib -boto3==1.21.46 ; python_version >= "3.6" + # via -r requirements/static/ci/common.in +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -36,43 +36,54 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c 
requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt # kubernetes # requests cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt # clr-loader # cryptography # pygit2 -chardet==3.0.4 - # via requests -charset-normalizer==2.1.1 - # via aiohttp + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # requests cheetah3==3.2.6.post1 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt click==7.1.2 # via geomet clr-loader==0.2.4 - # via pythonnet + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # pythonnet clustershell==1.8.3 # via -r requirements/static/ci/common.in colorama==0.4.1 # via pytest contextvars==2.4 - # via -r requirements/base.txt -cryptography==3.4.7 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/base.txt +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/windows.txt # etcd3-py # moto @@ -82,6 +93,7 @@ distlib==0.3.6 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt # pytest-skip-markers dmidecode==0.9.0 @@ -90,11 +102,9 @@ dnspython==1.16.0 # via # -r requirements/static/ci/common.in # python-etcd -docker-pycreds==0.4.0 - # via docker -docker==2.7.0 - # via -r requirements/static/ci/common.in -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -111,45 +121,64 @@ genshi==0.7.5 geomet==0.1.2 # via 
cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/windows.txt iniconfig==1.0.1 # via pytest ioloop==0.1a0 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/windows.txt ipaddress==1.0.22 # via kubernetes jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # jaraco.collections jaraco.collections==3.3.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.0 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt # moto jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -161,32 +190,38 @@ keyring==5.7.1 kubernetes==3.0.0 # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/base.txt lxml==4.9.1 
- # via -r requirements/windows.txt -mako==1.2.2 # via - # -r requirements/static/ci/common.in + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/windows.txt -markupsafe==2.1.1 +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt # jinja2 # mako # moto # werkzeug -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==8.2.0 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt # pytest-salt-factories multidict==6.0.2 @@ -195,11 +230,13 @@ multidict==6.0.2 # yarl ntlm-auth==1.5.0 # via requests-ntlm -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt + # docker # pytest -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via -r requirements/static/ci/common.in patch==1.16 # via -r requirements/static/ci/windows.in @@ -212,9 +249,12 @@ platformdirs==2.5.4 pluggy==0.13.0 # via pytest portend==2.6 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -223,28 +263,39 @@ pyasn1-modules==0.2.4 # via google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/windows.txt # pyasn1-modules # rsa pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt # cffi pycryptodomex==3.10.1 - # via -r requirements/crypto.txt 
-pygit2==1.9.1 ; python_version >= "3.7" + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/windows.in pymssql==2.2.7 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/windows.txt pymysql==1.0.2 - # via -r requirements/windows.txt -pyopenssl==20.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/windows.txt +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/windows.txt # etcd3-py -pyparsing==3.0.9 - # via packaging pyrsistent==0.17.3 # via jsonschema pytest-custom-exit-code==0.3.0 @@ -254,13 +305,13 @@ pytest-helpers-namespace==2021.12.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.6 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc21 ; sys_platform == "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.7.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.3.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -269,13 +320,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==2.1.0 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -285,10 +332,10 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-dateutil==2.8.1 # via + # -c 
requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/windows.txt # botocore # kubernetes @@ -296,36 +343,47 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/windows.txt pythonnet==3.0.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/windows.txt pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # moto # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in pywin32==305 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/windows.txt + # docker # pytest-skip-markers # wmi pywinrm==0.4.1 # via -r requirements/static/ci/windows.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt # clustershell # kubernetes + # pytest-salt-factories # yamllint -pyzmq==25.0.0 ; sys_platform == "win32" +pyzmq==25.0.2 ; sys_platform == "win32" # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/zeromq.txt # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt @@ -350,36 +408,40 @@ sed==0.3.1 semantic-version==2.10.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/windows.txt six==1.15.0 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # cassandra-driver # cheroot - # docker - # docker-pycreds # etcd3-py # genshi # geomet - # google-auth # jsonschema # kubernetes - # mock - # pyopenssl # python-dateutil # pyvmomi # pywinrm # responses # websocket-client 
smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # gitdb +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # portend timelib==0.2.5 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/windows.txt toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 @@ -388,10 +450,12 @@ typing-extensions==4.4.0 # via # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt # -r requirements/windows.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -405,14 +469,20 @@ websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver wheel==0.38.4 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/windows.txt wmi==1.5.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # -r requirements/windows.txt xmltodict==0.12.0 # via # moto @@ -422,9 +492,13 @@ yamllint==1.28.0 yarl==1.8.1 # via aiohttp zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # cherrypy zipp==3.12.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.10/windows.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.11/changelog.txt b/requirements/static/ci/py3.11/changelog.txt new file mode 100644 index 000000000000..2aa97aa5da28 --- /dev/null +++ b/requirements/static/ci/py3.11/changelog.txt @@ -0,0 +1,36 
@@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/changelog.txt requirements/static/ci/changelog.in +# +click-default-group==1.2.2 + # via towncrier +click==7.1.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # click-default-group + # towncrier +incremental==17.5.0 + # via towncrier +jinja2==3.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # towncrier +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/changelog.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # jinja2 +packaging==22.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/changelog.in +towncrier==22.12.0 + # via -r requirements/static/ci/changelog.in + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/cloud.txt b/requirements/static/ci/py3.11/cloud.txt new file mode 100644 index 000000000000..e8493764de5c --- /dev/null +++ b/requirements/static/ci/py3.11/cloud.txt @@ -0,0 +1,685 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.9.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py +aiosignal==1.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/cloud.in + # -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # certvalidator + # oscrypto +attrs==23.1.0 + # via 
+ # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # -c requirements/static/ci/py3.11/linux.txt + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # 
-r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # geomet +clustershell==1.8.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py + # moto + # paramiko + # pyopenssl + # pyspnego + # requests-ntlm + # smbprotocol + # vcert +distlib==0.3.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +filelock==3.0.12 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +flaky==3.7.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # aiosignal +genshi==0.7.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cassandra-driver +gitdb==4.0.7 + # via + # -c 
requirements/static/ci/py3.11/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.11/linux.txt + # kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r 
requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +keyring==5.7.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mock==5.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # yarl +netaddr==0.7.19 + # via -r requirements/static/ci/cloud.in +ntlm-auth==1.3.0 + # via 
requests-ntlm +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +pluggy==0.13.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +profitbricks==4.1.3 + # via -r requirements/static/ci/cloud.in +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pyinotify==0.9.6 ; sys_platform != "win32" and 
sys_platform != "darwin" and platform_system != "openbsd" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pypsexec==0.1.0 + # via -r requirements/static/ci/cloud.in +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # jsonschema +pyspnego==0.8.0 + # via + # -r requirements/static/ci/cloud.in + # smbprotocol +pytest-custom-exit-code==0.3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest-salt-factories +pytest-timeout==1.4.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt +pytest==7.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # 
pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pywinrm==0.3.0 + # via -r requirements/static/ci/cloud.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests-ntlm==1.1.0 + # via pywinrm +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # profitbricks + # pyvmomi + # pywinrm + # requests-ntlm + # responses + # vcert +responses==0.10.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto +rfc3987==1.3.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c 
requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # boto3 +semantic-version==2.9.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # profitbricks + # pypsexec + # python-dateutil + # pyvmomi + # pywinrm + # responses + # vcert + # virtualenv + # websocket-client +smbprotocol==1.10.1 + # via + # -r requirements/static/ci/cloud.in + # pypsexec +smmap==4.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c 
requirements/static/ci/py3.11/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # docker + # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto + # pywinrm +yarl==1.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/darwin-crypto.txt b/requirements/static/ci/py3.11/darwin-crypto.txt new file mode 100644 index 000000000000..c0aacf410771 --- /dev/null +++ b/requirements/static/ci/py3.11/darwin-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/darwin-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git 
a/requirements/static/ci/py3.11/darwin.txt b/requirements/static/ci/py3.11/darwin.txt new file mode 100644 index 000000000000..2607474a10f2 --- /dev/null +++ b/requirements/static/ci/py3.11/darwin.txt @@ -0,0 +1,481 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via -r requirements/static/ci/common.in +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c 
requirements/static/ci/../pkg/py3.11/darwin.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +click==7.0 + # via geomet +clustershell==1.8.1 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # etcd3-py + # moto + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/darwin.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +iniconfig==1.0.1 + # via pytest 
+ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +linode-python==1.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/darwin.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==8.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # 
aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # docker + # pytest +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.1 + # via pytest +portend==2.6 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 + # via -r requirements/static/ci/darwin.in +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics 
+pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint + # yamlordereddictloader +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # responses + # vcert + # vultr +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # bcrypt + # cassandra-driver + # cheroot + # 
etcd3-py + # genshi + # geomet + # jsonschema + # kubernetes + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +smmap==4.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.2.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +vultr==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # -r requirements/darwin.txt +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/darwin.in +yamlordereddictloader==0.4.0 + # via -r requirements/static/ci/darwin.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.11/darwin.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/docs.txt b/requirements/static/ci/py3.11/docs.txt new file mode 100644 index 
000000000000..1a2bac96dca8 --- /dev/null +++ b/requirements/static/ci/py3.11/docs.txt @@ -0,0 +1,196 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt +# +alabaster==0.7.12 + # via sphinx +babel==2.9.1 + # via sphinx +certifi==2023.07.22 + # via + # -c requirements/static/ci/py3.11/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # requests +cheroot==8.5.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/docs.in +contextvars==2.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +distro==1.5.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +docutils==0.19 + # via sphinx +idna==3.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # requests +imagesize==1.4.1 + # via sphinx +immutables==0.15 + # via + # -c requirements/static/ci/py3.11/linux.txt + # contextvars +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # myst-docutils + # sphinx +jmespath==1.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +linkify-it-py==1.0.3 + # via myst-docutils +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r 
requirements/base.txt +markdown-it-py==2.2.0 + # via + # mdit-py-plugins + # myst-docutils +markupsafe==2.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # jinja2 +mdit-py-plugins==0.3.3 + # via myst-docutils +mdurl==0.1.2 + # via markdown-it-py +more-itertools==5.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +myst-docutils[linkify]==0.18.1 + # via -r requirements/static/ci/docs.in +packaging==22.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # sphinx +portend==2.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/crypto.txt +pyenchant==3.2.2 + # via sphinxcontrib-spelling +pygments==2.14.0 + # via sphinx +pytz==2022.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # babel + # tempora +pyyaml==6.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # myst-docutils +pyzmq==23.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/zeromq.txt +requests==2.31.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # sphinx +six==1.16.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # more-itertools + # sphinxcontrib.httpdomain +snowballstemmer==2.1.0 + # via sphinx +sphinx==6.1.3 ; python_version >= "3.9" + # via + # -r requirements/static/ci/docs.in + # sphinxcontrib-spelling + # sphinxcontrib.httpdomain +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via 
sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +sphinxcontrib-spelling==7.7.0 + # via -r requirements/static/ci/docs.in +sphinxcontrib.httpdomain==1.8.1 + # via -r requirements/static/ci/docs.in +tempora==4.1.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # portend +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # myst-docutils +uc-micro-py==1.0.1 + # via linkify-it-py +urllib3==1.26.18 + # via + # -c requirements/static/ci/py3.11/linux.txt + # requests +zc.lockfile==1.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/freebsd-crypto.txt b/requirements/static/ci/py3.11/freebsd-crypto.txt new file mode 100644 index 000000000000..33399b9ff519 --- /dev/null +++ b/requirements/static/ci/py3.11/freebsd-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/freebsd-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/freebsd.txt b/requirements/static/ci/py3.11/freebsd.txt new file mode 100644 index 000000000000..4f701987b635 --- /dev/null +++ b/requirements/static/ci/py3.11/freebsd.txt @@ -0,0 +1,473 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +apache-libcloud==2.5.0 ; 
sys_platform != "win32" + # via -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.24.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/freebsd.in +click==7.1.2 + # via geomet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert 
+distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/pkg/freebsd.in + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.2.1.post1 + # via cassandra-driver +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/freebsd.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore 
+jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/freebsd.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # cherrypy +psutil==5.8.0 + # via + # -c 
requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/freebsd.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 + # via -r requirements/static/ci/freebsd.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # 
pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # responses + # vcert +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +smmap==4.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c 
requirements/static/ci/../pkg/py3.11/freebsd.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # -r requirements/static/pkg/freebsd.in +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.8.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/freebsd.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.11/freebsd.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/lint.txt b/requirements/static/ci/py3.11/lint.txt new file mode 100644 index 000000000000..cb69e4dad8ca --- /dev/null +++ b/requirements/static/ci/py3.11/lint.txt @@ -0,0 +1,682 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.9.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py +aiosignal==1.2.0 + # via + # -c 
requirements/static/ci/py3.11/linux.txt + # aiohttp +ansible-core==2.14.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # ansible +ansible==7.1.0 ; python_version >= "3.9" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +apscheduler==3.6.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # python-telegram-bot +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # certvalidator + # oscrypto +astroid==2.3.3 + # via pylint +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # jsonschema +backports.entry-points-selectable==1.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # -c requirements/static/ci/py3.11/linux.txt + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth + # python-telegram-bot +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # python-telegram-bot + # requests +certvalidator==0.11.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # 
-r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # geomet +clustershell==1.8.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # ansible-core + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/lint.in +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r 
requirements/static/ci/common.in +filelock==3.0.12 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +frozenlist==1.3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # aiosignal +genshi==0.7.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/py3.11/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # kubernetes +hglib==2.6.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.11/linux.txt + # kubernetes +isort==4.3.21 + # via pylint +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c 
requirements/static/ci/py3.11/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +keyring==5.7.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +lazy-object-proxy==1.4.3 + # via astroid +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mccabe==0.6.1 + # via pylint +mercurial==6.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +modernize==0.5 + # via saltpylint +more-itertools==5.0.0 + # via + # -c 
requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +multidict==6.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp + # yarl +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # docker +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pathspec==0.9.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # yamllint +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # virtualenv +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # pyasn1-modules + # rsa +pycodestyle==2.5.0 + # via saltpylint +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt 
+ # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +pyiface==0.0.11 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pyjwt==2.4.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # twilio +pylint==2.4.4 + # via + # -r requirements/static/ci/lint.in + # saltpylint +pymysql==1.0.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # jsonschema +python-consul==1.1.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # 
via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # apscheduler + # moto + # python-telegram-bot + # tempora + # twilio +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # clustershell + # kubernetes + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/zeromq.txt +redis-py-cluster==2.1.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +redis==3.5.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # python-consul + # pyvmomi + # responses + # twilio + # vcert +resolvelib==0.5.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # ansible-core +responses==0.10.6 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto +rfc3987==1.3.8 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # boto3 +saltpylint==2023.8.3 + # via -r requirements/static/ci/lint.in 
+semantic-version==2.9.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # apscheduler + # astroid + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-consul + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +slack-bolt==1.15.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +slack-sdk==3.19.5 + # via + # -c requirements/static/ci/py3.11/linux.txt + # slack-bolt +smmap==4.0.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/ci/lint.in +tornado==6.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # python-telegram-bot +twilio==7.9.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +tzlocal==3.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt 
+ # -c requirements/static/ci/py3.11/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # docker + # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto +wrapt==1.11.1 + # via astroid +xmltodict==0.12.0 + # via + # -c requirements/static/ci/py3.11/linux.txt + # moto +yamllint==1.26.3 + # via + # -c requirements/static/ci/py3.11/linux.txt + # -r requirements/static/ci/linux.in +yarl==1.7.2 + # via + # -c requirements/static/ci/py3.11/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -c requirements/static/ci/py3.11/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/linux-crypto.txt b/requirements/static/ci/py3.11/linux-crypto.txt new file mode 100644 index 000000000000..89873b20c9e1 --- /dev/null +++ b/requirements/static/ci/py3.11/linux-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/linux-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via 
-r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/linux.txt b/requirements/static/ci/py3.11/linux.txt new file mode 100644 index 000000000000..633139c5000b --- /dev/null +++ b/requirements/static/ci/py3.11/linux.txt @@ -0,0 +1,522 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +ansible-core==2.14.1 + # via ansible +ansible==7.1.0 ; python_version >= "3.9" + # via -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +apscheduler==3.6.3 + # via python-telegram-bot +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # google-auth + # python-telegram-bot +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # python-telegram-bot + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl 
+charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via geomet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # ansible-core + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/linux.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r 
requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/linux.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r 
requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 + # via -r requirements/static/ci/linux.in +pyiface==0.0.11 + # via -r requirements/static/ci/linux.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via -r requirements/static/ci/common.in +pyjwt==2.4.0 + # via twilio +pymysql==1.0.2 + # via -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r 
requirements/static/pkg/linux.in + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-consul==1.1.0 + # via -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via -r requirements/static/ci/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # apscheduler + # moto + # python-telegram-bot + # tempora + # twilio +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # ansible-core + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==23.2.0 + # via + # -c 
requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +redis-py-cluster==2.1.3 + # via -r requirements/static/ci/linux.in +redis==3.5.3 + # via redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # python-consul + # pyvmomi + # responses + # twilio + # vcert +resolvelib==0.5.4 + # via ansible-core +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # apscheduler + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-consul + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +slack-bolt==1.15.5 + # via -r requirements/static/ci/linux.in +slack-sdk==3.19.5 + # via slack-bolt +smmap==4.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via -r requirements/static/ci/common.in +tornado==6.1 + # via python-telegram-bot +twilio==7.9.2 + # via -r requirements/static/ci/linux.in +typing-extensions==4.8.0 + # via + # 
pytest-shell-utilities + # pytest-system-statistics +tzlocal==3.0 + # via apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/linux.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.11/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.11/tools-virustotal.txt b/requirements/static/ci/py3.11/tools-virustotal.txt new file mode 100644 index 000000000000..53b23befa512 --- /dev/null +++ b/requirements/static/ci/py3.11/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.11/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../ci/py3.11/tools.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../ci/py3.11/tools.txt + # requests +idna==3.2 + # via + # -c requirements/static/ci/../ci/py3.11/tools.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../ci/py3.11/tools.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../ci/py3.11/tools.txt + # requests +virustotal3==1.0.8 + 
# via -r requirements/static/ci/tools-virustotal.in diff --git a/requirements/static/ci/py3.11/tools.txt b/requirements/static/ci/py3.11/tools.txt new file mode 100644 index 000000000000..acc8dbef504c --- /dev/null +++ b/requirements/static/ci/py3.11/tools.txt @@ -0,0 +1,54 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/tools.txt requirements/static/ci/tools.in +# +attrs==22.1.0 + # via + # -r requirements/static/ci/tools.in + # python-tools-scripts +boto3==1.21.46 + # via -r requirements/static/ci/tools.in +botocore==1.24.46 + # via + # boto3 + # s3transfer +certifi==2023.07.22 + # via requests +charset-normalizer==3.2.0 + # via requests +commonmark==0.9.1 + # via rich +idna==3.2 + # via requests +jinja2==3.1.2 + # via -r requirements/static/ci/tools.in +jmespath==1.0.1 + # via + # boto3 + # botocore +markupsafe==2.1.2 + # via jinja2 +packaging==22.0 + # via -r requirements/static/ci/tools.in +pygments==2.13.0 + # via rich +python-dateutil==2.8.1 + # via botocore +python-tools-scripts==0.18.6 + # via -r requirements/static/ci/tools.in +pyyaml==6.0.1 + # via -r requirements/static/ci/tools.in +requests==2.31.0 + # via python-tools-scripts +rich==12.5.1 + # via python-tools-scripts +s3transfer==0.5.2 + # via boto3 +six==1.16.0 + # via python-dateutil +urllib3==1.26.18 + # via + # botocore + # requests diff --git a/requirements/static/ci/py3.11/windows-crypto.txt b/requirements/static/ci/py3.11/windows-crypto.txt new file mode 100644 index 000000000000..25f318a71ba1 --- /dev/null +++ b/requirements/static/ci/py3.11/windows-crypto.txt @@ -0,0 +1,12 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/windows-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.37.1 + # via -r requirements/static/ci/crypto.in +parameterized==0.8.1 + # via m2crypto 
+pycryptodome==3.10.1 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.11/windows.txt b/requirements/static/ci/py3.11/windows.txt new file mode 100644 index 000000000000..3a7a9c755669 --- /dev/null +++ b/requirements/static/ci/py3.11/windows.txt @@ -0,0 +1,498 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.11/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.3.1 + # via aiohttp +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +bcrypt==4.0.1 + # via -r requirements/static/ci/common.in +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # kubernetes + # requests +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # clr-loader + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # requests +cheetah3==3.2.6.post1 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # -r 
requirements/windows.txt +click==7.1.2 + # via geomet +clr-loader==0.2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # pythonnet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +colorama==0.4.1 + # via pytest +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # etcd3-py + # moto + # pyopenssl + # requests-ntlm +distlib==0.3.6 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # pytest-skip-markers +dmidecode==0.9.0 + # via -r requirements/static/ci/windows.in +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.8.0 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.3 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt +google-auth==2.1.0 + # via kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +iniconfig==1.0.1 + # via pytest +ioloop==0.1a0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +ipaddress==1.0.22 + # via kubernetes 
+jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # jaraco.collections +jaraco.collections==3.3.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt +lxml==4.9.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==8.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +ntlm-auth==1.5.0 + # via requests-ntlm +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # docker + # pytest 
+passlib==1.7.4 + # via -r requirements/static/ci/common.in +patch==1.16 + # via -r requirements/static/ci/windows.in +pathspec==0.10.2 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.5.4 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.6 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # cffi +pycryptodomex==3.10.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 + # via -r requirements/static/ci/windows.in +pymssql==2.2.7 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +pymysql==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories 
+pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==2.1.0 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # botocore + # kubernetes + # moto +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +pythonnet==3.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pywin32==305 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # docker + # pytest-skip-markers + # wmi +pywinrm==0.4.1 + # via -r requirements/static/ci/windows.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==25.0.2 ; sys_platform == "win32" + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests-ntlm==1.1.0 + # via pywinrm +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # docker + # etcd3-py + # kubernetes + # moto + 
# pyvmomi + # pywinrm + # requests-ntlm + # responses +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +sed==0.3.1 + # via -r requirements/static/ci/windows.in +semantic-version==2.10.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +six==1.15.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kubernetes + # python-dateutil + # pyvmomi + # pywinrm + # responses + # websocket-client +smmap==4.0.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.4.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +virtualenv==20.17.0 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +wheel==0.38.4 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +wmi==1.5.1 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # -r requirements/windows.txt +xmltodict==0.12.0 
+ # via + # moto + # pywinrm +yamllint==1.28.0 + # via -r requirements/static/ci/windows.in +yarl==1.8.1 + # via aiohttp +zc.lockfile==2.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.11/windows.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/changelog.txt b/requirements/static/ci/py3.12/changelog.txt new file mode 100644 index 000000000000..6a252d0cd70b --- /dev/null +++ b/requirements/static/ci/py3.12/changelog.txt @@ -0,0 +1,36 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/changelog.txt requirements/static/ci/changelog.in +# +click-default-group==1.2.2 + # via towncrier +click==7.1.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # click-default-group + # towncrier +incremental==17.5.0 + # via towncrier +jinja2==3.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # towncrier +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/changelog.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jinja2 +packaging==22.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/changelog.in +towncrier==22.12.0 + # via -r requirements/static/ci/changelog.in + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/cloud.txt b/requirements/static/ci/py3.12/cloud.txt new file mode 100644 index 000000000000..7fd5de1764ff --- /dev/null +++ b/requirements/static/ci/py3.12/cloud.txt @@ -0,0 +1,685 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/cloud.txt 
requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py +aiosignal==1.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/cloud.in + # -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c 
requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # geomet +clustershell==1.8.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py + # moto + # paramiko + # pyopenssl + # pyspnego + # requests-ntlm + # smbprotocol + # vcert +distlib==0.3.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt 
+etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +filelock==3.0.12 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +flaky==3.7.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # aiosignal +genshi==0.7.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # jaraco.text 
+ # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +keyring==5.7.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mock==5.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c 
requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # yarl +netaddr==0.7.19 + # via -r requirements/static/ci/cloud.in +ntlm-auth==1.3.0 + # via requests-ntlm +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +pluggy==0.13.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +profitbricks==4.1.3 + # via -r requirements/static/ci/cloud.in +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # 
google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pypsexec==0.1.0 + # via -r requirements/static/ci/cloud.in +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jsonschema +pyspnego==0.8.0 + # via + # -r requirements/static/ci/cloud.in + # smbprotocol +pytest-custom-exit-code==0.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-salt-factories 
+pytest-skip-markers==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-salt-factories +pytest-timeout==1.4.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt +pytest==7.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pywinrm==0.3.0 + # via -r requirements/static/ci/cloud.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/zeromq.txt + # pytest-salt-factories +requests-ntlm==1.1.0 + # via pywinrm +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # profitbricks + # pyvmomi + # pywinrm + # requests-ntlm + # responses + # vcert +responses==0.10.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +rfc3987==1.3.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 +semantic-version==2.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # profitbricks + # pypsexec + # python-dateutil + # pyvmomi + # pywinrm + # responses + # vcert + # virtualenv + # websocket-client +smbprotocol==1.10.1 + # via + # -r requirements/static/ci/cloud.in + # pypsexec +smmap==4.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r 
requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # docker + # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto + # pywinrm +yarl==1.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe 
in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/darwin-crypto.txt b/requirements/static/ci/py3.12/darwin-crypto.txt new file mode 100644 index 000000000000..e67841ff8fa7 --- /dev/null +++ b/requirements/static/ci/py3.12/darwin-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/darwin-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/darwin.txt b/requirements/static/ci/py3.12/darwin.txt new file mode 100644 index 000000000000..bee058e2de71 --- /dev/null +++ b/requirements/static/ci/py3.12/darwin.txt @@ -0,0 +1,481 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via -r requirements/static/ci/common.in +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth 
+cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +click==7.0 + # via geomet +clustershell==1.8.1 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # etcd3-py + # moto + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # gitpython +gitpython==3.1.37 
+ # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/darwin.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +linode-python==1.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c 
requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/darwin.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==8.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # docker + # pytest +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.1 + # via pytest +portend==2.6 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # -r requirements/static/ci/common.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 + # via -r requirements/static/ci/darwin.in +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c 
requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint + # yamlordereddictloader +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.31.0 + # via + # -c 
requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # responses + # vcert + # vultr +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kubernetes + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +smmap==4.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.2.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +vultr==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # -r requirements/darwin.txt +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in 
+werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/darwin.in +yamlordereddictloader==0.4.0 + # via -r requirements/static/ci/darwin.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.12/darwin.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/docs.txt b/requirements/static/ci/py3.12/docs.txt new file mode 100644 index 000000000000..70ddc3f6eb81 --- /dev/null +++ b/requirements/static/ci/py3.12/docs.txt @@ -0,0 +1,196 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt +# +alabaster==0.7.12 + # via sphinx +babel==2.9.1 + # via sphinx +certifi==2023.07.22 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +cheroot==8.5.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/docs.in +contextvars==2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +distro==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +docutils==0.19 + # via sphinx +idna==3.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +imagesize==1.4.1 + # via sphinx +immutables==0.15 + # via + # -c requirements/static/ci/py3.12/linux.txt + # contextvars +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections 
+jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # myst-docutils + # sphinx +jmespath==1.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +linkify-it-py==1.0.3 + # via myst-docutils +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +markdown-it-py==2.2.0 + # via + # mdit-py-plugins + # myst-docutils +markupsafe==2.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 +mdit-py-plugins==0.3.3 + # via myst-docutils +mdurl==0.1.2 + # via markdown-it-py +more-itertools==5.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +myst-docutils[linkify]==0.18.1 + # via -r requirements/static/ci/docs.in +packaging==22.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # sphinx +portend==2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/crypto.txt +pyenchant==3.2.2 + # via sphinxcontrib-spelling +pygments==2.14.0 + # via sphinx +pytz==2022.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # babel + # tempora +pyyaml==6.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # myst-docutils +pyzmq==23.2.0 + # via + # -c 
requirements/static/ci/py3.12/linux.txt + # -r requirements/zeromq.txt +requests==2.31.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # sphinx +six==1.16.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # more-itertools + # sphinxcontrib.httpdomain +snowballstemmer==2.1.0 + # via sphinx +sphinx==6.1.3 ; python_version >= "3.9" + # via + # -r requirements/static/ci/docs.in + # sphinxcontrib-spelling + # sphinxcontrib.httpdomain +sphinxcontrib-applehelp==1.0.2 + # via sphinx +sphinxcontrib-devhelp==1.0.2 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 + # via sphinx +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.3 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 + # via sphinx +sphinxcontrib-spelling==7.7.0 + # via -r requirements/static/ci/docs.in +sphinxcontrib.httpdomain==1.8.1 + # via -r requirements/static/ci/docs.in +tempora==4.1.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # portend +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # myst-docutils +uc-micro-py==1.0.1 + # via linkify-it-py +urllib3==1.26.18 + # via + # -c requirements/static/ci/py3.12/linux.txt + # requests +zc.lockfile==1.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/freebsd-crypto.txt b/requirements/static/ci/py3.12/freebsd-crypto.txt new file mode 100644 index 000000000000..7bdbdbc6cad5 --- /dev/null +++ b/requirements/static/ci/py3.12/freebsd-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/freebsd-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff 
--git a/requirements/static/ci/py3.12/freebsd.txt b/requirements/static/ci/py3.12/freebsd.txt new file mode 100644 index 000000000000..4a8b0e37df98 --- /dev/null +++ b/requirements/static/ci/py3.12/freebsd.txt @@ -0,0 +1,473 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.24.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/ci/common.in + # kubernetes + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + 
# cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/freebsd.in +click==7.1.2 + # via geomet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/pkg/freebsd.in + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.2.1.post1 + # via cassandra-driver +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/freebsd.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # 
jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/freebsd.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + 
# yarl +oscrypto==1.2.0 + # via certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/freebsd.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 + # via -r requirements/static/ci/freebsd.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via 
-r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # responses + # vcert +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c 
requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +smmap==4.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # -r requirements/static/pkg/freebsd.in +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.8.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/freebsd.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.12/freebsd.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/lint.txt b/requirements/static/ci/py3.12/lint.txt 
new file mode 100644 index 000000000000..1b76109d7393 --- /dev/null +++ b/requirements/static/ci/py3.12/lint.txt @@ -0,0 +1,682 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py +aiosignal==1.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +ansible-core==2.14.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # ansible +ansible==7.1.0 ; python_version >= "3.9" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +apscheduler==3.6.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # python-telegram-bot +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator + # oscrypto +astroid==2.3.3 + # via pylint +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # jsonschema +backports.entry-points-selectable==1.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth 
+ # python-telegram-bot +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # python-telegram-bot + # requests +certvalidator==0.11.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # geomet +clustershell==1.8.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # ansible-core + # etcd3-py + # moto + # paramiko + # 
pyopenssl + # vcert +distlib==0.3.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/lint.in +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +filelock==3.0.12 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +frozenlist==1.3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # aiosignal +genshi==0.7.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +hglib==2.6.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.12/linux.txt + # kubernetes +isort==4.3.21 + # via pylint 
+jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +keyring==5.7.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +lazy-object-proxy==1.4.3 + # via astroid +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c 
requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mccabe==0.6.1 + # via pylint +mercurial==6.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +modernize==0.5 + # via saltpylint +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +multidict==6.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp + # yarl +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # docker +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +passlib==1.7.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pathspec==0.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # yamllint +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # virtualenv +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # 
-c requirements/static/ci/py3.12/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # pyasn1-modules + # rsa +pycodestyle==2.5.0 + # via saltpylint +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/crypto.txt +pygit2==1.13.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pyiface==0.0.11 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pyjwt==2.4.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # twilio +pylint==2.4.4 + # via + # -r requirements/static/ci/lint.in + # saltpylint +pymysql==1.0.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # jsonschema +python-consul==1.1.0 + # via + # -c 
requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # apscheduler + # moto + # python-telegram-bot + # tempora + # twilio +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # clustershell + # kubernetes + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/zeromq.txt +redis-py-cluster==2.1.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +redis==3.5.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # python-consul + # pyvmomi + # responses + # twilio + # vcert +resolvelib==0.5.4 + # via + # -c 
requirements/static/ci/py3.12/linux.txt + # ansible-core +responses==0.10.6 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +rfc3987==1.3.8 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # boto3 +saltpylint==2023.8.3 + # via -r requirements/static/ci/lint.in +semantic-version==2.9.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # apscheduler + # astroid + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-consul + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +slack-bolt==1.15.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +slack-sdk==3.19.5 + # via + # -c requirements/static/ci/py3.12/linux.txt + # slack-bolt +smmap==4.0.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # portend +timelib==0.2.5 + # via + # 
-c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/ci/lint.in +tornado==6.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # python-telegram-bot +twilio==7.9.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +tzlocal==3.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # docker + # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +wrapt==1.11.1 + # via astroid +xmltodict==0.12.0 + # via + # -c requirements/static/ci/py3.12/linux.txt + # moto +yamllint==1.26.3 + # via + # -c requirements/static/ci/py3.12/linux.txt + # -r requirements/static/ci/linux.in +yarl==1.7.2 + # via + # -c requirements/static/ci/py3.12/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -c requirements/static/ci/py3.12/linux.txt + # 
importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/linux-crypto.txt b/requirements/static/ci/py3.12/linux-crypto.txt new file mode 100644 index 000000000000..be01a017e8bf --- /dev/null +++ b/requirements/static/ci/py3.12/linux-crypto.txt @@ -0,0 +1,10 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/linux-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.38.0 + # via -r requirements/static/ci/crypto.in +pycryptodome==3.9.7 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/linux.txt b/requirements/static/ci/py3.12/linux.txt new file mode 100644 index 000000000000..7873144f0523 --- /dev/null +++ b/requirements/static/ci/py3.12/linux.txt @@ -0,0 +1,522 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.2.0 + # via aiohttp +ansible-core==2.14.1 + # via ansible +ansible==7.1.0 ; python_version >= "3.9" + # via -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +apscheduler==3.6.3 + # via python-telegram-bot +asn1crypto==1.3.0 + # via + # certvalidator + # oscrypto +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +backports.entry-points-selectable==1.1.0 + # via virtualenv +bcrypt==3.1.6 + # via + # -r requirements/static/ci/common.in + # paramiko +boto3==1.21.46 + # via + # -r 
requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==4.2.2 + # via + # google-auth + # python-telegram-bot +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/common.in + # kubernetes + # python-telegram-bot + # requests +certvalidator==0.11.1 + # via vcert +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/common.in + # bcrypt + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # requests +cheetah3==3.2.6.post2 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in +click==7.1.1 + # via geomet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # ansible-core + # etcd3-py + # moto + # paramiko + # pyopenssl + # vcert +distlib==0.3.2 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # pytest-skip-markers +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.0.12 + # via virtualenv +flaky==3.7.0 + 
# via -r requirements/pytest.txt +frozenlist==1.3.0 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/static/ci/common.in +google-auth==2.1.0 + # via kubernetes +hglib==2.6.1 + # via -r requirements/static/ci/linux.in +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via pytest +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # jaraco.collections +jaraco.collections==3.4.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r 
requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mercurial==6.0.1 + # via -r requirements/static/ci/linux.in +mock==5.1.0 + # via -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +oscrypto==1.2.0 + # via certvalidator +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # docker + # pytest +paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via -r requirements/static/ci/common.in +passlib==1.7.4 + # via -r requirements/static/ci/common.in +pathspec==0.9.0 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.2.0 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/ci/common.in + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via + # -c 
requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 + # via -r requirements/static/ci/linux.in +pyiface==0.0.11 + # via -r requirements/static/ci/linux.in +pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" + # via -r requirements/static/ci/common.in +pyjwt==2.4.0 + # via twilio +pymysql==1.0.2 + # via -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.4.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==1.4.2 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-consul==1.1.0 + # via -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in + # botocore + # croniter + # kubernetes + # moto + # vcert +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in 
+python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via -r requirements/static/ci/linux.in +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # apscheduler + # moto + # python-telegram-bot + # tempora + # twilio +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pyyaml==6.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # ansible-core + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +redis-py-cluster==2.1.3 + # via -r requirements/static/ci/linux.in +redis==3.5.3 + # via redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # apache-libcloud + # docker + # etcd3-py + # kubernetes + # moto + # python-consul + # pyvmomi + # responses + # twilio + # vcert +resolvelib==0.5.4 + # via ansible-core +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rpm-vercmp==0.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +semantic-version==2.9.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # apscheduler + # bcrypt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kazoo + # kubernetes + # more-itertools + # paramiko + # python-consul + # python-dateutil + # pyvmomi + # responses + # vcert + # virtualenv + # websocket-client +slack-bolt==1.15.5 + # via -r 
requirements/static/ci/linux.in +slack-sdk==3.19.5 + # via slack-bolt +smmap==4.0.0 + # via gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # -r requirements/static/pkg/linux.in +toml==0.10.2 + # via -r requirements/static/ci/common.in +tornado==6.1 + # via python-telegram-bot +twilio==7.9.2 + # via -r requirements/static/ci/linux.in +typing-extensions==4.8.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +tzlocal==3.0 + # via apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # botocore + # docker + # kubernetes + # python-etcd + # requests +vcert==0.7.4 ; sys_platform != "win32" + # via -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +xmltodict==0.12.0 + # via moto +yamllint==1.26.3 + # via -r requirements/static/ci/linux.in +yarl==1.7.2 + # via aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # cherrypy +zipp==3.6.0 + # via + # -c requirements/static/ci/../pkg/py3.12/linux.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.12/tools-virustotal.txt b/requirements/static/ci/py3.12/tools-virustotal.txt new file mode 100644 index 000000000000..d3ec2bf101ad --- /dev/null +++ b/requirements/static/ci/py3.12/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To 
update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.12/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../ci/py3.12/tools.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../ci/py3.12/tools.txt + # requests +idna==3.2 + # via + # -c requirements/static/ci/../ci/py3.12/tools.txt + # requests +requests==2.31.0 + # via + # -c requirements/static/ci/../ci/py3.12/tools.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../ci/py3.12/tools.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git a/requirements/static/ci/py3.12/tools.txt b/requirements/static/ci/py3.12/tools.txt new file mode 100644 index 000000000000..7531a07f5be5 --- /dev/null +++ b/requirements/static/ci/py3.12/tools.txt @@ -0,0 +1,54 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/tools.txt requirements/static/ci/tools.in +# +attrs==22.1.0 + # via + # -r requirements/static/ci/tools.in + # python-tools-scripts +boto3==1.21.46 + # via -r requirements/static/ci/tools.in +botocore==1.24.46 + # via + # boto3 + # s3transfer +certifi==2023.07.22 + # via requests +charset-normalizer==3.2.0 + # via requests +commonmark==0.9.1 + # via rich +idna==3.2 + # via requests +jinja2==3.1.2 + # via -r requirements/static/ci/tools.in +jmespath==1.0.1 + # via + # boto3 + # botocore +markupsafe==2.1.2 + # via jinja2 +packaging==22.0 + # via -r requirements/static/ci/tools.in +pygments==2.13.0 + # via rich +python-dateutil==2.8.1 + # via botocore +python-tools-scripts==0.18.6 + # via -r requirements/static/ci/tools.in +pyyaml==6.0.1 + # via -r requirements/static/ci/tools.in +requests==2.31.0 + # via python-tools-scripts +rich==12.5.1 + # via python-tools-scripts +s3transfer==0.5.2 + # via boto3 +six==1.16.0 + # via 
python-dateutil +urllib3==1.26.18 + # via + # botocore + # requests diff --git a/requirements/static/ci/py3.12/windows-crypto.txt b/requirements/static/ci/py3.12/windows-crypto.txt new file mode 100644 index 000000000000..ec84d96324e0 --- /dev/null +++ b/requirements/static/ci/py3.12/windows-crypto.txt @@ -0,0 +1,12 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/windows-crypto.txt requirements/static/ci/crypto.in +# +m2crypto==0.37.1 + # via -r requirements/static/ci/crypto.in +parameterized==0.8.1 + # via m2crypto +pycryptodome==3.10.1 + # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.12/windows.txt b/requirements/static/ci/py3.12/windows.txt new file mode 100644 index 000000000000..863e89886965 --- /dev/null +++ b/requirements/static/ci/py3.12/windows.txt @@ -0,0 +1,498 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.12/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt +# +aiohttp==3.9.0 + # via etcd3-py +aiosignal==1.3.1 + # via aiohttp +attrs==23.1.0 + # via + # aiohttp + # jsonschema + # pytest + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-system-statistics +bcrypt==4.0.1 + # via -r requirements/static/ci/common.in +boto3==1.21.46 + # via + # -r requirements/static/ci/common.in + # moto +boto==2.49.0 + # via -r requirements/static/ci/common.in +botocore==1.24.46 + # via + # boto3 + # moto + # s3transfer +cachetools==3.1.0 + # via google-auth +cassandra-driver==3.23.0 + # via -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # 
kubernetes + # requests +cffi==1.14.6 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # clr-loader + # cryptography + # pygit2 + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # requests +cheetah3==3.2.6.post1 + # via -r requirements/static/ci/common.in +cheroot==8.5.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cherrypy +cherrypy==18.6.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt +click==7.1.2 + # via geomet +clr-loader==0.2.6 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # pythonnet +clustershell==1.8.3 + # via -r requirements/static/ci/common.in +colorama==0.4.1 + # via pytest +contextvars==2.4 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # etcd3-py + # moto + # pyopenssl + # requests-ntlm +distlib==0.3.6 + # via virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # pytest-skip-markers +dmidecode==0.9.0 + # via -r requirements/static/ci/windows.in +dnspython==1.16.0 + # via + # -r requirements/static/ci/common.in + # python-etcd +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 + # via -r requirements/static/ci/common.in +filelock==3.8.0 + # via virtualenv +flaky==3.7.0 + # via -r requirements/pytest.txt +frozenlist==1.3.3 + # via + # aiohttp + # aiosignal +genshi==0.7.5 + # via -r requirements/static/ci/common.in +geomet==0.1.2 + # via cassandra-driver +gitdb==4.0.7 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r 
requirements/static/ci/common.in + # -r requirements/windows.txt +google-auth==2.1.0 + # via kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # etcd3-py + # requests + # yarl +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # contextvars +importlib-metadata==6.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +iniconfig==1.0.1 + # via pytest +ioloop==0.1a0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +ipaddress==1.0.22 + # via kubernetes +jaraco.classes==3.2.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # jaraco.collections +jaraco.collections==3.3.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # jaraco.collections +jinja2==3.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # moto +jmespath==1.0.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # boto3 + # botocore +jsonschema==3.2.0 + # via -r requirements/static/ci/common.in +keyring==5.7.1 + # via -r requirements/static/ci/common.in +kubernetes==3.0.0 + # via -r requirements/static/ci/common.in +looseversion==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt +lxml==4.9.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # jinja2 + # mako + # moto + # werkzeug +mock==5.1.0 + # via -r 
requirements/pytest.txt +more-itertools==8.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/pytest.txt + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +moto==3.0.1 + # via -r requirements/static/ci/common.in +msgpack==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # pytest-salt-factories +multidict==6.0.2 + # via + # aiohttp + # yarl +ntlm-auth==1.5.0 + # via requests-ntlm +packaging==22.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # docker + # pytest +passlib==1.7.4 + # via -r requirements/static/ci/common.in +patch==1.16 + # via -r requirements/static/ci/windows.in +pathspec==0.10.2 + # via yamllint +pathtools==0.1.2 + # via watchdog +platformdirs==2.5.4 + # via virtualenv +pluggy==0.13.0 + # via pytest +portend==2.6 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cherrypy +psutil==5.8.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pyasn1-modules==0.2.4 + # via google-auth +pyasn1==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # pyasn1-modules + # rsa +pycparser==2.21 ; python_version >= "3.9" + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # cffi +pycryptodomex==3.10.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 + # via -r requirements/static/ci/windows.in +pymssql==2.2.7 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +pymysql==1.0.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +pynacl==1.5.0 + 
# via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # etcd3-py +pyrsistent==0.17.3 + # via jsonschema +pytest-custom-exit-code==0.3.0 + # via -r requirements/pytest.txt +pytest-helpers-namespace==2021.12.29 + # via + # -r requirements/pytest.txt + # pytest-salt-factories + # pytest-shell-utilities +pytest-httpserver==1.0.8 + # via -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 + # via pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # pytest-salt-factories + # pytest-shell-utilities + # pytest-system-statistics +pytest-subtests==0.4.0 + # via -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via pytest-salt-factories +pytest-timeout==2.1.0 + # via -r requirements/pytest.txt +pytest==7.2.0 + # via + # -r requirements/pytest.txt + # pytest-custom-exit-code + # pytest-helpers-namespace + # pytest-salt-factories + # pytest-shell-utilities + # pytest-skip-markers + # pytest-subtests + # pytest-system-statistics + # pytest-timeout +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # botocore + # kubernetes + # moto +python-etcd==0.4.5 + # via -r requirements/static/ci/common.in +python-gnupg==0.4.8 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +pythonnet==3.0.3 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +pytz==2022.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # moto + # tempora +pyvmomi==6.7.1.2018.12 + # via -r requirements/static/ci/common.in +pywin32==306 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # docker + # pytest-skip-markers + # wmi +pywinrm==0.4.1 + # via -r requirements/static/ci/windows.in +pyyaml==6.0.1 
+ # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # clustershell + # kubernetes + # pytest-salt-factories + # yamllint +pyzmq==25.0.2 ; sys_platform == "win32" + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/zeromq.txt + # pytest-salt-factories +requests-ntlm==1.1.0 + # via pywinrm +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/base.txt + # -r requirements/static/ci/common.in + # -r requirements/windows.txt + # docker + # etcd3-py + # kubernetes + # moto + # pyvmomi + # pywinrm + # requests-ntlm + # responses +responses==0.10.6 + # via moto +rfc3987==1.3.8 + # via -r requirements/static/ci/common.in +rsa==4.7.2 + # via google-auth +s3transfer==0.5.2 + # via boto3 +sed==0.3.1 + # via -r requirements/static/ci/windows.in +semantic-version==2.10.0 + # via etcd3-py +setproctitle==1.3.2 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +six==1.15.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cassandra-driver + # cheroot + # etcd3-py + # genshi + # geomet + # jsonschema + # kubernetes + # python-dateutil + # pyvmomi + # pywinrm + # responses + # websocket-client +smmap==4.0.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # gitdb +sqlparse==0.4.4 + # via -r requirements/static/ci/common.in +strict-rfc3339==0.7 + # via -r requirements/static/ci/common.in +tempora==4.1.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # portend +timelib==0.2.5 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +toml==0.10.2 + # via -r requirements/static/ci/common.in +typing-extensions==4.4.0 + # via + # pytest-shell-utilities + # pytest-system-statistics +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt + # botocore + # docker + # kubernetes + 
# python-etcd + # requests +virtualenv==20.17.0 + # via + # -r requirements/static/ci/common.in + # pytest-salt-factories +watchdog==0.10.3 + # via -r requirements/static/ci/common.in +websocket-client==0.40.0 + # via + # docker + # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # moto + # pytest-httpserver +wheel==0.38.4 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +wmi==1.5.1 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # -r requirements/windows.txt +xmltodict==0.12.0 + # via + # moto + # pywinrm +yamllint==1.28.0 + # via -r requirements/static/ci/windows.in +yarl==1.8.1 + # via aiohttp +zc.lockfile==2.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # cherrypy +zipp==3.12.0 + # via + # -c requirements/static/ci/../pkg/py3.12/windows.txt + # importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/ci/py3.7/cloud.txt b/requirements/static/ci/py3.7/cloud.txt index 6b4e2129b380..43b872f936d5 100644 --- a/requirements/static/ci/py3.7/cloud.txt +++ b/requirements/static/ci/py3.7/cloud.txt @@ -2,30 +2,37 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.7/cloud.txt --pip-args='--constraint=requirements/static/ci/py3.7/linux.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.7 +aiohttp==3.8.6 # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 - # via etcd3-py + # -c 
requirements/static/ci/py3.7/linux.txt + # etcd3-py aiosignal==1.2.0 - # via aiohttp + # via + # -c requirements/static/ci/py3.7/linux.txt + # aiohttp apache-libcloud==2.5.0 ; sys_platform != "win32" # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/cloud.in # -r requirements/static/ci/common.in -asn1crypto==1.4.0 +asn1crypto==1.3.0 # via + # -c requirements/static/ci/py3.7/linux.txt # certvalidator # oscrypto async-timeout==4.0.2 - # via aiohttp + # via + # -c requirements/static/ci/py3.7/linux.txt + # aiohttp asynctest==0.13.0 - # via aiohttp -attrs==21.2.0 # via + # -c requirements/static/ci/py3.7/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.7/linux.txt # aiohttp # jsonschema # pytest @@ -33,731 +40,516 @@ attrs==21.2.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.27 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # 
azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.52 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt 
-azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # 
azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure 
-azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 - # via virtualenv -bcrypt==3.2.0 # via + # -c requirements/static/ci/py3.7/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # moto boto==2.49.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in botocore==1.24.46 # via + # -c requirements/static/ci/py3.7/linux.txt # boto3 # moto # s3transfer cachetools==4.2.2 - # via google-auth -cassandra-driver==3.25.0 - # via -r requirements/static/ci/common.in -certifi==2022.12.7 # via + # -c requirements/static/ci/py3.7/linux.txt + # google-auth +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # requests certvalidator==0.11.1 - # via vcert + # via + # -c requirements/static/ci/py3.7/linux.txt + # vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp -cheetah3==3.2.6.post1 - # via -r 
requirements/static/ci/common.in +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # aiohttp + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in -ciscoconfparse==1.5.46 - # via napalm -click==8.0.1 - # via geomet +ciscoconfparse==1.5.19 + # via + # -c requirements/static/ci/py3.7/linux.txt + # napalm +click==7.1.1 + # via + # -c requirements/static/ci/py3.7/linux.txt + # geomet clustershell==1.8.3 - # via -r requirements/static/ci/common.in -colorama==0.4.4 - # via ciscoconfparse + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +colorama==0.4.3 + # via + # -c requirements/static/ci/py3.7/linux.txt + # ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt -croniter==1.0.15 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -cryptography==3.3.2 - # via - # adal - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/pkg/linux.in # etcd3-py # moto # paramiko # pyopenssl + # pyspnego # requests-ntlm # smbprotocol # vcert -distlib==0.3.3 - # via virtualenv 
-distro==1.6.0 +distlib==0.3.2 # via + # -c requirements/static/ci/py3.7/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # pytest-skip-markers -dnspython==2.1.0 +dnspython==1.16.0 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.2 +docker==6.1.3 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/pytest.txt +etcd3-py==0.1.6 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 - # via pytest + # via + # -c requirements/static/ci/py3.7/linux.txt + # pytest filelock==3.0.12 - # via virtualenv + # via + # -c requirements/static/ci/py3.7/linux.txt + # virtualenv flaky==3.7.0 - # via -r requirements/pytest.txt + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/pytest.txt frozenlist==1.3.0 # via + # -c requirements/static/ci/py3.7/linux.txt # aiohttp # aiosignal future==0.18.3 # via + # -c requirements/static/ci/py3.7/linux.txt # napalm # textfsm genshi==0.7.5 - # via -r requirements/static/ci/common.in -geomet==0.2.1.post1 - # via cassandra-driver + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in google-auth==2.1.0 - # via kubernetes -idna==2.8 # via + # -c requirements/static/ci/py3.7/linux.txt + # kubernetes +idna==3.2 + # 
via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # etcd3-py # requests # yarl -immutables==0.16 - # via contextvars -importlib-metadata==4.8.1 +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # contextvars +importlib-metadata==4.6.4 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/pkg/linux.in + # attrs # backports.entry-points-selectable - # click # jsonschema # mako # moto # pluggy # pytest # virtualenv -iniconfig==1.1.1 - # via pytest -ipaddress==1.0.23 - # via kubernetes -isodate==0.6.0 - # via msrest +iniconfig==1.0.1 + # via + # -c requirements/static/ci/py3.7/linux.txt + # pytest +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.7/linux.txt + # kubernetes jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy -jaraco.functools==3.3.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # junos-eznc # moto # napalm -jmespath==0.10.0 +jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 # botocore 
jsonschema==3.2.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in junos-eznc==2.4.0 ; sys_platform != "win32" and python_version <= "3.10" # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # napalm -jxmlease==1.0.3 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -kazoo==2.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in keyring==5.7.1 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in kubernetes==3.0.0 - # via -r requirements/static/ci/common.in -libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in -loguru==0.6.0 - # via ciscoconfparse + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc # napalm # ncclient mako==1.2.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c 
requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # jinja2 # mako # moto # werkzeug -mock==4.0.3 - # via -r requirements/pytest.txt -more-itertools==8.8.0 +mock==5.1.0 # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +moto==3.0.1 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.21 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # 
azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via + # -c requirements/static/ci/py3.7/linux.txt # aiohttp # yarl -napalm==3.3.1 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" - # via -r requirements/static/ci/common.in -ncclient==0.6.12 +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +ncclient==0.6.4 # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc - # napalm netaddr==0.7.19 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/cloud.in # junos-eznc # napalm # pyeapi -netmiko==3.4.0 - # via napalm -ntc-templates==2.3.2 +netmiko==3.2.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # napalm +ntc-templates==1.4.0 # via + # -c 
requirements/static/ci/py3.7/linux.txt # junos-eznc - # netmiko ntlm-auth==1.3.0 + # via requests-ntlm +oscrypto==1.2.0 # via - # requests-ntlm - # smbprotocol -oauthlib==3.2.2 - # via requests-oauthlib -oscrypto==1.2.1 - # via certvalidator -packaging==21.3 + # -c requirements/static/ci/py3.7/linux.txt + # certvalidator +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # junos-eznc # napalm # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse -platformdirs==2.3.0 - # via virtualenv -pluggy==1.0.0 - # via pytest -portend==2.7.1 - # via cherrypy +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # virtualenv +pluggy==0.13.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # cherrypy profitbricks==4.1.3 # via -r requirements/static/ci/cloud.in psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pyasn1-modules==0.2.8 - # via google-auth +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.7/linux.txt + # google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/py3.7/linux.txt # pyasn1-modules # rsa - # smbprotocol -pycparser==2.19 - # via cffi -pycryptodomex==3.10.1 - # via -r requirements/crypto.txt -pyeapi==0.8.4 - # via napalm 
+pycparser==2.17 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/crypto.txt +pyeapi==0.8.3 + # via + # -c requirements/static/ci/py3.7/linux.txt + # napalm +pyfakefs==5.3.1 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/pytest.txt pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" - # via -r requirements/static/ci/common.in -pyjwt==2.4.0 - # via adal -pynacl==1.4.0 - # via paramiko -pyopenssl==20.0.1 # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc - # packaging pypsexec==0.1.0 # via -r requirements/static/ci/cloud.in -pyrsistent==0.18.0 - # via jsonschema -pyserial==3.5 +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.7/linux.txt + # jsonschema +pyserial==3.4 # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc # netmiko +pyspnego==0.8.0 + # via + # -r requirements/static/ci/cloud.in + # smbprotocol pytest-custom-exit-code==0.3.0 - # via -r requirements/pytest.txt + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/pytest.txt pytest-helpers-namespace==2021.4.29 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 - # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != 
"win32" - # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 - # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-httpserver==1.0.6 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 # via + # -c requirements/static/ci/py3.7/linux.txt + # pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # -c requirements/static/ci/py3.7/linux.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.5.0 - # via -r requirements/pytest.txt -pytest-system-statistics==1.0.2 - # via pytest-salt-factories -pytest-tempdir==2019.10.12 +pytest-subtests==0.4.0 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via + # -c requirements/static/ci/py3.7/linux.txt # pytest-salt-factories -pytest-timeout==2.0.2 - # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest-timeout==1.4.2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/pytest.txt +pytest==7.2.0 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/pytest.txt # pytest-custom-exit-code # pytest-helpers-namespace @@ -766,191 +558,257 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout -python-dateutil==2.8.2 +python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes # moto # vcert python-etcd==0.4.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in python-gnupg==0.4.8 - # 
via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/pkg/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # moto # tempora -pyvmomi==7.0.2 - # via -r requirements/static/ci/common.in +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # clustershell # junos-eznc # kubernetes # napalm + # pytest-salt-factories # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/zeromq.txt # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # profitbricks # pyvmomi # pywinrm # requests-ntlm - # requests-oauthlib # responses # vcert -responses==0.14.0 - # via moto +responses==0.10.6 + # via + # -c requirements/static/ci/py3.7/linux.txt + # moto rfc3987==1.3.8 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c 
requirements/static/ci/py3.7/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 - # via google-auth -s3transfer==0.5.0 - # via boto3 -scp==0.14.1 # via + # -c requirements/static/ci/py3.7/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # boto3 +scp==0.13.2 + # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc # napalm # netmiko semantic-version==2.9.0 - # via etcd3-py + # via + # -c requirements/static/ci/py3.7/linux.txt + # etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # msrestazure + # more-itertools # ncclient # paramiko # profitbricks - # pynacl - # pyopenssl # pypsexec # python-dateutil # pyvmomi # pywinrm # responses - # smbprotocol # textfsm # transitions # vcert # virtualenv # websocket-client -smbprotocol==0.1.1 - # via pypsexec +smbprotocol==1.10.1 + # via + # -r requirements/static/ci/cloud.in + # pypsexec smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in strict-rfc3339==0.7 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in tempora==4.1.1 - # via portend -tenacity==8.0.1 - # via netmiko -textfsm==1.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # portend +terminal==0.4.0 + # via + # -c 
requirements/static/ci/py3.7/linux.txt + # ntc-templates +textfsm==1.1.0 + # via + # -c requirements/static/ci/py3.7/linux.txt # napalm + # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in tomli==2.0.1 - # via pytest + # via + # -c requirements/static/ci/py3.7/linux.txt + # pytest transitions==0.8.9 - # via junos-eznc + # via + # -c requirements/static/ci/py3.7/linux.txt + # junos-eznc typing-extensions==3.10.0.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # aiohttp # async-timeout # gitpython - # immutables # importlib-metadata # pytest-shell-utilities # pytest-system-statistics # yarl -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # botocore + # docker # kubernetes # python-etcd # requests - # responses vcert==0.7.4 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -virtualenv==20.8.0 # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # pytest-salt-factories -watchdog==2.1.5 - # via -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in websocket-client==0.40.0 # via + # -c requirements/static/ci/py3.7/linux.txt # docker # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in werkzeug==2.2.3 # via + # -c requirements/static/ci/py3.7/linux.txt # moto # 
pytest-httpserver xmltodict==0.12.0 # via + # -c requirements/static/ci/py3.7/linux.txt # moto # pywinrm yamlordereddictloader==0.4.0 - # via junos-eznc + # via + # -c requirements/static/ci/py3.7/linux.txt + # junos-eznc yarl==1.7.2 - # via aiohttp -zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/py3.7/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.7/docs.txt b/requirements/static/ci/py3.7/docs.txt index eb989910d526..c7ff9451ad78 100644 --- a/requirements/static/ci/py3.7/docs.txt +++ b/requirements/static/ci/py3.7/docs.txt @@ -2,17 +2,17 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.7/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt # alabaster==0.7.12 # via sphinx babel==2.9.1 # via sphinx -certifi==2022.12.7 +certifi==2023.07.22 # via # -c requirements/static/ci/py3.7/linux.txt # requests -chardet==3.0.4 +charset-normalizer==3.2.0 # via # -c requirements/static/ci/py3.7/linux.txt # requests @@ -34,7 +34,7 @@ distro==1.5.0 # -r requirements/base.txt docutils==0.16 # via sphinx -idna==2.8 +idna==3.2 # via # -c requirements/static/ci/py3.7/linux.txt # requests @@ -82,7 +82,7 @@ looseversion==1.0.2 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt -markdown-it-py==2.1.0 +markdown-it-py==2.2.0 # via # mdit-py-plugins # myst-docutils @@ 
-108,7 +108,7 @@ msgpack==1.0.2 # -r requirements/base.txt myst-docutils[linkify]==0.18.1 # via -r requirements/static/ci/docs.in -packaging==21.3 +packaging==22.0 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt @@ -129,16 +129,12 @@ pyenchant==3.2.2 # via sphinxcontrib-spelling pygments==2.8.1 # via sphinx -pyparsing==3.0.9 - # via - # -c requirements/static/ci/py3.7/linux.txt - # packaging pytz==2022.1 # via # -c requirements/static/ci/py3.7/linux.txt # babel # tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt @@ -147,7 +143,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt @@ -193,7 +189,7 @@ typing-extensions==3.10.0.0 # myst-docutils uc-micro-py==1.0.1 # via linkify-it-py -urllib3==1.26.6 +urllib3==1.26.18 # via # -c requirements/static/ci/py3.7/linux.txt # requests diff --git a/requirements/static/ci/py3.7/freebsd-crypto.txt b/requirements/static/ci/py3.7/freebsd-crypto.txt index 7c8974d75e9d..d844d2f97162 100644 --- a/requirements/static/ci/py3.7/freebsd-crypto.txt +++ b/requirements/static/ci/py3.7/freebsd-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.7/freebsd-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.7/freebsd.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/freebsd-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.7/freebsd.txt b/requirements/static/ci/py3.7/freebsd.txt index 8740222b714d..cca81480e6df 100644 --- a/requirements/static/ci/py3.7/freebsd.txt +++ b/requirements/static/ci/py3.7/freebsd.txt @@ -2,13 +2,9 @@ # This 
file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.7/freebsd.txt --pip-args='--constraint=requirements/static/pkg/py3.7/freebsd.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -adal==1.2.5 - # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 +aiohttp==3.8.6 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -22,7 +18,7 @@ async-timeout==4.0.2 # via aiohttp asynctest==0.13.0 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -31,311 +27,13 @@ attrs==20.3.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.26 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # 
azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.51 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt 
-azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # 
azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # 
azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==3.1.6 # via + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -346,37 +44,43 @@ botocore==1.24.46 # boto3 # moto # s3transfer +cached-property==1.5.2 + # via pygit2 cachetools==3.1.0 # via google-auth cassandra-driver==3.24.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # requests certvalidator==0.11.1 # via vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/freebsd.in ciscoconfparse==1.5.19 @@ -388,15 +92,15 @@ clustershell==1.8.3 colorama==0.4.3 # via ciscoconfparse 
contextvars==2.4 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==3.3.2 +cryptography==41.0.7 # via - # adal - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # -r requirements/static/pkg/freebsd.in # etcd3-py # moto # paramiko @@ -406,6 +110,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt # -r requirements/static/pkg/freebsd.in # pytest-skip-markers @@ -414,11 +119,9 @@ dnspython==1.16.0 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.3 - # via - # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -438,24 +141,29 @@ genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.2.1.post1 # via cassandra-driver -gitdb==4.0.5 +gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/static/ci/common.in -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes hglib==2.6.1 # via -r requirements/static/ci/freebsd.in -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # contextvars importlib-metadata==4.6.4 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/static/pkg/freebsd.in + # attrs # backports.entry-points-selectable # jsonschema # mako @@ -467,27 +175,34 @@ iniconfig==1.0.1 # via pytest ipaddress==1.0.22 # via kubernetes -isodate==0.6.0 - # via 
msrest jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt # junos-eznc # moto # napalm jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -509,7 +224,9 @@ kubernetes==3.0.0 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # -r requirements/base.txt lxml==4.9.1 # via # junos-eznc @@ -519,6 +236,7 @@ mako==1.2.2 # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt # jinja2 # mako @@ -526,117 +244,28 @@ markupsafe==2.1.2 # werkzeug mercurial==6.0.1 # via -r requirements/static/ci/freebsd.in -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.19 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # 
azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # 
azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via # aiohttp # yarl -napalm==3.1.0 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" # via -r requirements/static/ci/common.in ncclient==0.6.4 # via junos-eznc @@ -649,13 +278,13 @@ netmiko==3.2.0 # via napalm ntc-templates==1.4.0 # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib oscrypto==1.2.0 # via certvalidator -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -665,7 +294,7 @@ paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via # -r requirements/static/ci/common.in # ciscoconfparse @@ -678,9 +307,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -692,27 +324,32 @@ pyasn1==0.4.8 # pyasn1-modules # rsa pycparser==2.17 - # via cffi + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # cffi pycryptodomex==3.9.8 - # via -r requirements/crypto.txt + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # -r requirements/crypto.txt pyeapi==0.8.3 # via napalm -pygit2==1.8.0 ; python_version >= "3.7" +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.10.1 # via -r requirements/static/ci/freebsd.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and 
platform_system != "openbsd" # via -r requirements/static/ci/common.in -pyjwt==2.4.0 - # via adal -pynacl==1.3.0 - # via paramiko -pyopenssl==19.1.0 +pynacl==1.5.0 # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/static/pkg/freebsd.in # etcd3-py pyparsing==3.0.9 - # via - # junos-eznc - # packaging + # via junos-eznc pyrsistent==0.17.3 # via jsonschema pyserial==3.4 @@ -726,13 +363,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.6 # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -741,13 +378,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -757,14 +390,11 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/static/pkg/freebsd.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes @@ -773,48 +403,44 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/freebsd.in + # via 
+ # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # -r requirements/static/pkg/freebsd.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # moto # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt # clustershell # junos-eznc # kubernetes # napalm + # pytest-salt-factories # yamllint # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # pyvmomi - # requests-oauthlib # responses # vcert responses==0.10.6 @@ -833,29 +459,25 @@ scp==0.13.2 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # -r requirements/static/pkg/freebsd.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # google-auth - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # mock # more-itertools - # msrestazure # ncclient # paramiko - # pynacl - # pyopenssl # python-dateutil # pyvmomi # responses @@ -864,14 +486,16 @@ six==1.16.0 # vcert # virtualenv # websocket-client -smmap==3.0.4 +smmap==4.0.0 # via gitdb -sqlparse==0.4.2 +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via 
portend + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # portend terminal==0.4.0 # via ntc-templates textfsm==1.1.0 @@ -880,15 +504,18 @@ textfsm==1.1.0 # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # -r requirements/static/pkg/freebsd.in toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest -transitions==0.8.1 +transitions==0.8.9 # via junos-eznc typing-extensions==3.10.0.0 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # aiohttp # async-timeout # gitpython @@ -896,9 +523,11 @@ typing-extensions==3.10.0.0 # pytest-shell-utilities # pytest-system-statistics # yarl -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -914,6 +543,8 @@ websocket-client==0.40.0 # via # docker # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in werkzeug==2.2.3 # via # moto @@ -927,9 +558,13 @@ yamlordereddictloader==0.4.0 yarl==1.7.2 # via aiohttp zc.lockfile==1.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.7/freebsd.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.7/invoke.txt b/requirements/static/ci/py3.7/invoke.txt deleted file mode 100644 index e2cad5c72e0d..000000000000 --- a/requirements/static/ci/py3.7/invoke.txt +++ /dev/null @@ -1,14 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=requirements/static/ci/py3.7/invoke.txt --pip-args='--constraint=requirements/static/ci/py3.7/linux.txt' requirements/static/ci/invoke.in -# -blessings==1.7 - # via -r requirements/static/ci/invoke.in -invoke==1.4.1 - 
# via -r requirements/static/ci/invoke.in -pyyaml==5.4.1 - # via -r requirements/static/ci/invoke.in -six==1.16.0 - # via blessings diff --git a/requirements/static/ci/py3.7/lint.txt b/requirements/static/ci/py3.7/lint.txt index ee878c1e38df..5ad5a03f4f68 100644 --- a/requirements/static/ci/py3.7/lint.txt +++ b/requirements/static/ci/py3.7/lint.txt @@ -2,527 +2,351 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.7/lint.txt --pip-args='--constraint=requirements/static/ci/py3.7/linux.txt' requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.7 +aiohttp==3.8.6 # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 - # via etcd3-py + # -c requirements/static/ci/py3.7/linux.txt + # etcd3-py aiosignal==1.2.0 - # via aiohttp -ansible-core==2.11.4 - # via ansible + # via + # -c requirements/static/ci/py3.7/linux.txt + # aiohttp +ansible-core==2.11.7 + # via + # -c requirements/static/ci/py3.7/linux.txt + # ansible ansible==4.4.0 ; python_version < "3.9" - # via -r requirements/static/ci/linux.in -apache-libcloud==3.3.1 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in apscheduler==3.6.3 - # via python-telegram-bot -asn1crypto==1.4.0 # via + # -c requirements/static/ci/py3.7/linux.txt + # python-telegram-bot +asn1crypto==1.3.0 + # via + # -c 
requirements/static/ci/py3.7/linux.txt # certvalidator # oscrypto astroid==2.3.3 # via pylint async-timeout==4.0.2 - # via aiohttp + # via + # -c requirements/static/ci/py3.7/linux.txt + # aiohttp asynctest==0.13.0 - # via aiohttp -attrs==21.2.0 # via + # -c requirements/static/ci/py3.7/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.7/linux.txt # aiohttp # jsonschema -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.27 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - 
# azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.52 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt 
-azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # 
azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 - # via virtualenv + # via + # -c requirements/static/ci/py3.7/linux.txt + # virtualenv backports.zoneinfo==0.2.1 - # via tzlocal 
-bcrypt==3.2.0 # via + # -c requirements/static/ci/py3.7/linux.txt + # tzlocal +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # moto boto==2.49.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in botocore==1.24.46 # via + # -c requirements/static/ci/py3.7/linux.txt # boto3 # moto # s3transfer +cached-property==1.5.2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # pygit2 cachetools==4.2.2 # via + # -c requirements/static/ci/py3.7/linux.txt # google-auth # python-telegram-bot -cassandra-driver==3.25.0 - # via -r requirements/static/ci/common.in -certifi==2022.12.7 +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # python-telegram-bot # requests certvalidator==0.11.1 - # via vcert + # via + # -c requirements/static/ci/py3.7/linux.txt + # vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pygit2 # pynacl -charset-normalizer==2.0.4 +charset-normalizer==3.2.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # aiohttp # requests -cheetah3==3.2.6.post1 - # via -r requirements/static/ci/common.in +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c 
requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in -ciscoconfparse==1.5.46 - # via napalm -click==8.0.1 - # via geomet +ciscoconfparse==1.5.19 + # via + # -c requirements/static/ci/py3.7/linux.txt + # napalm +click==7.1.1 + # via + # -c requirements/static/ci/py3.7/linux.txt + # geomet clustershell==1.8.3 - # via -r requirements/static/ci/common.in -colorama==0.4.4 - # via ciscoconfparse + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +colorama==0.4.3 + # via + # -c requirements/static/ci/py3.7/linux.txt + # ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt -croniter==1.0.15 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -cryptography==3.4.7 # via - # adal + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/pkg/linux.in # ansible-core - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common # etcd3-py # moto # paramiko # pyopenssl # vcert distlib==0.3.2 - # via virtualenv -distro==1.6.0 - # via -r requirements/base.txt -dnspython==2.1.0 # via + # -c requirements/static/ci/py3.7/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/base.txt +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # 
ciscoconfparse # python-etcd -docker==5.0.0 - # via -r requirements/static/ci/common.in -etcd3-py==0.1.6 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +docker==6.1.3 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/lint.in +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in filelock==3.0.12 - # via virtualenv + # via + # -c requirements/static/ci/py3.7/linux.txt + # virtualenv frozenlist==1.3.0 # via + # -c requirements/static/ci/py3.7/linux.txt # aiohttp # aiosignal future==0.18.3 # via + # -c requirements/static/ci/py3.7/linux.txt # napalm # textfsm genshi==0.7.5 - # via -r requirements/static/ci/common.in -geomet==0.2.1.post1 - # via cassandra-driver + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" - # via -r requirements/static/ci/common.in -google-auth==2.0.1 - # via kubernetes + # via + # -c requirements/static/ci/py3.7/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # kubernetes hglib==2.6.1 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # etcd3-py # requests # yarl -immutables==0.16 - # via contextvars +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # contextvars importlib-metadata==4.6.4 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # 
-r requirements/static/pkg/linux.in + # attrs # backports.entry-points-selectable - # click # jsonschema # mako # moto # virtualenv -ipaddress==1.0.23 - # via kubernetes -isodate==0.6.0 - # via msrest +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.7/linux.txt + # kubernetes isort==4.3.21 # via pylint jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy -jaraco.functools==3.3.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # ansible-core # junos-eznc # moto # napalm -jmespath==0.10.0 +jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 # botocore jsonschema==3.2.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in junos-eznc==2.4.0 ; sys_platform != "win32" and python_version <= "3.10" # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # napalm -jxmlease==1.0.3 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -kazoo==2.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != 
"win32" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in keyring==5.7.1 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in kubernetes==3.0.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in lazy-object-proxy==1.4.3 # via astroid -libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in -loguru==0.6.0 - # via ciscoconfparse +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc # napalm # ncclient mako==1.2.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # jinja2 # mako @@ -531,236 +355,219 @@ markupsafe==2.1.2 mccabe==0.6.1 # via pylint mercurial==6.0.1 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in modernize==0.5 # via saltpylint -more-itertools==8.8.0 +more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools 
-moto==3.0.1 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +moto==3.0.1 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in msgpack==1.0.2 - # via -r requirements/base.txt -msrest==0.6.21 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # 
azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/base.txt multidict==6.0.2 # via + # -c requirements/static/ci/py3.7/linux.txt # aiohttp # yarl -napalm==3.3.1 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" - # via -r requirements/static/ci/common.in -ncclient==0.6.12 +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +ncclient==0.6.4 # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc - # napalm -netaddr==0.8.0 +netaddr==0.7.19 # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc # napalm # pyeapi -netmiko==3.4.0 - # via napalm -ntc-templates==2.2.2 +netmiko==3.2.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # napalm +ntc-templates==1.4.0 # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc - # netmiko -oauthlib==3.2.2 - # via requests-oauthlib -oscrypto==1.2.1 - # via certvalidator -packaging==21.3 +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # certvalidator +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # ansible-core + # docker paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via + # -c 
requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # junos-eznc # napalm # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse pathspec==0.9.0 - # via yamllint + # via + # -c requirements/static/ci/py3.7/linux.txt + # yamllint +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # watchdog platformdirs==2.2.0 - # via virtualenv -portend==2.7.1 - # via cherrypy + # via + # -c requirements/static/ci/py3.7/linux.txt + # virtualenv +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # cherrypy psutil==5.8.0 - # via -r requirements/base.txt -pyasn1-modules==0.2.8 - # via google-auth + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/base.txt +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.7/linux.txt + # google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/py3.7/linux.txt # pyasn1-modules # rsa pycodestyle==2.5.0 # via saltpylint -pycparser==2.20 - # via cffi -pycryptodomex==3.10.1 - # via -r requirements/crypto.txt -pyeapi==0.8.4 - # via napalm -pygit2==1.0.3 ; python_version <= "3.8" - # via -r requirements/static/ci/linux.in +pycparser==2.17 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/crypto.txt +pyeapi==0.8.3 + # via + # -c requirements/static/ci/py3.7/linux.txt + # napalm +pygit2==1.10.1 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in pyiface==0.0.11 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r 
requirements/static/ci/linux.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in pyjwt==2.4.0 # via - # adal + # -c requirements/static/ci/py3.7/linux.txt # twilio pylint==2.4.4 # via # -r requirements/static/ci/lint.in # saltpylint -pymysql==1.0.2 ; python_version > "3.5" - # via -r requirements/static/ci/linux.in -pynacl==1.4.0 - # via paramiko -pyopenssl==20.0.1 +pymysql==1.0.2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc - # packaging -pyrsistent==0.18.0 - # via jsonschema -pyserial==3.5 +pyrsistent==0.17.3 # via + # -c requirements/static/ci/py3.7/linux.txt + # jsonschema +pyserial==3.4 + # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc # netmiko python-consul==1.1.0 - # via -r requirements/static/ci/linux.in -python-dateutil==2.8.2 # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes # moto # vcert python-etcd==0.4.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/linux.in -python-telegram-bot==13.7 
; python_version > "3.5" - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # apscheduler # moto # python-telegram-bot # tempora # twilio -pyvmomi==7.0.2 - # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # ansible-core # clustershell @@ -770,62 +577,81 @@ pyyaml==5.4.1 # yamllint # yamlordereddictloader pyzmq==23.2.0 - # via -r requirements/zeromq.txt + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/zeromq.txt redis-py-cluster==2.1.3 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in redis==3.5.3 - # via redis-py-cluster -requests-oauthlib==1.3.0 - # via msrest -requests==2.26.0 # via + # -c requirements/static/ci/py3.7/linux.txt + # redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # python-consul # pyvmomi - # requests-oauthlib # responses # twilio # vcert resolvelib==0.5.4 - # 
via ansible-core -responses==0.13.4 - # via moto + # via + # -c requirements/static/ci/py3.7/linux.txt + # ansible-core +responses==0.10.6 + # via + # -c requirements/static/ci/py3.7/linux.txt + # moto rfc3987==1.3.8 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 - # via google-auth -s3transfer==0.5.0 - # via boto3 -saltpylint==2020.9.28 + # via + # -c requirements/static/ci/py3.7/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.7/linux.txt + # boto3 +saltpylint==2023.8.3 # via -r requirements/static/ci/lint.in -scp==0.13.6 +scp==0.13.2 # via + # -c requirements/static/ci/py3.7/linux.txt # junos-eznc # napalm # netmiko semantic-version==2.9.0 - # via etcd3-py + # via + # -c requirements/static/ci/py3.7/linux.txt + # etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # apscheduler # astroid # bcrypt @@ -834,16 +660,13 @@ six==1.16.0 # etcd3-py # genshi # geomet - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # msrestazure + # more-itertools # ncclient # paramiko - # pynacl - # pyopenssl # python-consul # python-dateutil # pyvmomi @@ -854,80 +677,139 @@ six==1.16.0 # virtualenv # websocket-client slack-bolt==1.15.5 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in slack-sdk==3.19.5 - # via slack-bolt + # via + # -c 
requirements/static/ci/py3.7/linux.txt + # slack-bolt smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in strict-rfc3339==0.7 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in tempora==4.1.1 - # via portend -tenacity==8.0.1 - # via netmiko -textfsm==1.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # portend +terminal==0.4.0 + # via + # -c requirements/static/ci/py3.7/linux.txt + # ntc-templates +textfsm==1.1.0 + # via + # -c requirements/static/ci/py3.7/linux.txt # napalm + # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 # via + # -c requirements/static/ci/py3.7/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/ci/lint.in tornado==6.1 - # via python-telegram-bot -transitions==0.8.8 - # via junos-eznc + # via + # -c requirements/static/ci/py3.7/linux.txt + # python-telegram-bot +transitions==0.8.9 + # via + # -c requirements/static/ci/py3.7/linux.txt + # junos-eznc twilio==7.9.2 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in typed-ast==1.4.1 # via astroid typing-extensions==3.10.0.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # aiohttp # async-timeout # gitpython - # immutables # importlib-metadata # yarl tzlocal==3.0 - # via apscheduler -urllib3==1.26.6 # via + # -c requirements/static/ci/py3.7/linux.txt + # apscheduler +urllib3==1.26.18 + # via 
+ # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt # botocore + # docker # kubernetes # python-etcd # requests - # responses vcert==0.7.4 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in virtualenv==20.7.2 - # via -r requirements/static/ci/common.in -watchdog==2.1.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in websocket-client==0.40.0 # via + # -c requirements/static/ci/py3.7/linux.txt # docker # kubernetes +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/common.in werkzeug==2.2.3 - # via moto + # via + # -c requirements/static/ci/py3.7/linux.txt + # moto wrapt==1.11.1 # via astroid xmltodict==0.12.0 - # via moto + # via + # -c requirements/static/ci/py3.7/linux.txt + # moto yamllint==1.26.3 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.7/linux.txt + # -r requirements/static/ci/linux.in yamlordereddictloader==0.4.0 - # via junos-eznc + # via + # -c requirements/static/ci/py3.7/linux.txt + # junos-eznc yarl==1.7.2 - # via aiohttp -zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/py3.7/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -c requirements/static/ci/py3.7/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.7/linux-crypto.txt 
b/requirements/static/ci/py3.7/linux-crypto.txt index 1c23cef2513c..e3d058b26562 100644 --- a/requirements/static/ci/py3.7/linux-crypto.txt +++ b/requirements/static/ci/py3.7/linux-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.7/linux-crypto.txt --pip-args='--constraint=requirements/static/pkg/py3.7/linux.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/linux-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.7/linux.txt b/requirements/static/ci/py3.7/linux.txt index 0ec357f1038d..5973155bed13 100644 --- a/requirements/static/ci/py3.7/linux.txt +++ b/requirements/static/ci/py3.7/linux.txt @@ -2,13 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.7/linux.txt --pip-args='--constraint=requirements/static/pkg/py3.7/linux.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.3 - # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 +aiohttp==3.8.6 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -28,7 +24,7 @@ async-timeout==4.0.2 # via aiohttp asynctest==0.13.0 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -37,316 +33,15 @@ attrs==20.3.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.18 - # 
via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table 
-azure-cosmosdb-table==1.0.5 - # via azure -azure-datalake-store==0.0.44 - # via azure -azure-eventgrid==1.2.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.0 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.4.1 - # via azure-mgmt -azure-mgmt-containerregistry==2.7.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.5.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via 
azure-mgmt -azure-mgmt-managementpartner==0.1.0 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.6.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.0.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.8.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt 
-azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.1.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.0.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.6 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.0 - # via - # azure-cosmosdb-table - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 # via virtualenv +backports.zoneinfo==0.2.1 + # via tzlocal bcrypt==3.1.6 # via + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -357,40 +52,46 @@ botocore==1.24.46 # boto3 # moto # s3transfer +cached-property==1.5.2 + # via pygit2 cachetools==4.2.2 # via # google-auth # python-telegram-bot cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in 
-certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # python-telegram-bot # requests certvalidator==0.11.1 # via vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in ciscoconfparse==1.5.19 @@ -402,16 +103,16 @@ clustershell==1.8.3 colorama==0.4.3 # via ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==3.3.2 +cryptography==41.0.7 # via - # adal + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -r requirements/static/pkg/linux.in # ansible-core - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common # etcd3-py # moto # paramiko @@ -421,6 +122,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt # pytest-skip-markers dnspython==1.16.0 @@ -428,11 +130,9 @@ dnspython==1.16.0 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.3 - # via - # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt 
+etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -452,24 +152,29 @@ genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 # via cassandra-driver -gitdb==4.0.5 +gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/static/ci/common.in -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes hglib==2.6.1 # via -r requirements/static/ci/linux.in -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # contextvars importlib-metadata==4.6.4 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/static/pkg/linux.in + # attrs # backports.entry-points-selectable # jsonschema # mako @@ -481,21 +186,27 @@ iniconfig==1.0.1 # via pytest ipaddress==1.0.22 # via kubernetes -isodate==0.6.0 - # via msrest jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt # ansible-core # junos-eznc @@ -503,6 +214,7 @@ jinja2==3.1.2 # napalm jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -524,7 +236,9 @@ kubernetes==3.0.0 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r 
requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via # junos-eznc @@ -534,6 +248,7 @@ mako==1.2.2 # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt # jinja2 # mako @@ -541,114 +256,28 @@ markupsafe==2.1.2 # werkzeug mercurial==6.0.1 # via -r requirements/static/ci/linux.in -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.14 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.3 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # 
azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via # aiohttp # yarl -napalm==3.1.0 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" # via -r requirements/static/ci/common.in ncclient==0.6.4 # via junos-eznc @@ -661,14 +290,14 @@ netmiko==3.2.0 # via napalm ntc-templates==1.4.0 # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib oscrypto==1.2.0 # via certvalidator -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt # ansible-core + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -678,7 +307,7 @@ paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via # -r 
requirements/static/ci/common.in # ciscoconfparse @@ -691,9 +320,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -705,33 +337,38 @@ pyasn1==0.4.8 # pyasn1-modules # rsa pycparser==2.17 - # via cffi + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # cffi pycryptodomex==3.9.8 - # via -r requirements/crypto.txt + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -r requirements/crypto.txt pyeapi==0.8.3 # via napalm -pygit2==1.0.3 ; python_version <= "3.8" +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.10.1 # via -r requirements/static/ci/linux.in pyiface==0.0.11 # via -r requirements/static/ci/linux.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" # via -r requirements/static/ci/common.in pyjwt==2.4.0 - # via - # adal - # twilio -pymysql==1.0.2 ; python_version > "3.5" + # via twilio +pymysql==1.0.2 # via -r requirements/static/ci/linux.in -pynacl==1.3.0 - # via paramiko -pyopenssl==19.1.0 +pynacl==1.5.0 + # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 - # via - # junos-eznc - # packaging + # via junos-eznc pyrsistent==0.17.3 # via jsonschema pyserial==3.4 @@ -745,13 +382,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.6 # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 
+pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -760,13 +397,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -776,16 +409,13 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-consul==1.1.0 # via -r requirements/static/ci/linux.in python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes @@ -794,60 +424,55 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/linux.in -python-telegram-bot==13.7 ; python_version > "3.5" + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 # via -r requirements/static/ci/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # apscheduler # moto # python-telegram-bot # tempora # twilio - # tzlocal pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt # ansible-core # clustershell # junos-eznc # kubernetes # napalm + # pytest-salt-factories # yamllint # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/zeromq.txt # pytest-salt-factories 
redis-py-cluster==2.1.3 # via -r requirements/static/ci/linux.in redis==3.5.3 # via redis-py-cluster -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # python-consul # pyvmomi - # requests-oauthlib # responses # twilio # vcert @@ -858,7 +483,9 @@ responses==0.10.6 rfc3987==1.3.8 # via -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 # via google-auth s3transfer==0.5.2 @@ -871,29 +498,26 @@ scp==0.13.2 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # apscheduler # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # google-auth - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # mock # more-itertools # ncclient # paramiko - # pynacl - # pyopenssl # python-consul # python-dateutil # pyvmomi @@ -907,14 +531,16 @@ slack-bolt==1.15.5 # via -r requirements/static/ci/linux.in slack-sdk==3.19.5 # via slack-bolt -smmap==3.0.4 +smmap==4.0.0 # via gitdb -sqlparse==0.4.2 +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # portend terminal==0.4.0 # via ntc-templates textfsm==1.1.0 @@ -923,19 +549,22 @@ 
textfsm==1.1.0 # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest tornado==6.1 # via python-telegram-bot -transitions==0.8.1 +transitions==0.8.9 # via junos-eznc twilio==7.9.2 # via -r requirements/static/ci/linux.in typing-extensions==3.10.0.0 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # aiohttp # async-timeout # gitpython @@ -943,11 +572,13 @@ typing-extensions==3.10.0.0 # pytest-shell-utilities # pytest-system-statistics # yarl -tzlocal==2.1 +tzlocal==3.0 # via apscheduler -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -963,6 +594,8 @@ websocket-client==0.40.0 # via # docker # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in werkzeug==2.2.3 # via # moto @@ -976,9 +609,13 @@ yamlordereddictloader==0.4.0 yarl==1.7.2 # via aiohttp zc.lockfile==1.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.7/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.7/windows-crypto.txt b/requirements/static/ci/py3.7/windows-crypto.txt index d3c328449144..061b8848bbd6 100644 --- a/requirements/static/ci/py3.7/windows-crypto.txt +++ b/requirements/static/ci/py3.7/windows-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.7/windows-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.7/windows.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url 
--output-file=requirements/static/ci/py3.7/windows-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.7/windows.txt b/requirements/static/ci/py3.7/windows.txt index 024a5d7bdc28..26df2e6b4601 100644 --- a/requirements/static/ci/py3.7/windows.txt +++ b/requirements/static/ci/py3.7/windows.txt @@ -2,9 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.7/windows.txt --pip-args='--constraint=requirements/static/pkg/py3.7/windows.txt' requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.7/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.1 +aiohttp==3.8.6 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -12,7 +12,7 @@ async-timeout==4.0.2 # via aiohttp asynctest==0.13.0 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -24,8 +24,8 @@ attrs==20.3.0 backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==4.0.1 - # via passlib -boto3==1.21.46 ; python_version >= "3.6" + # via -r requirements/static/ci/common.in +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -42,43 +42,55 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt # kubernetes # requests cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt # clr-loader # cryptography # pygit2 
-chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # aiohttp + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt click==7.1.2 # via geomet clr-loader==0.2.4 - # via pythonnet + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # pythonnet clustershell==1.8.3 # via -r requirements/static/ci/common.in colorama==0.4.1 # via pytest contextvars==2.4 - # via -r requirements/base.txt -cryptography==3.4.7 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/base.txt +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/windows.txt # etcd3-py # moto @@ -88,6 +100,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/base.txt # pytest-skip-markers dmidecode==0.9.0 @@ -96,11 +109,9 @@ dnspython==1.16.0 # via # -r requirements/static/ci/common.in # python-etcd -docker-pycreds==0.4.0 - # via docker -docker==2.7.0 - # via -r requirements/static/ci/common.in -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -117,23 +128,31 @@ genshi==0.7.5 geomet==0.1.2 # via cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt 
-google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # contextvars importlib-metadata==4.6.4 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/windows.txt + # attrs # backports.entry-points-selectable # jsonschema # mako @@ -144,26 +163,37 @@ importlib-metadata==4.6.4 iniconfig==1.0.1 # via pytest ioloop==0.1a0 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/windows.txt ipaddress==1.0.22 # via kubernetes jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # jaraco.collections jaraco.collections==3.3.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.0 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/base.txt # moto jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -175,32 +205,38 @@ keyring==5.7.1 kubernetes==3.0.0 # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/base.txt lxml==4.9.1 - # via -r requirements/windows.txt -mako==1.2.2 # via - # -r requirements/static/ci/common.in + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/windows.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in markupsafe==2.1.2 # 
via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/base.txt # jinja2 # mako # moto # werkzeug -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==8.2.0 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/base.txt # pytest-salt-factories multidict==6.0.2 @@ -209,11 +245,13 @@ multidict==6.0.2 # yarl ntlm-auth==1.5.0 # via requests-ntlm -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/base.txt + # docker # pytest -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via -r requirements/static/ci/common.in patch==1.16 # via -r requirements/static/ci/windows.in @@ -226,9 +264,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.6 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -237,27 +278,38 @@ pyasn1-modules==0.2.4 # via google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/windows.txt # pyasn1-modules # rsa pycparser==2.21 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/windows.txt # cffi pycryptodomex==3.10.1 - # via -r requirements/crypto.txt -pygit2==1.9.1 ; python_version >= "3.7" + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.10.1 # via -r requirements/static/ci/windows.in pymssql==2.2.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + 
# -r requirements/windows.txt pymysql==1.0.2 - # via -r requirements/windows.txt -pyopenssl==20.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/windows.txt +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/windows.txt # etcd3-py -pyparsing==3.0.9 - # via packaging pyrsistent==0.17.3 # via jsonschema pytest-custom-exit-code==0.3.0 @@ -267,13 +319,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.6 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc21 ; sys_platform == "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -282,13 +334,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -298,10 +346,10 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/windows.txt # botocore # kubernetes @@ -309,37 +357,48 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/windows.txt 
pythonnet==3.0.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/windows.txt pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # moto # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in pywin32==305 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/windows.txt # cherrypy + # docker # pytest-skip-markers # wmi pywinrm==0.4.1 # via -r requirements/static/ci/windows.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/base.txt # clustershell # kubernetes + # pytest-salt-factories # yamllint -pyzmq==25.0.0 ; sys_platform == "win32" +pyzmq==25.0.2 ; sys_platform == "win32" # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/zeromq.txt # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt @@ -364,21 +423,19 @@ sed==0.3.1 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/windows.txt six==1.15.0 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # cassandra-driver # cheroot - # docker - # docker-pycreds # etcd3-py # genshi # geomet - # google-auth # jsonschema # kubernetes - # mock - # pyopenssl # python-dateutil # pyvmomi # pywinrm @@ -386,21 +443,28 @@ six==1.15.0 # virtualenv # websocket-client smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # gitdb +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c 
requirements/static/ci/../pkg/py3.7/windows.txt + # portend timelib==0.2.5 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/windows.txt toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest -typing-extensions==4.2.0 +typing-extensions==4.4.0 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # aiohttp # async-timeout # gitpython @@ -408,10 +472,12 @@ typing-extensions==4.2.0 # pytest-shell-utilities # pytest-system-statistics # yarl -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt # -r requirements/windows.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -425,14 +491,20 @@ websocket-client==0.40.0 # via # docker # kubernetes +wempy==0.2.1 + # via -r requirements/static/ci/common.in werkzeug==2.2.3 # via # moto # pytest-httpserver wheel==0.38.4 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/windows.txt wmi==1.5.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # -r requirements/windows.txt xmltodict==0.12.0 # via # moto @@ -442,9 +514,13 @@ yamllint==1.26.3 yarl==1.7.2 # via aiohttp zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.7/windows.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.8/changelog.txt b/requirements/static/ci/py3.8/changelog.txt index bef4956e8558..6b6f20b06544 100644 --- a/requirements/static/ci/py3.8/changelog.txt +++ b/requirements/static/ci/py3.8/changelog.txt @@ -2,26 +2,37 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile 
--output-file=requirements/static/ci/py3.8/changelog.txt --pip-args='--constraint=requirements/static/ci/py3.8/linux.txt' requirements/static/ci/changelog.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/changelog.txt requirements/static/ci/changelog.in # click-default-group==1.2.2 # via towncrier -click==7.1.2 +click==7.1.1 # via + # -c requirements/static/ci/py3.8/linux.txt # click-default-group # towncrier incremental==17.5.0 # via towncrier jinja2==3.1.2 - # via towncrier -looseversion==1.0.3 - # via -r requirements/static/ci/changelog.in -markupsafe==2.0.1 - # via jinja2 -packaging==23.0 - # via -r requirements/static/ci/changelog.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # towncrier +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/changelog.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # jinja2 +packaging==22.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/changelog.in tomli==2.0.1 - # via towncrier + # via + # -c requirements/static/ci/py3.8/linux.txt + # towncrier towncrier==22.12.0 # via -r requirements/static/ci/changelog.in diff --git a/requirements/static/ci/py3.8/cloud.txt b/requirements/static/ci/py3.8/cloud.txt index 558984c02e07..e04c55630b0c 100644 --- a/requirements/static/ci/py3.8/cloud.txt +++ b/requirements/static/ci/py3.8/cloud.txt @@ -2,28 +2,33 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.8/cloud.txt --pip-args='--constraint=requirements/static/ci/py3.8/linux.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in 
requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.7 +aiohttp==3.9.0 # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 - # via etcd3-py + # -c requirements/static/ci/py3.8/linux.txt + # etcd3-py aiosignal==1.2.0 - # via aiohttp + # via + # -c requirements/static/ci/py3.8/linux.txt + # aiohttp apache-libcloud==2.5.0 ; sys_platform != "win32" # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/cloud.in # -r requirements/static/ci/common.in -asn1crypto==1.4.0 +asn1crypto==1.3.0 # via + # -c requirements/static/ci/py3.8/linux.txt # certvalidator # oscrypto async-timeout==4.0.2 - # via aiohttp -attrs==21.2.0 # via + # -c requirements/static/ci/py3.8/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.8/linux.txt # aiohttp # jsonschema # pytest @@ -31,722 +36,507 @@ attrs==21.2.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.27 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # 
azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.52 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt 
-azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # 
azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 
- # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 - # via virtualenv -bcrypt==3.2.0 # via + # -c requirements/static/ci/py3.8/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # moto boto==2.49.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in botocore==1.24.46 # via + # -c requirements/static/ci/py3.8/linux.txt # boto3 # moto # s3transfer cachetools==4.2.2 - # via google-auth -cassandra-driver==3.25.0 - # via -r requirements/static/ci/common.in -certifi==2022.12.7 # via + # -c requirements/static/ci/py3.8/linux.txt + # google-auth +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # requests certvalidator==0.11.1 - # via vcert + # via + # -c requirements/static/ci/py3.8/linux.txt + # vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pynacl -chardet==3.0.4 - # via requests 
-charset-normalizer==2.0.12 - # via aiohttp -cheetah3==3.2.6.post1 - # via -r requirements/static/ci/common.in +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in -ciscoconfparse==1.5.46 - # via napalm -click==8.0.1 - # via geomet +ciscoconfparse==1.5.19 + # via + # -c requirements/static/ci/py3.8/linux.txt + # napalm +click==7.1.1 + # via + # -c requirements/static/ci/py3.8/linux.txt + # geomet clustershell==1.8.3 - # via -r requirements/static/ci/common.in -colorama==0.4.4 - # via ciscoconfparse + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +colorama==0.4.3 + # via + # -c requirements/static/ci/py3.8/linux.txt + # ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt -croniter==1.0.15 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -cryptography==3.3.2 - # via - # adal - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in # etcd3-py # moto # paramiko # pyopenssl + # pyspnego # 
requests-ntlm # smbprotocol # vcert -distlib==0.3.3 - # via virtualenv -distro==1.6.0 +distlib==0.3.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # virtualenv +distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # pytest-skip-markers -dnspython==2.1.0 +dnspython==1.16.0 # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.2 +docker==6.1.3 # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/pytest.txt +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 - # via pytest + # via + # -c requirements/static/ci/py3.8/linux.txt + # pytest filelock==3.0.12 - # via virtualenv + # via + # -c requirements/static/ci/py3.8/linux.txt + # virtualenv flaky==3.7.0 - # via -r requirements/pytest.txt + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/pytest.txt frozenlist==1.3.0 # via + # -c requirements/static/ci/py3.8/linux.txt # aiohttp # aiosignal future==0.18.3 # via + # -c requirements/static/ci/py3.8/linux.txt # napalm # textfsm genshi==0.7.5 - # via -r requirements/static/ci/common.in -geomet==0.2.1.post1 - # via cassandra-driver + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in google-auth==2.1.0 - # via kubernetes -idna==2.8 # via + # 
-c requirements/static/ci/py3.8/linux.txt + # kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # etcd3-py # requests # yarl -immutables==0.16 - # via contextvars -importlib-metadata==4.8.1 - # via -r requirements/static/pkg/linux.in -iniconfig==1.1.1 - # via pytest -ipaddress==1.0.23 - # via kubernetes -isodate==0.6.0 - # via msrest +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # contextvars +importlib-metadata==4.6.4 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via + # -c requirements/static/ci/py3.8/linux.txt + # pytest +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.8/linux.txt + # kubernetes jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy -jaraco.functools==3.3.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # junos-eznc # moto # napalm -jmespath==0.10.0 +jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 # 
botocore jsonschema==3.2.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in junos-eznc==2.4.0 ; sys_platform != "win32" and python_version <= "3.10" # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # napalm -jxmlease==1.0.3 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -kazoo==2.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in keyring==5.7.1 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in kubernetes==3.0.0 - # via -r requirements/static/ci/common.in -libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in -loguru==0.6.0 - # via ciscoconfparse + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc # napalm # ncclient mako==1.2.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c 
requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # jinja2 # mako # moto # werkzeug -mock==4.0.3 - # via -r requirements/pytest.txt -more-itertools==8.8.0 +mock==5.1.0 # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +moto==3.0.1 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.21 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # 
azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via + # -c requirements/static/ci/py3.8/linux.txt # aiohttp # yarl -napalm==3.3.1 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" - # via -r requirements/static/ci/common.in -ncclient==0.6.12 +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +ncclient==0.6.4 # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc - # napalm netaddr==0.7.19 # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/cloud.in # junos-eznc # napalm # pyeapi -netmiko==3.4.0 - # via napalm -ntc-templates==2.3.2 +netmiko==3.2.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # napalm +ntc-templates==1.4.1 # via + # -c 
requirements/static/ci/py3.8/linux.txt # junos-eznc - # netmiko ntlm-auth==1.3.0 + # via requests-ntlm +oscrypto==1.2.0 # via - # requests-ntlm - # smbprotocol -oauthlib==3.2.2 - # via requests-oauthlib -oscrypto==1.2.1 - # via certvalidator -packaging==21.3 + # -c requirements/static/ci/py3.8/linux.txt + # certvalidator +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # junos-eznc # napalm # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse -platformdirs==2.3.0 - # via virtualenv -pluggy==1.0.0 - # via pytest -portend==2.7.1 - # via cherrypy +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # virtualenv +pluggy==0.13.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # cherrypy profitbricks==4.1.3 # via -r requirements/static/ci/cloud.in psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pyasn1-modules==0.2.8 - # via google-auth +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.8/linux.txt + # google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/py3.8/linux.txt # pyasn1-modules # rsa - # smbprotocol -pycparser==2.19 - # via cffi -pycryptodomex==3.10.1 - # via -r requirements/crypto.txt -pyeapi==0.8.4 - # via napalm 
+pycparser==2.17 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/crypto.txt +pyeapi==0.8.3 + # via + # -c requirements/static/ci/py3.8/linux.txt + # napalm +pyfakefs==5.3.1 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/pytest.txt pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" - # via -r requirements/static/ci/common.in -pyjwt==2.4.0 - # via adal -pynacl==1.4.0 - # via paramiko -pyopenssl==20.0.1 # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc - # packaging pypsexec==0.1.0 # via -r requirements/static/ci/cloud.in -pyrsistent==0.18.0 - # via jsonschema -pyserial==3.5 +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.8/linux.txt + # jsonschema +pyserial==3.4 # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc # netmiko +pyspnego==0.8.0 + # via + # -r requirements/static/ci/cloud.in + # smbprotocol pytest-custom-exit-code==0.3.0 - # via -r requirements/pytest.txt + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/pytest.txt pytest-helpers-namespace==2021.4.29 # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 - # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != 
"win32" - # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 - # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-httpserver==1.0.8 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 # via + # -c requirements/static/ci/py3.8/linux.txt + # pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # -c requirements/static/ci/py3.8/linux.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.5.0 - # via -r requirements/pytest.txt -pytest-system-statistics==1.0.2 - # via pytest-salt-factories -pytest-tempdir==2019.10.12 +pytest-subtests==0.4.0 # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via + # -c requirements/static/ci/py3.8/linux.txt # pytest-salt-factories -pytest-timeout==2.0.2 - # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest-timeout==1.4.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/pytest.txt +pytest==7.2.0 # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/pytest.txt # pytest-custom-exit-code # pytest-helpers-namespace @@ -755,185 +545,251 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout -python-dateutil==2.8.2 +python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes # moto # vcert python-etcd==0.4.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in python-gnupg==0.4.8 - # 
via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # moto # tempora -pyvmomi==7.0.2 - # via -r requirements/static/ci/common.in +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # clustershell # junos-eznc # kubernetes # napalm + # pytest-salt-factories # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/zeromq.txt # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # profitbricks # pyvmomi # pywinrm # requests-ntlm - # requests-oauthlib # responses # vcert -responses==0.14.0 - # via moto +responses==0.10.6 + # via + # -c requirements/static/ci/py3.8/linux.txt + # moto rfc3987==1.3.8 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c 
requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 - # via google-auth -s3transfer==0.5.0 - # via boto3 -scp==0.14.1 # via + # -c requirements/static/ci/py3.8/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # boto3 +scp==0.13.2 + # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc # napalm # netmiko semantic-version==2.9.0 - # via etcd3-py + # via + # -c requirements/static/ci/py3.8/linux.txt + # etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # msrestazure + # more-itertools # ncclient # paramiko # profitbricks - # pynacl - # pyopenssl # pypsexec # python-dateutil # pyvmomi # pywinrm # responses - # smbprotocol # textfsm # transitions # vcert # virtualenv # websocket-client -smbprotocol==0.1.1 - # via pypsexec +smbprotocol==1.10.1 + # via + # -r requirements/static/ci/cloud.in + # pypsexec smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in strict-rfc3339==0.7 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in tempora==4.1.1 - # via portend -tenacity==8.0.1 - # via netmiko -textfsm==1.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # portend +terminal==0.4.0 + # via + # -c 
requirements/static/ci/py3.8/linux.txt + # ntc-templates +textfsm==1.1.0 + # via + # -c requirements/static/ci/py3.8/linux.txt # napalm + # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in tomli==2.0.1 - # via pytest + # via + # -c requirements/static/ci/py3.8/linux.txt + # pytest transitions==0.8.9 - # via junos-eznc -typing-extensions==3.10.0.2 # via + # -c requirements/static/ci/py3.8/linux.txt + # junos-eznc +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.8/linux.txt # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # botocore + # docker # kubernetes # python-etcd # requests - # responses vcert==0.7.4 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -virtualenv==20.8.0 # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # pytest-salt-factories -watchdog==2.1.5 - # via -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in websocket-client==0.40.0 # via + # -c requirements/static/ci/py3.8/linux.txt # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.8/linux.txt # moto # pytest-httpserver xmltodict==0.12.0 # via + # -c requirements/static/ci/py3.8/linux.txt # moto # 
pywinrm yamlordereddictloader==0.4.0 - # via junos-eznc + # via + # -c requirements/static/ci/py3.8/linux.txt + # junos-eznc yarl==1.7.2 - # via aiohttp -zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/py3.8/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.8/docs.txt b/requirements/static/ci/py3.8/docs.txt index 05d47e714327..9e1df27652bb 100644 --- a/requirements/static/ci/py3.8/docs.txt +++ b/requirements/static/ci/py3.8/docs.txt @@ -2,17 +2,17 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.8/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt # alabaster==0.7.12 # via sphinx babel==2.9.1 # via sphinx -certifi==2022.12.7 +certifi==2023.07.22 # via # -c requirements/static/ci/py3.8/linux.txt # requests -chardet==3.0.4 +charset-normalizer==3.2.0 # via # -c requirements/static/ci/py3.8/linux.txt # requests @@ -34,7 +34,7 @@ distro==1.5.0 # -r requirements/base.txt docutils==0.16 # via sphinx -idna==2.8 +idna==3.2 # via # -c requirements/static/ci/py3.8/linux.txt # requests @@ -78,7 +78,7 @@ looseversion==1.0.2 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt -markdown-it-py==2.1.0 +markdown-it-py==2.2.0 # via # mdit-py-plugins # myst-docutils @@ -130,7 +130,7 @@ pytz==2022.1 # -c requirements/static/ci/py3.8/linux.txt # babel # tempora 
-pyyaml==5.4.1 +pyyaml==6.0.1 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt @@ -139,7 +139,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt @@ -177,13 +177,13 @@ tempora==4.1.1 # via # -c requirements/static/ci/py3.8/linux.txt # portend -typing-extensions==4.2.0 +typing-extensions==4.8.0 # via # -c requirements/static/ci/py3.8/linux.txt # myst-docutils uc-micro-py==1.0.1 # via linkify-it-py -urllib3==1.26.6 +urllib3==1.26.18 # via # -c requirements/static/ci/py3.8/linux.txt # requests diff --git a/requirements/static/ci/py3.8/freebsd-crypto.txt b/requirements/static/ci/py3.8/freebsd-crypto.txt index 33138773050a..4312bea13845 100644 --- a/requirements/static/ci/py3.8/freebsd-crypto.txt +++ b/requirements/static/ci/py3.8/freebsd-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.8/freebsd-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.8/freebsd.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/freebsd-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.8/freebsd.txt b/requirements/static/ci/py3.8/freebsd.txt index fbace010c72c..486a8ad0ac29 100644 --- a/requirements/static/ci/py3.8/freebsd.txt +++ b/requirements/static/ci/py3.8/freebsd.txt @@ -2,13 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.8/freebsd.txt --pip-args='--constraint=requirements/static/pkg/py3.8/freebsd.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in 
requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -adal==1.2.5 - # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -20,7 +16,7 @@ asn1crypto==1.3.0 # oscrypto async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -29,311 +25,13 @@ attrs==20.3.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.26 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # 
azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.51 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store 
-azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # 
azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure 
-azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==3.1.6 # via + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -348,33 +46,36 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.24.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # requests certvalidator==0.11.1 # via vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/freebsd.in ciscoconfparse==1.5.19 @@ -386,15 +87,15 @@ clustershell==1.8.3 colorama==0.4.3 # via ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==3.3.2 +cryptography==41.0.7 # via - # adal - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/static/pkg/freebsd.in # etcd3-py # moto # paramiko @@ -404,6 
+105,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt # -r requirements/static/pkg/freebsd.in # pytest-skip-markers @@ -412,11 +114,9 @@ dnspython==1.16.0 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.3 - # via - # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -436,48 +136,60 @@ genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.2.1.post1 # via cassandra-driver -gitdb==4.0.5 +gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/static/ci/common.in -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes hglib==2.6.1 # via -r requirements/static/ci/freebsd.in -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # contextvars importlib-metadata==4.6.4 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/static/pkg/freebsd.in iniconfig==1.0.1 # via pytest ipaddress==1.0.22 # via kubernetes -isodate==0.6.0 - # via msrest jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # jaraco.collections jinja2==3.1.2 # via + 
# -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt # junos-eznc # moto # napalm jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -499,7 +211,9 @@ kubernetes==3.0.0 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/base.txt lxml==4.9.1 # via # junos-eznc @@ -509,6 +223,7 @@ mako==1.2.2 # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt # jinja2 # mako @@ -516,117 +231,28 @@ markupsafe==2.1.2 # werkzeug mercurial==6.0.1 # via -r requirements/static/ci/freebsd.in -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.19 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # 
azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via # aiohttp # yarl -napalm==3.1.0 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" # via -r requirements/static/ci/common.in ncclient==0.6.4 # via junos-eznc @@ -639,13 +265,13 @@ netmiko==3.2.0 # via napalm 
ntc-templates==1.4.1 # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib oscrypto==1.2.0 # via certvalidator -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -655,7 +281,7 @@ paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via # -r requirements/static/ci/common.in # ciscoconfparse @@ -668,9 +294,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -682,27 +311,32 @@ pyasn1==0.4.8 # pyasn1-modules # rsa pycparser==2.17 - # via cffi + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # cffi pycryptodomex==3.9.8 - # via -r requirements/crypto.txt + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/crypto.txt pyeapi==0.8.3 # via napalm -pygit2==1.8.0 ; python_version >= "3.7" +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/freebsd.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" # via -r requirements/static/ci/common.in -pyjwt==2.4.0 - # via adal -pynacl==1.3.0 - # via paramiko -pyopenssl==19.1.0 +pynacl==1.5.0 # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/static/pkg/freebsd.in # etcd3-py pyparsing==3.0.9 - # via - # junos-eznc - # packaging + # via junos-eznc pyrsistent==0.17.3 # via jsonschema pyserial==3.4 @@ -716,13 +350,13 @@ pytest-helpers-namespace==2021.4.29 # -r 
requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -731,13 +365,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -747,14 +377,11 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/static/pkg/freebsd.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes @@ -763,48 +390,44 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/static/pkg/freebsd.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # moto # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt # clustershell # junos-eznc # kubernetes # napalm + # pytest-salt-factories # yamllint # yamlordereddictloader pyzmq==23.2.0 # via + # -c 
requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # pyvmomi - # requests-oauthlib # responses # vcert responses==0.10.6 @@ -823,29 +446,25 @@ scp==0.13.2 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/static/pkg/freebsd.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # google-auth - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # mock # more-itertools - # msrestazure # ncclient # paramiko - # pynacl - # pyopenssl # python-dateutil # pyvmomi # responses @@ -854,14 +473,16 @@ six==1.16.0 # vcert # virtualenv # websocket-client -smmap==3.0.4 +smmap==4.0.0 # via gitdb -sqlparse==0.4.2 +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # portend terminal==0.4.0 # via ntc-templates textfsm==1.1.0 @@ -870,20 +491,24 @@ textfsm==1.1.0 # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # -r requirements/static/pkg/freebsd.in toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest -transitions==0.8.1 +transitions==0.8.9 # via junos-eznc 
-typing-extensions==4.2.0 +typing-extensions==4.8.0 # via # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -899,7 +524,9 @@ websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver @@ -912,9 +539,13 @@ yamlordereddictloader==0.4.0 yarl==1.7.2 # via aiohttp zc.lockfile==1.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.8/freebsd.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.8/invoke.txt b/requirements/static/ci/py3.8/invoke.txt deleted file mode 100644 index 356dd4213081..000000000000 --- a/requirements/static/ci/py3.8/invoke.txt +++ /dev/null @@ -1,14 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=requirements/static/ci/py3.8/invoke.txt --pip-args='--constraint=requirements/static/ci/py3.8/linux.txt' requirements/static/ci/invoke.in -# -blessings==1.7 - # via -r requirements/static/ci/invoke.in -invoke==1.4.1 - # via -r requirements/static/ci/invoke.in -pyyaml==5.4.1 - # via -r requirements/static/ci/invoke.in -six==1.16.0 - # via blessings diff --git a/requirements/static/ci/py3.8/lint.txt b/requirements/static/ci/py3.8/lint.txt index b2de8278b693..d4d26cab35e3 100644 --- a/requirements/static/ci/py3.8/lint.txt +++ b/requirements/static/ci/py3.8/lint.txt @@ -2,518 +2,336 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.8/lint.txt --pip-args='--constraint=requirements/static/ci/py3.8/linux.txt' 
requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.7 +aiohttp==3.9.0 # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 - # via etcd3-py + # -c requirements/static/ci/py3.8/linux.txt + # etcd3-py aiosignal==1.2.0 - # via aiohttp -ansible-core==2.11.4 - # via ansible + # via + # -c requirements/static/ci/py3.8/linux.txt + # aiohttp +ansible-core==2.11.7 + # via + # -c requirements/static/ci/py3.8/linux.txt + # ansible ansible==4.4.0 ; python_version < "3.9" - # via -r requirements/static/ci/linux.in -apache-libcloud==3.3.1 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in apscheduler==3.6.3 - # via python-telegram-bot -asn1crypto==1.4.0 # via + # -c requirements/static/ci/py3.8/linux.txt + # python-telegram-bot +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.8/linux.txt # certvalidator # oscrypto astroid==2.3.3 # via pylint async-timeout==4.0.2 - # via aiohttp -attrs==21.2.0 # via + # -c requirements/static/ci/py3.8/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.8/linux.txt # aiohttp # jsonschema -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.27 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # 
azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.52 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure 
-azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via 
azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt 
-azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 - # via virtualenv + # via + # -c requirements/static/ci/py3.8/linux.txt + # virtualenv backports.zoneinfo==0.2.1 - # via tzlocal -bcrypt==3.2.0 # via + # -c requirements/static/ci/py3.8/linux.txt + # tzlocal +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # moto boto==2.49.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in botocore==1.24.46 # via + # -c requirements/static/ci/py3.8/linux.txt # boto3 # moto # s3transfer cachetools==4.2.2 # via + # -c 
requirements/static/ci/py3.8/linux.txt # google-auth # python-telegram-bot -cassandra-driver==3.25.0 - # via -r requirements/static/ci/common.in -certifi==2022.12.7 +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # python-telegram-bot # requests certvalidator==0.11.1 - # via vcert + # via + # -c requirements/static/ci/py3.8/linux.txt + # vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pygit2 # pynacl -charset-normalizer==2.0.4 +charset-normalizer==3.2.0 # via - # aiohttp + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # requests -cheetah3==3.2.6.post1 - # via -r requirements/static/ci/common.in +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in -ciscoconfparse==1.5.46 - # via napalm -click==8.0.1 - # via geomet +ciscoconfparse==1.5.19 + # via + # -c requirements/static/ci/py3.8/linux.txt + # napalm +click==7.1.1 + # via + # -c requirements/static/ci/py3.8/linux.txt + # geomet clustershell==1.8.3 - # via -r requirements/static/ci/common.in -colorama==0.4.4 - # via ciscoconfparse + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in 
+colorama==0.4.3 + # via + # -c requirements/static/ci/py3.8/linux.txt + # ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt -croniter==1.0.15 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -cryptography==3.4.7 # via - # adal + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in # ansible-core - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common # etcd3-py # moto # paramiko # pyopenssl # vcert distlib==0.3.2 - # via virtualenv -distro==1.6.0 - # via -r requirements/base.txt -dnspython==2.1.0 # via + # -c requirements/static/ci/py3.8/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/base.txt +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.0 - # via -r requirements/static/ci/common.in -etcd3-py==0.1.6 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +docker==6.1.3 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/lint.in +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in filelock==3.0.12 - # via virtualenv + # via + # -c requirements/static/ci/py3.8/linux.txt + # virtualenv frozenlist==1.3.0 # via + # -c requirements/static/ci/py3.8/linux.txt # aiohttp # aiosignal future==0.18.3 # via + # -c requirements/static/ci/py3.8/linux.txt # napalm # textfsm genshi==0.7.5 - # via -r requirements/static/ci/common.in 
-geomet==0.2.1.post1 - # via cassandra-driver + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" - # via -r requirements/static/ci/common.in -google-auth==2.0.1 - # via kubernetes + # via + # -c requirements/static/ci/py3.8/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # kubernetes hglib==2.6.1 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # etcd3-py # requests # yarl -immutables==0.16 - # via contextvars +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # contextvars importlib-metadata==4.6.4 - # via -r requirements/static/pkg/linux.in -ipaddress==1.0.23 - # via kubernetes -isodate==0.6.0 - # via msrest + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.8/linux.txt + # kubernetes isort==4.3.21 # via pylint jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy -jaraco.functools==3.3.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c 
requirements/static/ci/py3.8/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # ansible-core # junos-eznc # moto # napalm -jmespath==0.10.0 +jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 # botocore jsonschema==3.2.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in junos-eznc==2.4.0 ; sys_platform != "win32" and python_version <= "3.10" # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # napalm -jxmlease==1.0.3 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -kazoo==2.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in keyring==5.7.1 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in kubernetes==3.0.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in lazy-object-proxy==1.4.3 # via astroid -libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in -loguru==0.6.0 - # via ciscoconfparse 
+libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc # napalm # ncclient mako==1.2.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # jinja2 # mako @@ -522,236 +340,219 @@ markupsafe==2.1.2 mccabe==0.6.1 # via pylint mercurial==6.0.1 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in modernize==0.5 # via saltpylint -more-itertools==8.8.0 +more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +moto==3.0.1 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in msgpack==1.0.2 - # via -r requirements/base.txt -msrest==0.6.21 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # 
azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/base.txt multidict==6.0.2 # via + # -c requirements/static/ci/py3.8/linux.txt # 
aiohttp # yarl -napalm==3.3.1 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" - # via -r requirements/static/ci/common.in -ncclient==0.6.12 +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +ncclient==0.6.4 # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc - # napalm -netaddr==0.8.0 +netaddr==0.7.19 # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc # napalm # pyeapi -netmiko==3.4.0 - # via napalm -ntc-templates==2.2.2 +netmiko==3.2.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # napalm +ntc-templates==1.4.1 # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc - # netmiko -oauthlib==3.2.2 - # via requests-oauthlib -oscrypto==1.2.1 - # via certvalidator -packaging==21.3 +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # certvalidator +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # ansible-core + # docker paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # junos-eznc # napalm # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse pathspec==0.9.0 - # via yamllint + # via + # -c requirements/static/ci/py3.8/linux.txt + # yamllint +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # watchdog platformdirs==2.2.0 - # via virtualenv -portend==2.7.1 - # via cherrypy + # via + # -c requirements/static/ci/py3.8/linux.txt + # virtualenv +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # cherrypy psutil==5.8.0 - # via -r 
requirements/base.txt -pyasn1-modules==0.2.8 - # via google-auth + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/base.txt +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.8/linux.txt + # google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/py3.8/linux.txt # pyasn1-modules # rsa pycodestyle==2.5.0 # via saltpylint -pycparser==2.20 - # via cffi -pycryptodomex==3.10.1 - # via -r requirements/crypto.txt -pyeapi==0.8.4 - # via napalm -pygit2==1.0.3 ; python_version <= "3.8" - # via -r requirements/static/ci/linux.in +pycparser==2.17 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # cffi +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/crypto.txt +pyeapi==0.8.3 + # via + # -c requirements/static/ci/py3.8/linux.txt + # napalm +pygit2==1.13.1 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in pyiface==0.0.11 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in pyjwt==2.4.0 # via - # adal + # -c requirements/static/ci/py3.8/linux.txt # twilio pylint==2.4.4 # via # -r requirements/static/ci/lint.in # saltpylint -pymysql==1.0.2 ; python_version > "3.5" - # via -r requirements/static/ci/linux.in -pynacl==1.4.0 - # via paramiko -pyopenssl==20.0.1 +pymysql==1.0.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r 
requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc - # packaging -pyrsistent==0.18.0 - # via jsonschema -pyserial==3.5 +pyrsistent==0.17.3 # via + # -c requirements/static/ci/py3.8/linux.txt + # jsonschema +pyserial==3.4 + # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc # netmiko python-consul==1.1.0 - # via -r requirements/static/ci/linux.in -python-dateutil==2.8.2 # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes # moto # vcert python-etcd==0.4.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/linux.in -python-telegram-bot==13.7 ; python_version > "3.5" - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # apscheduler # moto # python-telegram-bot # tempora # twilio -pyvmomi==7.0.2 - # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +pyyaml==6.0.1 # via + # -c 
requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # ansible-core # clustershell @@ -761,62 +562,81 @@ pyyaml==5.4.1 # yamllint # yamlordereddictloader pyzmq==23.2.0 - # via -r requirements/zeromq.txt + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/zeromq.txt redis-py-cluster==2.1.3 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in redis==3.5.3 - # via redis-py-cluster -requests-oauthlib==1.3.0 - # via msrest -requests==2.26.0 # via + # -c requirements/static/ci/py3.8/linux.txt + # redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # python-consul # pyvmomi - # requests-oauthlib # responses # twilio # vcert resolvelib==0.5.4 - # via ansible-core -responses==0.13.4 - # via moto + # via + # -c requirements/static/ci/py3.8/linux.txt + # ansible-core +responses==0.10.6 + # via + # -c requirements/static/ci/py3.8/linux.txt + # moto rfc3987==1.3.8 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 - # via google-auth -s3transfer==0.5.0 - # via boto3 -saltpylint==2020.9.28 + # via + # -c requirements/static/ci/py3.8/linux.txt + # google-auth 
+s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.8/linux.txt + # boto3 +saltpylint==2023.8.3 # via -r requirements/static/ci/lint.in -scp==0.13.6 +scp==0.13.2 # via + # -c requirements/static/ci/py3.8/linux.txt # junos-eznc # napalm # netmiko semantic-version==2.9.0 - # via etcd3-py + # via + # -c requirements/static/ci/py3.8/linux.txt + # etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # apscheduler # astroid # bcrypt @@ -825,16 +645,13 @@ six==1.16.0 # etcd3-py # genshi # geomet - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # msrestazure + # more-itertools # ncclient # paramiko - # pynacl - # pyopenssl # python-consul # python-dateutil # pyvmomi @@ -845,70 +662,128 @@ six==1.16.0 # virtualenv # websocket-client slack-bolt==1.15.5 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in slack-sdk==3.19.5 - # via slack-bolt + # via + # -c requirements/static/ci/py3.8/linux.txt + # slack-bolt smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in strict-rfc3339==0.7 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in tempora==4.1.1 - # via portend -tenacity==8.0.1 - # via netmiko -textfsm==1.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # portend +terminal==0.4.0 + # via + # -c requirements/static/ci/py3.8/linux.txt + # 
ntc-templates +textfsm==1.1.0 + # via + # -c requirements/static/ci/py3.8/linux.txt # napalm + # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 # via + # -c requirements/static/ci/py3.8/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/ci/lint.in tornado==6.1 - # via python-telegram-bot -transitions==0.8.8 - # via junos-eznc + # via + # -c requirements/static/ci/py3.8/linux.txt + # python-telegram-bot +transitions==0.8.9 + # via + # -c requirements/static/ci/py3.8/linux.txt + # junos-eznc twilio==7.9.2 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in tzlocal==3.0 - # via apscheduler -urllib3==1.26.6 # via + # -c requirements/static/ci/py3.8/linux.txt + # apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt # botocore + # docker # kubernetes # python-etcd # requests - # responses vcert==0.7.4 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in virtualenv==20.7.2 - # via -r requirements/static/ci/common.in -watchdog==2.1.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in websocket-client==0.40.0 # via + # -c requirements/static/ci/py3.8/linux.txt # docker # kubernetes -werkzeug==2.2.3 - # via moto +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.8/linux.txt + # 
moto wrapt==1.11.1 # via astroid xmltodict==0.12.0 - # via moto + # via + # -c requirements/static/ci/py3.8/linux.txt + # moto yamllint==1.26.3 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.8/linux.txt + # -r requirements/static/ci/linux.in yamlordereddictloader==0.4.0 - # via junos-eznc + # via + # -c requirements/static/ci/py3.8/linux.txt + # junos-eznc yarl==1.7.2 - # via aiohttp -zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/py3.8/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -c requirements/static/ci/py3.8/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.8/linux-crypto.txt b/requirements/static/ci/py3.8/linux-crypto.txt index 18165a062ba0..064707bf3c20 100644 --- a/requirements/static/ci/py3.8/linux-crypto.txt +++ b/requirements/static/ci/py3.8/linux-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.8/linux-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.8/linux.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/linux-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.8/linux.txt b/requirements/static/ci/py3.8/linux.txt index 66550bd047dd..1349031a8091 100644 --- a/requirements/static/ci/py3.8/linux.txt +++ b/requirements/static/ci/py3.8/linux.txt @@ -2,13 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.8/linux.txt 
--pip-args='--constraint=requirements/static/pkg/py3.8/linux.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.3 - # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -26,7 +22,7 @@ asn1crypto==1.3.0 # oscrypto async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -35,316 +31,15 @@ attrs==20.3.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.18 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # 
azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.5 - # via azure -azure-datalake-store==0.0.44 - # via azure -azure-eventgrid==1.2.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.0 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.4.1 - # via azure-mgmt -azure-mgmt-containerregistry==2.7.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt 
-azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.5.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.0 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.6.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.0.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # 
azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.8.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.1.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.0.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # 
azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.6 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.0 - # via - # azure-cosmosdb-table - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 # via virtualenv +backports.zoneinfo==0.2.1 + # via tzlocal bcrypt==3.1.6 # via + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -361,34 +56,37 @@ cachetools==4.2.2 # python-telegram-bot cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # python-telegram-bot # requests certvalidator==0.11.1 # via vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in ciscoconfparse==1.5.19 @@ -400,16 +98,16 @@ clustershell==1.8.3 colorama==0.4.3 # via ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt + # 
via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==3.3.2 +cryptography==41.0.7 # via - # adal + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/static/pkg/linux.in # ansible-core - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common # etcd3-py # moto # paramiko @@ -419,6 +117,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt # pytest-skip-markers dnspython==1.16.0 @@ -426,11 +125,9 @@ dnspython==1.16.0 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.3 - # via - # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -450,42 +147,53 @@ genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 # via cassandra-driver -gitdb==4.0.5 +gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/static/ci/common.in -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes hglib==2.6.1 # via -r requirements/static/ci/linux.in -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # contextvars importlib-metadata==4.6.4 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/static/pkg/linux.in iniconfig==1.0.1 # via pytest ipaddress==1.0.22 # via kubernetes -isodate==0.6.0 - # via msrest jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # 
jaraco.collections jaraco.collections==3.4.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt # ansible-core # junos-eznc @@ -493,6 +201,7 @@ jinja2==3.1.2 # napalm jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -514,7 +223,9 @@ kubernetes==3.0.0 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via # junos-eznc @@ -524,6 +235,7 @@ mako==1.2.2 # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt # jinja2 # mako @@ -531,114 +243,28 @@ markupsafe==2.1.2 # werkzeug mercurial==6.0.1 # via -r requirements/static/ci/linux.in -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.14 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry 
- # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.3 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # 
azure-mgmt-web multidict==6.0.2 # via # aiohttp # yarl -napalm==3.1.0 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" # via -r requirements/static/ci/common.in ncclient==0.6.4 # via junos-eznc @@ -651,14 +277,14 @@ netmiko==3.2.0 # via napalm ntc-templates==1.4.1 # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib oscrypto==1.2.0 # via certvalidator packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt # ansible-core + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -668,7 +294,7 @@ paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via # -r requirements/static/ci/common.in # ciscoconfparse @@ -681,9 +307,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -695,27 +324,34 @@ pyasn1==0.4.8 # pyasn1-modules # rsa pycparser==2.17 - # via cffi + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # cffi pycryptodomex==3.9.8 - # via -r requirements/crypto.txt + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/crypto.txt pyeapi==0.8.3 # via napalm -pygit2==1.0.3 ; python_version <= "3.8" +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/linux.in pyiface==0.0.11 # via -r requirements/static/ci/linux.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" # via -r requirements/static/ci/common.in pyjwt==2.4.0 - # via - # adal - # twilio -pymysql==1.0.2 ; python_version > "3.5" + # via 
twilio +pymysql==1.0.2 # via -r requirements/static/ci/linux.in -pynacl==1.3.0 - # via paramiko -pyopenssl==19.1.0 +pynacl==1.5.0 # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 @@ -733,13 +369,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -748,13 +384,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -764,16 +396,13 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-consul==1.1.0 # via -r requirements/static/ci/linux.in python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes @@ -782,60 +411,55 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/linux.in -python-telegram-bot==13.7 ; python_version > "3.5" + # via + # -c 
requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 # via -r requirements/static/ci/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # apscheduler # moto # python-telegram-bot # tempora # twilio - # tzlocal pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt # ansible-core # clustershell # junos-eznc # kubernetes # napalm + # pytest-salt-factories # yamllint # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/zeromq.txt # pytest-salt-factories redis-py-cluster==2.1.3 # via -r requirements/static/ci/linux.in redis==3.5.3 # via redis-py-cluster -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # python-consul # pyvmomi - # requests-oauthlib # responses # twilio # vcert @@ -846,7 +470,9 @@ responses==0.10.6 rfc3987==1.3.8 # via -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 # via google-auth s3transfer==0.5.2 @@ -859,29 +485,26 @@ scp==0.13.2 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # apscheduler # bcrypt # 
cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # google-auth - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # mock # more-itertools # ncclient # paramiko - # pynacl - # pyopenssl # python-consul # python-dateutil # pyvmomi @@ -895,14 +518,16 @@ slack-bolt==1.15.5 # via -r requirements/static/ci/linux.in slack-sdk==3.19.5 # via slack-bolt -smmap==3.0.4 +smmap==4.0.0 # via gitdb -sqlparse==0.4.2 +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # portend terminal==0.4.0 # via ntc-templates textfsm==1.1.0 @@ -911,26 +536,30 @@ textfsm==1.1.0 # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest tornado==6.1 # via python-telegram-bot -transitions==0.8.1 +transitions==0.8.9 # via junos-eznc twilio==7.9.2 # via -r requirements/static/ci/linux.in -typing-extensions==4.2.0 +typing-extensions==4.8.0 # via # pytest-shell-utilities # pytest-system-statistics -tzlocal==2.1 +tzlocal==3.0 # via apscheduler -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -946,7 +575,9 @@ websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver @@ -959,9 +590,13 @@ yamlordereddictloader==0.4.0 yarl==1.7.2 # via aiohttp zc.lockfile==1.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.8/linux.txt + # importlib-metadata # The 
following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.8/windows-crypto.txt b/requirements/static/ci/py3.8/windows-crypto.txt index 53af09c41b2a..17fbd42c3d96 100644 --- a/requirements/static/ci/py3.8/windows-crypto.txt +++ b/requirements/static/ci/py3.8/windows-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.8/windows-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.8/windows.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/windows-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.8/windows.txt b/requirements/static/ci/py3.8/windows.txt index 42b3da15f552..1fb470c86dcb 100644 --- a/requirements/static/ci/py3.8/windows.txt +++ b/requirements/static/ci/py3.8/windows.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.8/windows.txt --pip-args='--constraint=requirements/static/pkg/py3.8/windows.txt' requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.8/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.1 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -22,8 +22,8 @@ attrs==20.3.0 backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==4.0.1 - # via passlib -boto3==1.21.46 ; python_version >= "3.6" + # via -r 
requirements/static/ci/common.in +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -38,43 +38,54 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt # kubernetes # requests cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt # clr-loader # cryptography # pygit2 -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt click==7.1.2 # via geomet clr-loader==0.2.4 - # via pythonnet + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # pythonnet clustershell==1.8.3 # via -r requirements/static/ci/common.in colorama==0.4.1 # via pytest contextvars==2.4 - # via -r requirements/base.txt -cryptography==3.4.7 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/base.txt +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/windows.txt # etcd3-py # moto @@ -84,6 +95,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt # pytest-skip-markers dmidecode==0.9.0 @@ -92,11 +104,9 @@ dnspython==1.16.0 # via # -r requirements/static/ci/common.in # python-etcd -docker-pycreds==0.4.0 - # via docker -docker==2.7.0 - # via -r 
requirements/static/ci/common.in -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -113,45 +123,64 @@ genshi==0.7.5 geomet==0.1.2 # via cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # contextvars importlib-metadata==4.6.4 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/windows.txt iniconfig==1.0.1 # via pytest ioloop==0.1a0 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/windows.txt ipaddress==1.0.22 # via kubernetes jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # jaraco.collections jaraco.collections==3.3.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.0 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt # moto jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt # -r requirements/static/ci/common.in 
# boto3 @@ -163,32 +192,38 @@ keyring==5.7.1 kubernetes==3.0.0 # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/base.txt lxml==4.9.1 - # via -r requirements/windows.txt -mako==1.2.2 # via - # -r requirements/static/ci/common.in + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/windows.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt # jinja2 # mako # moto # werkzeug -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==8.2.0 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt # pytest-salt-factories multidict==6.0.2 @@ -197,11 +232,13 @@ multidict==6.0.2 # yarl ntlm-auth==1.5.0 # via requests-ntlm -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt + # docker # pytest -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via -r requirements/static/ci/common.in patch==1.16 # via -r requirements/static/ci/windows.in @@ -214,9 +251,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.6 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -225,27 +265,38 @@ pyasn1-modules==0.2.4 # via google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/windows.txt # pyasn1-modules # rsa 
pycparser==2.21 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/windows.txt # cffi pycryptodomex==3.10.1 - # via -r requirements/crypto.txt -pygit2==1.9.1 ; python_version >= "3.7" + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/windows.in pymssql==2.2.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/windows.txt pymysql==1.0.2 - # via -r requirements/windows.txt -pyopenssl==20.0.1 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/windows.txt +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/windows.txt # etcd3-py -pyparsing==3.0.9 - # via packaging pyrsistent==0.17.3 # via jsonschema pytest-custom-exit-code==0.3.0 @@ -255,13 +306,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc21 ; sys_platform == "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -270,13 +321,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -286,10 +333,10 @@ pytest==7.2.0 ; 
python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/windows.txt # botocore # kubernetes @@ -297,37 +344,48 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/windows.txt pythonnet==3.0.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/windows.txt pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # moto # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in pywin32==305 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/windows.txt # cherrypy + # docker # pytest-skip-markers # wmi pywinrm==0.4.1 # via -r requirements/static/ci/windows.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt # clustershell # kubernetes + # pytest-salt-factories # yamllint -pyzmq==25.0.0 ; sys_platform == "win32" +pyzmq==25.0.2 ; sys_platform == "win32" # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/zeromq.txt # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt @@ -352,21 +410,19 @@ sed==0.3.1 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/windows.txt six==1.15.0 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # cassandra-driver # cheroot - # docker - # docker-pycreds # 
etcd3-py # genshi # geomet - # google-auth # jsonschema # kubernetes - # mock - # pyopenssl # python-dateutil # pyvmomi # pywinrm @@ -374,15 +430,21 @@ six==1.15.0 # virtualenv # websocket-client smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # gitdb +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # portend timelib==0.2.5 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/windows.txt toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 @@ -391,10 +453,12 @@ typing-extensions==4.2.0 # via # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt # -r requirements/windows.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -408,14 +472,20 @@ websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver wheel==0.38.4 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/windows.txt wmi==1.5.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # -r requirements/windows.txt xmltodict==0.12.0 # via # moto @@ -425,9 +495,13 @@ yamllint==1.26.3 yarl==1.7.2 # via aiohttp zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.8/windows.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git 
a/requirements/static/ci/py3.9/changelog.txt b/requirements/static/ci/py3.9/changelog.txt index 43b8ccc84062..92c670f312e4 100644 --- a/requirements/static/ci/py3.9/changelog.txt +++ b/requirements/static/ci/py3.9/changelog.txt @@ -2,26 +2,37 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/changelog.txt --pip-args='--constraint=requirements/static/ci/py3.9/linux.txt' requirements/static/ci/changelog.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/changelog.txt requirements/static/ci/changelog.in # click-default-group==1.2.2 # via towncrier -click==7.1.2 +click==7.1.1 # via + # -c requirements/static/ci/py3.9/linux.txt # click-default-group # towncrier incremental==17.5.0 # via towncrier jinja2==3.1.2 - # via towncrier -looseversion==1.0.3 - # via -r requirements/static/ci/changelog.in -markupsafe==2.0.1 - # via jinja2 -packaging==23.0 - # via -r requirements/static/ci/changelog.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # towncrier +looseversion==1.0.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/changelog.in +markupsafe==2.1.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # jinja2 +packaging==22.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/changelog.in tomli==2.0.1 - # via towncrier + # via + # -c requirements/static/ci/py3.9/linux.txt + # towncrier towncrier==22.12.0 # via -r requirements/static/ci/changelog.in diff --git a/requirements/static/ci/py3.9/cloud.txt b/requirements/static/ci/py3.9/cloud.txt index b9bd2abca6a3..49fb9216523c 100644 --- a/requirements/static/ci/py3.9/cloud.txt +++ b/requirements/static/ci/py3.9/cloud.txt @@ -2,28 +2,33 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/cloud.txt --pip-args='--constraint=requirements/static/ci/py3.9/linux.txt' 
requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/cloud.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/cloud.in requirements/static/ci/common.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.7 +aiohttp==3.9.0 # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 - # via etcd3-py + # -c requirements/static/ci/py3.9/linux.txt + # etcd3-py aiosignal==1.2.0 - # via aiohttp + # via + # -c requirements/static/ci/py3.9/linux.txt + # aiohttp apache-libcloud==2.5.0 ; sys_platform != "win32" # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/cloud.in # -r requirements/static/ci/common.in -asn1crypto==1.4.0 +asn1crypto==1.3.0 # via + # -c requirements/static/ci/py3.9/linux.txt # certvalidator # oscrypto async-timeout==4.0.2 - # via aiohttp -attrs==21.2.0 # via + # -c requirements/static/ci/py3.9/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.9/linux.txt # aiohttp # jsonschema # pytest @@ -31,725 +36,509 @@ attrs==21.2.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.27 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # 
azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.52 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via 
azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - 
# azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt 
-azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 - # via virtualenv -bcrypt==3.2.0 # via + # -c requirements/static/ci/py3.9/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # moto boto==2.49.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in botocore==1.24.46 # via + # -c requirements/static/ci/py3.9/linux.txt # boto3 # moto # s3transfer cachetools==4.2.2 - # via google-auth -cassandra-driver==3.25.0 - # via -r requirements/static/ci/common.in -certifi==2022.12.7 # via + # -c requirements/static/ci/py3.9/linux.txt + # google-auth +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # requests certvalidator==0.11.1 - # via vcert + # via + # -c 
requirements/static/ci/py3.9/linux.txt + # vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp -cheetah3==3.2.6.post1 - # via -r requirements/static/ci/common.in +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # requests +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in -ciscoconfparse==1.5.46 - # via napalm -click==8.0.1 - # via geomet +ciscoconfparse==1.5.19 + # via + # -c requirements/static/ci/py3.9/linux.txt + # napalm +click==7.1.1 + # via + # -c requirements/static/ci/py3.9/linux.txt + # geomet clustershell==1.8.3 - # via -r requirements/static/ci/common.in -colorama==0.4.4 - # via ciscoconfparse + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +colorama==0.4.3 + # via + # -c requirements/static/ci/py3.9/linux.txt + # ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt -croniter==1.0.15 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -cryptography==3.3.2 - # via - # adal - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c 
requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in # etcd3-py # moto # paramiko # pyopenssl + # pyspnego # requests-ntlm # smbprotocol # vcert -distlib==0.3.3 - # via virtualenv -distro==1.6.0 +distlib==0.3.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # virtualenv +distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # pytest-skip-markers -dnspython==2.1.0 +dnspython==1.16.0 # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.2 +docker==6.1.3 # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/pytest.txt +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 - # via pytest + # via + # -c requirements/static/ci/py3.9/linux.txt + # pytest filelock==3.0.12 - # via virtualenv + # via + # -c requirements/static/ci/py3.9/linux.txt + # virtualenv flaky==3.7.0 - # via -r requirements/pytest.txt + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/pytest.txt frozenlist==1.3.0 # via + # -c requirements/static/ci/py3.9/linux.txt # aiohttp # aiosignal future==0.18.3 # via + # -c requirements/static/ci/py3.9/linux.txt # napalm # textfsm genshi==0.7.5 - # via -r requirements/static/ci/common.in -geomet==0.2.1.post1 - # via cassandra-driver + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; 
python_version >= "3.7" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in google-auth==2.1.0 - # via kubernetes -idna==2.8 # via + # -c requirements/static/ci/py3.9/linux.txt + # kubernetes +idna==3.2 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # etcd3-py # requests # yarl -immutables==0.16 - # via contextvars +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/static/pkg/linux.in -iniconfig==1.1.1 - # via pytest -ipaddress==1.0.23 - # via kubernetes -isodate==0.6.0 - # via msrest + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in +iniconfig==1.0.1 + # via + # -c requirements/static/ci/py3.9/linux.txt + # pytest +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.9/linux.txt + # kubernetes jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy -jaraco.functools==3.3.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c 
requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # junos-eznc # moto # napalm -jmespath==0.10.0 +jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 # botocore jsonschema==3.2.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in junos-eznc==2.4.0 ; sys_platform != "win32" and python_version <= "3.10" # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # napalm -jxmlease==1.0.3 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -kazoo==2.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in keyring==5.7.1 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in kubernetes==3.0.0 - # via -r requirements/static/ci/common.in -libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in -loguru==0.6.0 - # via ciscoconfparse + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via + # -c 
requirements/static/ci/py3.9/linux.txt # junos-eznc # napalm # ncclient mako==1.2.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # jinja2 # mako # moto # werkzeug -mock==4.0.3 - # via -r requirements/pytest.txt -more-itertools==8.8.0 +mock==5.1.0 # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/pytest.txt +more-itertools==5.0.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +moto==3.0.1 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.21 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # 
azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via + # -c requirements/static/ci/py3.9/linux.txt # aiohttp # yarl -napalm==3.3.1 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" - # via -r requirements/static/ci/common.in -ncclient==0.6.12 +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +ncclient==0.6.4 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc - # napalm 
netaddr==0.7.19 # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/cloud.in # junos-eznc # napalm # pyeapi -netmiko==3.4.0 - # via napalm -ntc-templates==2.3.2 +netmiko==3.2.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # napalm +ntc-templates==1.4.1 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc - # netmiko ntlm-auth==1.3.0 + # via requests-ntlm +oscrypto==1.2.0 # via - # requests-ntlm - # smbprotocol -oauthlib==3.2.2 - # via requests-oauthlib -oscrypto==1.2.1 - # via certvalidator -packaging==21.3 + # -c requirements/static/ci/py3.9/linux.txt + # certvalidator +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # junos-eznc # napalm # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse -platformdirs==2.3.0 - # via virtualenv -pluggy==1.0.0 - # via pytest -portend==2.7.1 - # via cherrypy +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # watchdog +platformdirs==2.2.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # virtualenv +pluggy==0.13.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # pytest +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # cherrypy profitbricks==4.1.3 # via -r requirements/static/ci/cloud.in psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pyasn1-modules==0.2.8 - # via google-auth +pyasn1-modules==0.2.4 + 
# via + # -c requirements/static/ci/py3.9/linux.txt + # google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/py3.9/linux.txt # pyasn1-modules # rsa - # smbprotocol pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in # cffi -pycryptodomex==3.10.1 - # via -r requirements/crypto.txt -pyeapi==0.8.4 - # via napalm +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/crypto.txt +pyeapi==0.8.3 + # via + # -c requirements/static/ci/py3.9/linux.txt + # napalm +pyfakefs==5.3.1 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/pytest.txt pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" - # via -r requirements/static/ci/common.in -pyjwt==2.4.0 - # via adal -pynacl==1.4.0 - # via paramiko -pyopenssl==20.0.1 # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc - # packaging pypsexec==0.1.0 # via -r requirements/static/ci/cloud.in -pyrsistent==0.18.0 - # via jsonschema -pyserial==3.5 +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.9/linux.txt + # jsonschema +pyserial==3.4 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc # netmiko +pyspnego==0.8.0 + # via + # -r requirements/static/ci/cloud.in + # smbprotocol pytest-custom-exit-code==0.3.0 - # via -r requirements/pytest.txt + # via + # -c 
requirements/static/ci/py3.9/linux.txt + # -r requirements/pytest.txt pytest-helpers-namespace==2021.4.29 # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 - # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" - # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 - # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-httpserver==1.0.8 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/pytest.txt +pytest-salt-factories==1.0.0rc28 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/pytest.txt +pytest-shell-utilities==1.8.0 # via + # -c requirements/static/ci/py3.9/linux.txt + # pytest-salt-factories +pytest-skip-markers==1.5.0 + # via + # -c requirements/static/ci/py3.9/linux.txt # pytest-salt-factories # pytest-shell-utilities # pytest-system-statistics -pytest-subtests==0.5.0 - # via -r requirements/pytest.txt -pytest-system-statistics==1.0.2 - # via pytest-salt-factories -pytest-tempdir==2019.10.12 +pytest-subtests==0.4.0 # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt +pytest-system-statistics==1.0.2 + # via + # -c requirements/static/ci/py3.9/linux.txt # pytest-salt-factories -pytest-timeout==2.0.2 - # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest-timeout==1.4.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/pytest.txt +pytest==7.2.0 # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/pytest.txt # pytest-custom-exit-code # pytest-helpers-namespace @@ -758,185 +547,251 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout -python-dateutil==2.8.2 +python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c 
requirements/static/ci/py3.9/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes # moto # vcert python-etcd==0.4.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # moto # tempora -pyvmomi==7.0.2 - # via -r requirements/static/ci/common.in +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in pywinrm==0.3.0 # via -r requirements/static/ci/cloud.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # clustershell # junos-eznc # kubernetes # napalm + # pytest-salt-factories # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/zeromq.txt # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # profitbricks # pyvmomi # pywinrm # requests-ntlm - # requests-oauthlib # responses # vcert -responses==0.14.0 - # via 
moto +responses==0.10.6 + # via + # -c requirements/static/ci/py3.9/linux.txt + # moto rfc3987==1.3.8 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 - # via google-auth -s3transfer==0.5.0 - # via boto3 -scp==0.14.1 # via + # -c requirements/static/ci/py3.9/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # boto3 +scp==0.13.2 + # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc # napalm # netmiko semantic-version==2.9.0 - # via etcd3-py + # via + # -c requirements/static/ci/py3.9/linux.txt + # etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # msrestazure + # more-itertools # ncclient # paramiko # profitbricks - # pynacl - # pyopenssl # pypsexec # python-dateutil # pyvmomi # pywinrm # responses - # smbprotocol # textfsm # transitions # vcert # virtualenv # websocket-client -smbprotocol==0.1.1 - # via pypsexec +smbprotocol==1.10.1 + # via + # -r requirements/static/ci/cloud.in + # pypsexec smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in strict-rfc3339==0.7 - # 
via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in tempora==4.1.1 - # via portend -tenacity==8.0.1 - # via netmiko -textfsm==1.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # portend +terminal==0.4.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # ntc-templates +textfsm==1.1.0 + # via + # -c requirements/static/ci/py3.9/linux.txt # napalm + # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in tomli==2.0.1 - # via pytest + # via + # -c requirements/static/ci/py3.9/linux.txt + # pytest transitions==0.8.9 - # via junos-eznc -typing-extensions==3.10.0.2 # via + # -c requirements/static/ci/py3.9/linux.txt + # junos-eznc +typing-extensions==4.8.0 + # via + # -c requirements/static/ci/py3.9/linux.txt # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # botocore + # docker # kubernetes # python-etcd # requests - # responses vcert==0.7.4 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -virtualenv==20.8.0 # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +virtualenv==20.7.2 + # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # pytest-salt-factories -watchdog==2.1.5 - # via -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in websocket-client==0.40.0 # via + # -c 
requirements/static/ci/py3.9/linux.txt # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.9/linux.txt # moto # pytest-httpserver xmltodict==0.12.0 # via + # -c requirements/static/ci/py3.9/linux.txt # moto # pywinrm yamlordereddictloader==0.4.0 - # via junos-eznc + # via + # -c requirements/static/ci/py3.9/linux.txt + # junos-eznc yarl==1.7.2 - # via aiohttp -zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/py3.9/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.9/darwin-crypto.txt b/requirements/static/ci/py3.9/darwin-crypto.txt index cccccdbbd3da..677e1c6cdf3e 100644 --- a/requirements/static/ci/py3.9/darwin-crypto.txt +++ b/requirements/static/ci/py3.9/darwin-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/darwin-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.9/darwin.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/darwin-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.9/darwin.txt b/requirements/static/ci/py3.9/darwin.txt index 471416696b3f..4500475ede2a 100644 --- a/requirements/static/ci/py3.9/darwin.txt +++ b/requirements/static/ci/py3.9/darwin.txt @@ -2,18 +2,15 @@ # This file is autogenerated by 
pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/darwin.txt --pip-args='--constraint=requirements/static/pkg/py3.9/darwin.txt' requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/darwin.txt requirements/darwin.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/darwin.in requirements/static/pkg/darwin.in # -adal==1.2.5 - # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp apache-libcloud==2.5.0 ; sys_platform != "win32" # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/darwin.txt # -r requirements/static/ci/common.in asn1crypto==1.3.0 @@ -22,7 +19,7 @@ asn1crypto==1.3.0 # oscrypto async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -31,311 +28,13 @@ attrs==20.3.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.26 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - 
# azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.51 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # 
via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # 
azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - 
# azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==3.1.6 # via + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -350,33 +49,36 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # requests certvalidator==0.11.1 # via vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/darwin.txt # -r requirements/static/ci/common.in ciscoconfparse==1.5.19 @@ -388,16 +90,15 @@ clustershell==1.8.1 colorama==0.4.3 # via ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt + # via + # -c 
requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==3.3.2 +cryptography==41.0.7 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/darwin.txt - # adal - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common # etcd3-py # moto # paramiko @@ -407,6 +108,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt # pytest-skip-markers dnspython==1.16.0 @@ -414,11 +116,9 @@ dnspython==1.16.0 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.3 - # via - # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -438,51 +138,66 @@ genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 # via cassandra-driver -gitdb==4.0.5 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitdb==4.0.7 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/darwin.txt # -r requirements/static/ci/common.in -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes hglib==2.6.1 # via -r requirements/static/ci/darwin.in -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/darwin.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/darwin.txt iniconfig==1.0.1 # via pytest ipaddress==1.0.22 # via kubernetes 
-isodate==0.6.0 - # via msrest jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt # junos-eznc # moto # napalm jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -500,20 +215,23 @@ keyring==5.7.1 kubernetes==3.0.0 # via -r requirements/static/ci/common.in linode-python==1.1.1 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/darwin.txt looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/base.txt lxml==4.9.1 # via # junos-eznc # napalm # ncclient mako==1.2.2 - # via - # -r requirements/darwin.txt - # -r requirements/static/ci/common.in + # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt # jinja2 # mako @@ -521,117 +239,28 @@ markupsafe==2.1.2 # werkzeug mercurial==6.0.1 # via -r requirements/static/ci/darwin.in -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==8.2.0 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c 
requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.19 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # 
azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via # aiohttp # yarl -napalm==3.1.0 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" # via -r requirements/static/ci/common.in ncclient==0.6.4 # via junos-eznc @@ -644,13 +273,13 @@ netmiko==3.2.0 # via napalm ntc-templates==1.4.1 # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib oscrypto==1.2.0 # via certvalidator -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt + # docker # pytest paramiko==2.10.1 # via @@ -659,7 +288,7 @@ paramiko==2.10.1 # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via # -r requirements/static/ci/common.in # ciscoconfparse @@ -672,9 +301,12 @@ platformdirs==2.2.0 pluggy==0.13.1 # via pytest portend==2.6 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -683,32 +315,37 @@ pyasn1-modules==0.2.4 # via google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/darwin.txt # pyasn1-modules # rsa pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/darwin.txt # -r requirements/static/ci/common.in # cffi pycryptodomex==3.9.8 - # via -r requirements/crypto.txt + # via + # -c 
requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/crypto.txt pyeapi==0.8.3 # via napalm -pygit2==1.9.1 ; python_version >= "3.7" +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/darwin.in -pyjwt==2.4.0 - # via adal -pynacl==1.3.0 - # via paramiko -pyopenssl==19.0.0 +pynacl==1.5.0 # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/darwin.txt # etcd3-py pyparsing==3.0.9 - # via - # junos-eznc - # packaging + # via junos-eznc pyrsistent==0.17.3 # via jsonschema pyserial==3.4 @@ -722,13 +359,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -737,13 +374,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -753,14 +386,11 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-dateutil==2.8.0 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/darwin.txt - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes @@ -769,48 +399,44 
@@ python-dateutil==2.8.0 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/darwin.txt pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # moto # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt # clustershell # junos-eznc # kubernetes # napalm + # pytest-salt-factories # yamllint # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # pyvmomi - # requests-oauthlib # responses # vcert # vultr @@ -830,27 +456,23 @@ scp==0.13.2 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/darwin.txt six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # google-auth - # isodate # jsonschema # junos-eznc # kubernetes - # mock - # msrestazure # ncclient # paramiko - # pynacl - # pyopenssl # python-dateutil # pyvmomi # responses @@ -859,14 +481,18 @@ six==1.16.0 # vcert # virtualenv # websocket-client -smmap==3.0.2 - # via gitdb -sqlparse==0.4.2 +smmap==4.0.0 + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # 
gitdb +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # portend terminal==0.4.0 # via ntc-templates textfsm==1.1.0 @@ -875,20 +501,24 @@ textfsm==1.1.0 # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/darwin.txt toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest -transitions==0.8.1 +transitions==0.8.9 # via junos-eznc typing-extensions==4.2.0 # via # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -899,14 +529,18 @@ virtualenv==20.7.2 # -r requirements/static/ci/common.in # pytest-salt-factories vultr==1.0.1 - # via -r requirements/darwin.txt + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # -r requirements/darwin.txt watchdog==0.10.3 # via -r requirements/static/ci/common.in websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver @@ -921,9 +555,13 @@ yamlordereddictloader==0.4.0 yarl==1.7.2 # via aiohttp zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.9/darwin.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.9/docs.txt b/requirements/static/ci/py3.9/docs.txt index d364b113a864..b479054a4c15 100644 --- a/requirements/static/ci/py3.9/docs.txt +++ b/requirements/static/ci/py3.9/docs.txt @@ -2,17 +2,17 @@ # This file is 
autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/docs.txt requirements/base.txt requirements/static/ci/docs.in requirements/zeromq.txt # alabaster==0.7.12 # via sphinx babel==2.9.1 # via sphinx -certifi==2022.12.7 +certifi==2023.07.22 # via # -c requirements/static/ci/py3.9/linux.txt # requests -chardet==3.0.4 +charset-normalizer==3.2.0 # via # -c requirements/static/ci/py3.9/linux.txt # requests @@ -34,7 +34,7 @@ distro==1.5.0 # -r requirements/base.txt docutils==0.19 # via sphinx -idna==2.8 +idna==3.2 # via # -c requirements/static/ci/py3.9/linux.txt # requests @@ -82,7 +82,7 @@ looseversion==1.0.2 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt -markdown-it-py==2.1.0 +markdown-it-py==2.2.0 # via # mdit-py-plugins # myst-docutils @@ -134,7 +134,7 @@ pytz==2022.1 # -c requirements/static/ci/py3.9/linux.txt # babel # tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt @@ -143,7 +143,7 @@ pyzmq==23.2.0 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt @@ -181,13 +181,13 @@ tempora==4.1.1 # via # -c requirements/static/ci/py3.9/linux.txt # portend -typing-extensions==4.2.0 +typing-extensions==4.8.0 # via # -c requirements/static/ci/py3.9/linux.txt # myst-docutils uc-micro-py==1.0.1 # via linkify-it-py -urllib3==1.26.6 +urllib3==1.26.18 # via # -c requirements/static/ci/py3.9/linux.txt # requests diff --git a/requirements/static/ci/py3.9/freebsd-crypto.txt b/requirements/static/ci/py3.9/freebsd-crypto.txt index 401b46c330fe..01b34b6978df 100644 --- a/requirements/static/ci/py3.9/freebsd-crypto.txt +++ 
b/requirements/static/ci/py3.9/freebsd-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/freebsd-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.9/freebsd.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/freebsd-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.9/freebsd.txt b/requirements/static/ci/py3.9/freebsd.txt index 2ac12e3f50d0..569dc344f7d6 100644 --- a/requirements/static/ci/py3.9/freebsd.txt +++ b/requirements/static/ci/py3.9/freebsd.txt @@ -2,13 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/freebsd.txt --pip-args='--constraint=requirements/static/pkg/py3.9/freebsd.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/freebsd.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/freebsd.in requirements/static/pkg/freebsd.in requirements/zeromq.txt # -adal==1.2.5 - # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -20,7 +16,7 @@ asn1crypto==1.3.0 # oscrypto async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -29,311 +25,13 @@ attrs==20.3.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.26 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - 
# azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.51 - # via azure -azure-eventgrid==1.3.0 - # via azure 
-azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt 
-azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt 
-azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==3.1.6 # via + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -348,33 +46,36 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.24.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # requests certvalidator==0.11.1 # via vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pygit2 # pynacl 
-chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/freebsd.in ciscoconfparse==1.5.19 @@ -386,15 +87,15 @@ clustershell==1.8.3 colorama==0.4.3 # via ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==3.3.2 +cryptography==41.0.7 # via - # adal - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/static/pkg/freebsd.in # etcd3-py # moto # paramiko @@ -404,6 +105,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt # -r requirements/static/pkg/freebsd.in # pytest-skip-markers @@ -412,11 +114,9 @@ dnspython==1.16.0 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.3 - # via - # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -436,48 +136,60 @@ genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.2.1.post1 # via cassandra-driver -gitdb==4.0.5 +gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/static/ci/common.in -google-auth==1.6.3 +google-auth==2.1.0 
# via kubernetes hglib==2.6.1 # via -r requirements/static/ci/freebsd.in -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/static/pkg/freebsd.in iniconfig==1.0.1 # via pytest ipaddress==1.0.22 # via kubernetes -isodate==0.6.0 - # via msrest jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt # junos-eznc # moto # napalm jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -499,7 +211,9 @@ kubernetes==3.0.0 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/base.txt lxml==4.9.1 # via # junos-eznc @@ -509,6 +223,7 @@ mako==1.2.2 # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt # jinja2 # mako @@ -516,117 +231,28 @@ markupsafe==2.1.2 # werkzeug mercurial==6.0.1 # via -r requirements/static/ci/freebsd.in 
-mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.19 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # 
azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via # aiohttp # yarl -napalm==3.1.0 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" # via -r requirements/static/ci/common.in ncclient==0.6.4 # via junos-eznc @@ -639,13 +265,13 @@ netmiko==3.2.0 # via napalm ntc-templates==1.4.1 # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib oscrypto==1.2.0 # via certvalidator -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -655,7 +281,7 @@ paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via # -r requirements/static/ci/common.in # ciscoconfparse @@ -668,9 +294,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ 
-683,29 +312,33 @@ pyasn1==0.4.8 # rsa pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/freebsd.in # cffi pycryptodomex==3.9.8 - # via -r requirements/crypto.txt + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/crypto.txt pyeapi==0.8.3 # via napalm -pygit2==1.8.0 ; python_version >= "3.7" +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/freebsd.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" # via -r requirements/static/ci/common.in -pyjwt==2.4.0 - # via adal -pynacl==1.3.0 - # via paramiko -pyopenssl==19.1.0 +pynacl==1.5.0 # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/static/pkg/freebsd.in # etcd3-py pyparsing==3.0.9 - # via - # junos-eznc - # packaging + # via junos-eznc pyrsistent==0.17.3 # via jsonschema pyserial==3.4 @@ -719,13 +352,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -734,13 +367,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" 
+pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -750,14 +379,11 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/static/pkg/freebsd.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes @@ -766,48 +392,44 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/static/pkg/freebsd.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # moto # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt # clustershell # junos-eznc # kubernetes # napalm + # pytest-salt-factories # yamllint # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/zeromq.txt # pytest-salt-factories -requests-oauthlib==1.3.0 - # via msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # pyvmomi - # requests-oauthlib # responses # vcert responses==0.10.6 @@ -826,29 +448,25 @@ scp==0.13.2 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r 
requirements/static/pkg/freebsd.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # google-auth - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # mock # more-itertools - # msrestazure # ncclient # paramiko - # pynacl - # pyopenssl # python-dateutil # pyvmomi # responses @@ -857,14 +475,16 @@ six==1.16.0 # vcert # virtualenv # websocket-client -smmap==3.0.4 +smmap==4.0.0 # via gitdb -sqlparse==0.4.2 +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # portend terminal==0.4.0 # via ntc-templates textfsm==1.1.0 @@ -873,20 +493,24 @@ textfsm==1.1.0 # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/freebsd.in + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # -r requirements/static/pkg/freebsd.in toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest -transitions==0.8.1 +transitions==0.8.9 # via junos-eznc -typing-extensions==4.2.0 +typing-extensions==4.8.0 # via # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -902,7 +526,9 @@ websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver @@ -915,9 +541,13 @@ yamlordereddictloader==0.4.0 yarl==1.7.2 # via aiohttp zc.lockfile==1.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.9/freebsd.txt + # importlib-metadata # The following packages are considered to be unsafe 
in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.9/invoke.txt b/requirements/static/ci/py3.9/invoke.txt deleted file mode 100644 index 3e1879c97e12..000000000000 --- a/requirements/static/ci/py3.9/invoke.txt +++ /dev/null @@ -1,14 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --output-file=requirements/static/ci/py3.9/invoke.txt --pip-args='--constraint=requirements/static/ci/py3.9/linux.txt' requirements/static/ci/invoke.in -# -blessings==1.7 - # via -r requirements/static/ci/invoke.in -invoke==1.4.1 - # via -r requirements/static/ci/invoke.in -pyyaml==5.4.1 - # via -r requirements/static/ci/invoke.in -six==1.16.0 - # via blessings diff --git a/requirements/static/ci/py3.9/lint.txt b/requirements/static/ci/py3.9/lint.txt index bcee7fb59a80..bad348247b8a 100644 --- a/requirements/static/ci/py3.9/lint.txt +++ b/requirements/static/ci/py3.9/lint.txt @@ -2,516 +2,332 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/lint.txt --pip-args='--constraint=requirements/static/ci/py3.9/linux.txt' requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/lint.txt requirements/base.txt requirements/static/ci/common.in requirements/static/ci/lint.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.7 +aiohttp==3.9.0 # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 - # via etcd3-py + # -c requirements/static/ci/py3.9/linux.txt + # etcd3-py aiosignal==1.2.0 - # via aiohttp + # via + # -c requirements/static/ci/py3.9/linux.txt + # aiohttp ansible-core==2.14.1 - # via ansible + # via + # -c requirements/static/ci/py3.9/linux.txt + # ansible ansible==7.1.0 ; python_version >= 
"3.9" - # via -r requirements/static/ci/linux.in -apache-libcloud==3.3.1 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in +apache-libcloud==2.5.0 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in apscheduler==3.6.3 - # via python-telegram-bot -asn1crypto==1.4.0 # via + # -c requirements/static/ci/py3.9/linux.txt + # python-telegram-bot +asn1crypto==1.3.0 + # via + # -c requirements/static/ci/py3.9/linux.txt # certvalidator # oscrypto astroid==2.3.3 # via pylint async-timeout==4.0.2 - # via aiohttp -attrs==21.2.0 # via + # -c requirements/static/ci/py3.9/linux.txt + # aiohttp +attrs==23.1.0 + # via + # -c requirements/static/ci/py3.9/linux.txt # aiohttp # jsonschema -azure-applicationinsights==0.1.0 - # via azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.27 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # 
azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue -azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.6 - # via azure -azure-datalake-store==0.0.52 - # via azure -azure-eventgrid==1.3.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.2 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.5.0 - # via azure-mgmt -azure-mgmt-containerregistry==2.8.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt 
-azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.6.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.1 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.7.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.1.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # 
azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.9.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt -azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.2.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.1.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.7 - # via azure -azure-storage-blob==1.5.0 - 
# via azure -azure-storage-common==1.4.2 - # via - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 - # via virtualenv -bcrypt==3.2.0 # via + # -c requirements/static/ci/py3.9/linux.txt + # virtualenv +bcrypt==3.1.6 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # moto boto==2.49.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in botocore==1.24.46 # via + # -c requirements/static/ci/py3.9/linux.txt # boto3 # moto # s3transfer cachetools==4.2.2 # via + # -c requirements/static/ci/py3.9/linux.txt # google-auth # python-telegram-bot -cassandra-driver==3.25.0 - # via -r requirements/static/ci/common.in -certifi==2022.12.7 +cassandra-driver==3.23.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # python-telegram-bot # requests certvalidator==0.11.1 - # via vcert + # via + # -c requirements/static/ci/py3.9/linux.txt + # vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pygit2 # pynacl -charset-normalizer==2.0.4 +charset-normalizer==3.2.0 # via - # aiohttp + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c 
requirements/static/ci/py3.9/linux.txt # requests -cheetah3==3.2.6.post1 - # via -r requirements/static/ci/common.in +cheetah3==3.2.6.post2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in -ciscoconfparse==1.5.46 - # via napalm -click==8.0.1 - # via geomet +ciscoconfparse==1.5.19 + # via + # -c requirements/static/ci/py3.9/linux.txt + # napalm +click==7.1.1 + # via + # -c requirements/static/ci/py3.9/linux.txt + # geomet clustershell==1.8.3 - # via -r requirements/static/ci/common.in -colorama==0.4.4 - # via ciscoconfparse + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +colorama==0.4.3 + # via + # -c requirements/static/ci/py3.9/linux.txt + # ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt -croniter==1.0.15 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -cryptography==3.4.7 # via - # adal + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/base.txt +croniter==0.3.29 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in # ansible-core - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common # etcd3-py # moto # paramiko # pyopenssl # vcert distlib==0.3.2 - # via virtualenv -distro==1.6.0 - # via -r requirements/base.txt -dnspython==2.1.0 # via + # -c 
requirements/static/ci/py3.9/linux.txt + # virtualenv +distro==1.5.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/base.txt +dnspython==1.16.0 + # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.0 - # via -r requirements/static/ci/common.in -etcd3-py==0.1.6 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +docker==6.1.3 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/lint.in +etcd3-py==0.1.6 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in filelock==3.0.12 - # via virtualenv + # via + # -c requirements/static/ci/py3.9/linux.txt + # virtualenv frozenlist==1.3.0 # via + # -c requirements/static/ci/py3.9/linux.txt # aiohttp # aiosignal future==0.18.3 # via + # -c requirements/static/ci/py3.9/linux.txt # napalm # textfsm genshi==0.7.5 - # via -r requirements/static/ci/common.in -geomet==0.2.1.post1 - # via cassandra-driver + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +geomet==0.1.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" - # via -r requirements/static/ci/common.in -google-auth==2.0.1 - # via kubernetes + # via + # -c requirements/static/ci/py3.9/linux.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +google-auth==2.1.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # kubernetes hglib==2.6.1 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # etcd3-py # requests # 
yarl -immutables==0.16 - # via contextvars +immutables==0.15 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/static/pkg/linux.in -ipaddress==1.0.23 - # via kubernetes -isodate==0.6.0 - # via msrest + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in +ipaddress==1.0.22 + # via + # -c requirements/static/ci/py3.9/linux.txt + # kubernetes isort==4.3.21 # via pylint jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy -jaraco.functools==3.3.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # cherrypy +jaraco.functools==2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # ansible-core # junos-eznc # moto # napalm -jmespath==0.10.0 +jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 # botocore jsonschema==3.2.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in junos-eznc==2.4.0 ; sys_platform != "win32" and python_version <= "3.10" # via + # -c requirements/static/ci/py3.9/linux.txt # -r 
requirements/static/ci/common.in # napalm -jxmlease==1.0.3 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in -kazoo==2.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in +jxmlease==1.0.1 ; sys_platform != "win32" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +kazoo==2.6.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in keyring==5.7.1 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in kubernetes==3.0.0 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in lazy-object-proxy==1.4.3 # via astroid -libnacl==1.8.0 ; sys_platform != "win32" and sys_platform != "darwin" - # via -r requirements/static/ci/common.in -loguru==0.6.0 - # via ciscoconfparse +libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc # napalm # ncclient mako==1.2.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # jinja2 # mako @@ -520,239 +336,221 @@ markupsafe==2.1.2 mccabe==0.6.1 # via pylint mercurial==6.0.1 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r 
requirements/static/ci/linux.in modernize==0.5 # via saltpylint -more-itertools==8.8.0 +more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" - # via -r requirements/static/ci/common.in +moto==3.0.1 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in msgpack==1.0.2 - # via -r requirements/base.txt -msrest==0.6.21 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-managementpartner - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.4 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # 
azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/base.txt multidict==6.0.2 # via + # -c requirements/static/ci/py3.9/linux.txt # aiohttp # yarl -napalm==3.3.1 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" - # via -r requirements/static/ci/common.in -ncclient==0.6.12 +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +ncclient==0.6.4 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc - # napalm -netaddr==0.8.0 +netaddr==0.7.19 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc # napalm # pyeapi -netmiko==3.4.0 - # via napalm -ntc-templates==2.2.2 +netmiko==3.2.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # napalm +ntc-templates==1.4.1 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc - # netmiko -oauthlib==3.2.2 - # via requests-oauthlib -oscrypto==1.2.1 - # via certvalidator -packaging==21.3 +oscrypto==1.2.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # 
certvalidator +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # ansible-core + # docker paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # junos-eznc # napalm # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # ciscoconfparse pathspec==0.9.0 - # via yamllint + # via + # -c requirements/static/ci/py3.9/linux.txt + # yamllint +pathtools==0.1.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # watchdog platformdirs==2.2.0 - # via virtualenv -portend==2.7.1 - # via cherrypy + # via + # -c requirements/static/ci/py3.9/linux.txt + # virtualenv +portend==2.4 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # cherrypy psutil==5.8.0 - # via -r requirements/base.txt -pyasn1-modules==0.2.8 - # via google-auth + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/base.txt +pyasn1-modules==0.2.4 + # via + # -c requirements/static/ci/py3.9/linux.txt + # google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/py3.9/linux.txt # pyasn1-modules # rsa pycodestyle==2.5.0 # via saltpylint pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in # cffi -pycryptodomex==3.10.1 - # via -r requirements/crypto.txt -pyeapi==0.8.4 - # via napalm -pygit2==1.6.1 ; python_version > "3.8" - # via -r requirements/static/ci/linux.in +pycryptodomex==3.9.8 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r 
requirements/crypto.txt +pyeapi==0.8.3 + # via + # -c requirements/static/ci/py3.9/linux.txt + # napalm +pygit2==1.13.1 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in pyiface==0.0.11 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in pyjwt==2.4.0 # via - # adal + # -c requirements/static/ci/py3.9/linux.txt # twilio pylint==2.4.4 # via # -r requirements/static/ci/lint.in # saltpylint -pymysql==1.0.2 ; python_version > "3.5" - # via -r requirements/static/ci/linux.in -pynacl==1.4.0 - # via paramiko -pyopenssl==20.0.1 +pymysql==1.0.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in +pynacl==1.5.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc - # packaging -pyrsistent==0.18.0 - # via jsonschema -pyserial==3.5 +pyrsistent==0.17.3 + # via + # -c requirements/static/ci/py3.9/linux.txt + # jsonschema +pyserial==3.4 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc # netmiko python-consul==1.1.0 - # via -r requirements/static/ci/linux.in -python-dateutil==2.8.2 # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in +python-dateutil==2.8.1 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/pkg/linux.in - # adal - # 
azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes # moto # vcert python-etcd==0.4.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/linux.in -python-telegram-bot==13.7 ; python_version > "3.5" - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # apscheduler # moto # python-telegram-bot # tempora # twilio -pyvmomi==7.0.2 - # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyvmomi==6.7.1.2018.12 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # ansible-core # clustershell @@ -762,62 +560,81 @@ pyyaml==5.4.1 # yamllint # yamlordereddictloader pyzmq==23.2.0 - # via -r requirements/zeromq.txt + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/zeromq.txt redis-py-cluster==2.1.3 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in redis==3.5.3 - # via redis-py-cluster -requests-oauthlib==1.3.0 - # via msrest -requests==2.26.0 # via + # -c requirements/static/ci/py3.9/linux.txt + # redis-py-cluster +requests==2.31.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/base.txt # -r 
requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # python-consul # pyvmomi - # requests-oauthlib # responses # twilio # vcert resolvelib==0.5.4 - # via ansible-core -responses==0.13.4 - # via moto + # via + # -c requirements/static/ci/py3.9/linux.txt + # ansible-core +responses==0.10.6 + # via + # -c requirements/static/ci/py3.9/linux.txt + # moto rfc3987==1.3.8 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 - # via google-auth -s3transfer==0.5.0 - # via boto3 -saltpylint==2020.9.28 + # via + # -c requirements/static/ci/py3.9/linux.txt + # google-auth +s3transfer==0.5.2 + # via + # -c requirements/static/ci/py3.9/linux.txt + # boto3 +saltpylint==2023.8.3 # via -r requirements/static/ci/lint.in -scp==0.13.6 +scp==0.13.2 # via + # -c requirements/static/ci/py3.9/linux.txt # junos-eznc # napalm # netmiko semantic-version==2.9.0 - # via etcd3-py + # via + # -c requirements/static/ci/py3.9/linux.txt + # etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # apscheduler # astroid # bcrypt @@ -826,16 +643,13 @@ six==1.16.0 # etcd3-py # genshi # geomet - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # msrestazure + # more-itertools # ncclient # paramiko - # 
pynacl - # pyopenssl # python-consul # python-dateutil # pyvmomi @@ -846,70 +660,128 @@ six==1.16.0 # virtualenv # websocket-client slack-bolt==1.15.5 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in slack-sdk==3.19.5 - # via slack-bolt + # via + # -c requirements/static/ci/py3.9/linux.txt + # slack-bolt smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # gitdb +sqlparse==0.4.4 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in strict-rfc3339==0.7 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in tempora==4.1.1 - # via portend -tenacity==8.0.1 - # via netmiko -textfsm==1.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # portend +terminal==0.4.0 + # via + # -c requirements/static/ci/py3.9/linux.txt + # ntc-templates +textfsm==1.1.0 + # via + # -c requirements/static/ci/py3.9/linux.txt # napalm + # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 # via + # -c requirements/static/ci/py3.9/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/ci/lint.in tornado==6.1 - # via python-telegram-bot -transitions==0.8.8 - # via junos-eznc + # via + # -c requirements/static/ci/py3.9/linux.txt + # python-telegram-bot +transitions==0.8.9 + # via + # -c requirements/static/ci/py3.9/linux.txt + # junos-eznc twilio==7.9.2 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in tzlocal==3.0 - # via apscheduler -urllib3==1.26.6 
# via + # -c requirements/static/ci/py3.9/linux.txt + # apscheduler +urllib3==1.26.18 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt # botocore + # docker # kubernetes # python-etcd # requests - # responses vcert==0.7.4 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in virtualenv==20.7.2 - # via -r requirements/static/ci/common.in -watchdog==2.1.5 - # via -r requirements/static/ci/common.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +watchdog==0.10.3 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in websocket-client==0.40.0 # via + # -c requirements/static/ci/py3.9/linux.txt # docker # kubernetes -werkzeug==2.2.3 - # via moto +wempy==0.2.1 + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/common.in +werkzeug==3.0.1 + # via + # -c requirements/static/ci/py3.9/linux.txt + # moto wrapt==1.11.1 # via astroid xmltodict==0.12.0 - # via moto + # via + # -c requirements/static/ci/py3.9/linux.txt + # moto yamllint==1.26.3 - # via -r requirements/static/ci/linux.in + # via + # -c requirements/static/ci/py3.9/linux.txt + # -r requirements/static/ci/linux.in yamlordereddictloader==0.4.0 - # via junos-eznc + # via + # -c requirements/static/ci/py3.9/linux.txt + # junos-eznc yarl==1.7.2 - # via aiohttp -zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/py3.9/linux.txt + # aiohttp +zc.lockfile==1.4 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -c requirements/static/ci/py3.9/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # 
setuptools diff --git a/requirements/static/ci/py3.9/linux-crypto.txt b/requirements/static/ci/py3.9/linux-crypto.txt index 443368d9ad14..32c40b41ec8f 100644 --- a/requirements/static/ci/py3.9/linux-crypto.txt +++ b/requirements/static/ci/py3.9/linux-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/linux-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.9/linux.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/linux-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.9/linux.txt b/requirements/static/ci/py3.9/linux.txt index 9b910c3f48e1..8f5611c0f920 100644 --- a/requirements/static/ci/py3.9/linux.txt +++ b/requirements/static/ci/py3.9/linux.txt @@ -2,13 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/linux.txt --pip-args='--constraint=requirements/static/pkg/py3.9/linux.txt' requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/linux.txt requirements/base.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/linux.in requirements/static/pkg/linux.in requirements/zeromq.txt # -adal==1.2.3 - # via - # azure-datalake-store - # msrestazure -aiohttp==3.8.1 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp @@ -26,7 +22,7 @@ asn1crypto==1.3.0 # oscrypto async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -35,316 +31,13 @@ attrs==20.3.0 # pytest-shell-utilities # pytest-skip-markers # pytest-system-statistics -azure-applicationinsights==0.1.0 - # via 
azure -azure-batch==4.1.3 - # via azure -azure-common==1.1.18 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-table - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy - # azure-storage-blob - # azure-storage-common - # azure-storage-file - # azure-storage-queue 
-azure-cosmosdb-nspkg==2.0.2 - # via azure-cosmosdb-table -azure-cosmosdb-table==1.0.5 - # via azure -azure-datalake-store==0.0.44 - # via azure -azure-eventgrid==1.2.0 - # via azure -azure-graphrbac==0.40.0 - # via azure -azure-keyvault==1.1.0 - # via azure -azure-loganalytics==0.1.0 - # via azure -azure-mgmt-advisor==1.0.1 - # via azure-mgmt -azure-mgmt-applicationinsights==0.1.1 - # via azure-mgmt -azure-mgmt-authorization==0.50.0 - # via azure-mgmt -azure-mgmt-batch==5.0.1 - # via azure-mgmt -azure-mgmt-batchai==2.0.0 - # via azure-mgmt -azure-mgmt-billing==0.2.0 - # via azure-mgmt -azure-mgmt-cdn==3.1.0 - # via azure-mgmt -azure-mgmt-cognitiveservices==3.0.0 - # via azure-mgmt -azure-mgmt-commerce==1.0.1 - # via azure-mgmt -azure-mgmt-compute==4.6.0 - # via azure-mgmt -azure-mgmt-consumption==2.0.0 - # via azure-mgmt -azure-mgmt-containerinstance==1.4.1 - # via azure-mgmt -azure-mgmt-containerregistry==2.7.0 - # via azure-mgmt -azure-mgmt-containerservice==4.4.0 - # via azure-mgmt -azure-mgmt-cosmosdb==0.4.1 - # via azure-mgmt -azure-mgmt-datafactory==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-analytics==0.6.0 - # via azure-mgmt -azure-mgmt-datalake-nspkg==3.0.1 - # via - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store -azure-mgmt-datalake-store==0.5.0 - # via azure-mgmt -azure-mgmt-datamigration==1.0.0 - # via azure-mgmt -azure-mgmt-devspaces==0.1.0 - # via azure-mgmt -azure-mgmt-devtestlabs==2.2.0 - # via azure-mgmt -azure-mgmt-dns==2.1.0 - # via azure-mgmt -azure-mgmt-eventgrid==1.0.0 - # via azure-mgmt -azure-mgmt-eventhub==2.5.0 - # via azure-mgmt -azure-mgmt-hanaonazure==0.1.1 - # via azure-mgmt -azure-mgmt-iotcentral==0.1.0 - # via azure-mgmt -azure-mgmt-iothub==0.5.0 - # via azure-mgmt -azure-mgmt-iothubprovisioningservices==0.2.0 - # via azure-mgmt -azure-mgmt-keyvault==1.1.0 - # via azure-mgmt -azure-mgmt-loganalytics==0.2.0 - # via azure-mgmt -azure-mgmt-logic==3.0.0 - # via azure-mgmt -azure-mgmt-machinelearningcompute==0.4.1 - # 
via azure-mgmt -azure-mgmt-managementgroups==0.1.0 - # via azure-mgmt -azure-mgmt-managementpartner==0.1.0 - # via azure-mgmt -azure-mgmt-maps==0.1.0 - # via azure-mgmt -azure-mgmt-marketplaceordering==0.1.0 - # via azure-mgmt -azure-mgmt-media==1.0.0 - # via azure-mgmt -azure-mgmt-monitor==0.5.2 - # via azure-mgmt -azure-mgmt-msi==0.2.0 - # via azure-mgmt -azure-mgmt-network==2.6.0 - # via azure-mgmt -azure-mgmt-notificationhubs==2.0.0 - # via azure-mgmt -azure-mgmt-nspkg==3.0.2 - # via - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-consumption - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-nspkg - # azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web -azure-mgmt-policyinsights==0.1.0 - # via azure-mgmt -azure-mgmt-powerbiembedded==2.0.0 - # via azure-mgmt -azure-mgmt-rdbms==1.8.0 - # via azure-mgmt -azure-mgmt-recoveryservices==0.3.0 - # via azure-mgmt 
-azure-mgmt-recoveryservicesbackup==0.3.0 - # via azure-mgmt -azure-mgmt-redis==5.0.0 - # via azure-mgmt -azure-mgmt-relay==0.1.0 - # via azure-mgmt -azure-mgmt-reservations==0.2.1 - # via azure-mgmt -azure-mgmt-resource==2.1.0 - # via azure-mgmt -azure-mgmt-scheduler==2.0.0 - # via azure-mgmt -azure-mgmt-search==2.0.0 - # via azure-mgmt -azure-mgmt-servicebus==0.5.3 - # via azure-mgmt -azure-mgmt-servicefabric==0.2.0 - # via azure-mgmt -azure-mgmt-signalr==0.1.1 - # via azure-mgmt -azure-mgmt-sql==0.9.1 - # via azure-mgmt -azure-mgmt-storage==2.0.0 - # via azure-mgmt -azure-mgmt-subscription==0.2.0 - # via azure-mgmt -azure-mgmt-trafficmanager==0.50.0 - # via azure-mgmt -azure-mgmt-web==0.35.0 - # via azure-mgmt -azure-mgmt==4.0.0 - # via azure -azure-nspkg==3.0.2 - # via - # azure-applicationinsights - # azure-batch - # azure-cosmosdb-nspkg - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-loganalytics - # azure-mgmt-nspkg - # azure-servicebus - # azure-servicefabric - # azure-servicemanagement-legacy -azure-servicebus==0.21.1 - # via azure -azure-servicefabric==6.3.0.0 - # via azure -azure-servicemanagement-legacy==0.20.6 - # via azure -azure-storage-blob==1.5.0 - # via azure -azure-storage-common==1.4.0 - # via - # azure-cosmosdb-table - # azure-storage-blob - # azure-storage-file - # azure-storage-queue -azure-storage-file==1.4.0 - # via azure -azure-storage-queue==1.4.0 - # via azure -azure==4.0.0 ; sys_platform != "win32" - # via -r requirements/static/ci/common.in backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==3.1.6 # via + # -r requirements/static/ci/common.in # paramiko - # passlib -boto3==1.21.46 ; python_version >= "3.6" +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -355,42 +48,43 @@ botocore==1.24.46 # boto3 # moto # s3transfer -cached-property==1.5.2 - # via pygit2 cachetools==4.2.2 # via # google-auth # python-telegram-bot cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in 
-certifi==2022.12.7 +certifi==2023.7.22 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/static/ci/common.in # kubernetes - # msrest # python-telegram-bot # requests certvalidator==0.11.1 # via vcert cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/static/ci/common.in - # azure-datalake-store # bcrypt # cryptography # napalm # pygit2 # pynacl -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in ciscoconfparse==1.5.19 @@ -402,16 +96,16 @@ clustershell==1.8.3 colorama==0.4.3 # via ciscoconfparse contextvars==2.4 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/base.txt croniter==0.3.29 ; sys_platform != "win32" # via -r requirements/static/ci/common.in -cryptography==3.3.2 +cryptography==41.0.7 # via - # adal + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/pkg/linux.in # ansible-core - # azure-cosmosdb-table - # azure-keyvault - # azure-storage-common # etcd3-py # moto # paramiko @@ -421,6 +115,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt # pytest-skip-markers dnspython==1.16.0 @@ -428,11 +123,9 @@ dnspython==1.16.0 # -r requirements/static/ci/common.in # ciscoconfparse # python-etcd -docker==5.0.3 - # via - # -r requirements/static/ci/common.in - # pytest-salt-factories -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt
+etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -452,42 +145,53 @@ genshi==0.7.5 # via -r requirements/static/ci/common.in geomet==0.1.2 # via cassandra-driver -gitdb==4.0.5 +gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/static/ci/common.in -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes hglib==2.6.1 # via -r requirements/static/ci/linux.in -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/pkg/linux.in iniconfig==1.0.1 # via pytest ipaddress==1.0.22 # via kubernetes -isodate==0.6.0 - # via msrest jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # jaraco.collections jaraco.collections==3.4.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt # ansible-core # junos-eznc @@ -495,6 +199,7 @@ jinja2==3.1.2 # napalm jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -516,7 +221,9 @@ kubernetes==3.0.0 libnacl==1.7.1 ; sys_platform != "win32" and sys_platform != "darwin" # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c 
requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/base.txt lxml==4.9.1 # via # junos-eznc @@ -526,6 +233,7 @@ mako==1.2.2 # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt # jinja2 # mako @@ -533,114 +241,28 @@ markupsafe==2.1.2 # werkzeug mercurial==6.0.1 # via -r requirements/static/ci/linux.in -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==5.0.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt # pytest-salt-factories -msrest==0.6.14 - # via - # azure-applicationinsights - # azure-eventgrid - # azure-keyvault - # azure-loganalytics - # azure-mgmt-cdn - # azure-mgmt-compute - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-dns - # azure-mgmt-eventhub - # azure-mgmt-keyvault - # azure-mgmt-media - # azure-mgmt-network - # azure-mgmt-rdbms - # azure-mgmt-resource - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-servicefabric - # msrestazure -msrestazure==0.6.3 - # via - # azure-batch - # azure-eventgrid - # azure-graphrbac - # azure-keyvault - # azure-mgmt-advisor - # azure-mgmt-applicationinsights - # azure-mgmt-authorization - # azure-mgmt-batch - # azure-mgmt-batchai - # azure-mgmt-billing - # azure-mgmt-cdn - # azure-mgmt-cognitiveservices - # azure-mgmt-commerce - # azure-mgmt-compute - # azure-mgmt-consumption - # azure-mgmt-containerinstance - # azure-mgmt-containerregistry - # azure-mgmt-containerservice - # azure-mgmt-cosmosdb - # azure-mgmt-datafactory - # azure-mgmt-datalake-analytics - # azure-mgmt-datalake-store - # 
azure-mgmt-datamigration - # azure-mgmt-devspaces - # azure-mgmt-devtestlabs - # azure-mgmt-dns - # azure-mgmt-eventgrid - # azure-mgmt-eventhub - # azure-mgmt-hanaonazure - # azure-mgmt-iotcentral - # azure-mgmt-iothub - # azure-mgmt-iothubprovisioningservices - # azure-mgmt-keyvault - # azure-mgmt-loganalytics - # azure-mgmt-logic - # azure-mgmt-machinelearningcompute - # azure-mgmt-managementgroups - # azure-mgmt-managementpartner - # azure-mgmt-maps - # azure-mgmt-marketplaceordering - # azure-mgmt-media - # azure-mgmt-monitor - # azure-mgmt-msi - # azure-mgmt-network - # azure-mgmt-notificationhubs - # azure-mgmt-policyinsights - # azure-mgmt-powerbiembedded - # azure-mgmt-rdbms - # azure-mgmt-recoveryservices - # azure-mgmt-recoveryservicesbackup - # azure-mgmt-redis - # azure-mgmt-relay - # azure-mgmt-reservations - # azure-mgmt-resource - # azure-mgmt-scheduler - # azure-mgmt-search - # azure-mgmt-servicebus - # azure-mgmt-servicefabric - # azure-mgmt-signalr - # azure-mgmt-sql - # azure-mgmt-storage - # azure-mgmt-subscription - # azure-mgmt-trafficmanager - # azure-mgmt-web multidict==6.0.2 # via # aiohttp # yarl -napalm==3.1.0 ; sys_platform != "win32" and python_version > "3.6" and python_version < "3.10" +napalm==3.1.0 ; sys_platform != "win32" and python_version < "3.10" # via -r requirements/static/ci/common.in ncclient==0.6.4 # via junos-eznc @@ -653,14 +275,14 @@ netmiko==3.2.0 # via napalm ntc-templates==1.4.1 # via junos-eznc -oauthlib==3.2.2 - # via requests-oauthlib oscrypto==1.2.0 # via certvalidator packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt # ansible-core + # docker # pytest paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # via @@ -670,7 +292,7 @@ paramiko==2.10.1 ; sys_platform != "win32" and sys_platform != "darwin" # ncclient # netmiko # scp -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via # -r requirements/static/ci/common.in # ciscoconfparse @@ -683,9 
+305,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -698,29 +323,35 @@ pyasn1==0.4.8 # rsa pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/static/ci/common.in # -r requirements/static/pkg/linux.in # cffi pycryptodomex==3.9.8 - # via -r requirements/crypto.txt + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/crypto.txt pyeapi==0.8.3 # via napalm -pygit2==1.5.0 ; python_version > "3.8" +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/linux.in pyiface==0.0.11 # via -r requirements/static/ci/linux.in pyinotify==0.9.6 ; sys_platform != "win32" and sys_platform != "darwin" and platform_system != "openbsd" # via -r requirements/static/ci/common.in pyjwt==2.4.0 - # via - # adal - # twilio -pymysql==1.0.2 ; python_version > "3.5" + # via twilio +pymysql==1.0.2 # via -r requirements/static/ci/linux.in -pynacl==1.3.0 - # via paramiko -pyopenssl==19.1.0 +pynacl==1.5.0 # via + # -r requirements/static/ci/common.in + # paramiko +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/static/pkg/linux.in # etcd3-py pyparsing==3.0.9 @@ -738,13 +369,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories[docker]==1.0.0rc21 ; sys_platform != "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via 
# pytest-salt-factories # pytest-shell-utilities @@ -753,13 +384,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" +pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -769,16 +396,13 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-consul==1.1.0 # via -r requirements/static/ci/linux.in python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/static/pkg/linux.in - # adal - # azure-cosmosdb-table - # azure-storage-common # botocore # croniter # kubernetes @@ -787,60 +411,55 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/static/pkg/linux.in -python-telegram-bot==13.7 ; python_version > "3.5" + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/pkg/linux.in +python-telegram-bot==13.7 # via -r requirements/static/ci/linux.in pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # apscheduler # moto # python-telegram-bot # tempora # twilio - # tzlocal pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt # ansible-core # clustershell # junos-eznc # kubernetes # napalm + # pytest-salt-factories # yamllint # yamlordereddictloader pyzmq==23.2.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/zeromq.txt # pytest-salt-factories redis-py-cluster==2.1.3 # via -r requirements/static/ci/linux.in redis==3.5.3 # via redis-py-cluster -requests-oauthlib==1.3.0 - # via 
msrest -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # -r requirements/base.txt # -r requirements/static/ci/common.in - # adal # apache-libcloud - # azure-cosmosdb-table - # azure-datalake-store - # azure-keyvault - # azure-servicebus - # azure-servicemanagement-legacy - # azure-storage-common # docker # etcd3-py # kubernetes # moto - # msrest # napalm # python-consul # pyvmomi - # requests-oauthlib # responses # twilio # vcert @@ -851,7 +470,9 @@ responses==0.10.6 rfc3987==1.3.8 # via -r requirements/static/ci/common.in rpm-vercmp==0.1.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/pkg/linux.in rsa==4.7.2 # via google-auth s3transfer==0.5.2 @@ -864,29 +485,26 @@ scp==0.13.2 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/static/pkg/linux.in + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/pkg/linux.in six==1.16.0 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # apscheduler # bcrypt # cassandra-driver # cheroot - # cryptography # etcd3-py # genshi # geomet - # google-auth - # isodate # jsonschema # junos-eznc # kazoo # kubernetes - # mock # more-itertools # ncclient # paramiko - # pynacl - # pyopenssl # python-consul # python-dateutil # pyvmomi @@ -900,14 +518,16 @@ slack-bolt==1.15.5 # via -r requirements/static/ci/linux.in slack-sdk==3.19.5 # via slack-bolt -smmap==3.0.4 +smmap==4.0.0 # via gitdb -sqlparse==0.4.2 +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # portend terminal==0.4.0 # via ntc-templates textfsm==1.1.0 @@ -916,26 +536,30 @@ textfsm==1.1.0 # netmiko # ntc-templates timelib==0.2.5 - # via -r requirements/static/pkg/linux.in + # via + # -c 
requirements/static/ci/../pkg/py3.9/linux.txt + # -r requirements/static/pkg/linux.in toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 # via pytest tornado==6.1 # via python-telegram-bot -transitions==0.8.1 +transitions==0.8.9 # via junos-eznc twilio==7.9.2 # via -r requirements/static/ci/linux.in -typing-extensions==4.2.0 +typing-extensions==4.8.0 # via # pytest-shell-utilities # pytest-system-statistics -tzlocal==2.1 +tzlocal==3.0 # via apscheduler -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -951,7 +575,9 @@ websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver @@ -964,9 +590,13 @@ yamlordereddictloader==0.4.0 yarl==1.7.2 # via aiohttp zc.lockfile==1.4 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.9/linux.txt + # importlib-metadata # The following packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/py3.9/tools-virustotal.txt b/requirements/static/ci/py3.9/tools-virustotal.txt new file mode 100644 index 000000000000..6154582577ba --- /dev/null +++ b/requirements/static/ci/py3.9/tools-virustotal.txt @@ -0,0 +1,28 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --output-file=requirements/static/ci/py3.9/tools-virustotal.txt requirements/static/ci/tools-virustotal.in +# +certifi==2023.7.22 + # via + # -c requirements/static/ci/../ci/py3.9/tools.txt + # requests +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../ci/py3.9/tools.txt + # requests +idna==3.2 + # via + # -c requirements/static/ci/../ci/py3.9/tools.txt + # requests +requests==2.31.0 + # via + # -c 
requirements/static/ci/../ci/py3.9/tools.txt + # virustotal3 +urllib3==1.26.18 + # via + # -c requirements/static/ci/../ci/py3.9/tools.txt + # requests +virustotal3==1.0.8 + # via -r requirements/static/ci/tools-virustotal.in diff --git a/requirements/static/ci/py3.9/tools.txt b/requirements/static/ci/py3.9/tools.txt index dd9c8e7e006c..49375c5e736e 100644 --- a/requirements/static/ci/py3.9/tools.txt +++ b/requirements/static/ci/py3.9/tools.txt @@ -2,25 +2,23 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/tools.txt --pip-args='--constraint=requirements/static/ci/py3.9/linux.txt' requirements/static/ci/tools.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/tools.txt requirements/static/ci/tools.in # -attrs==22.1.0 +attrs==20.3.0 # via # -r requirements/static/ci/tools.in # python-tools-scripts -boto3==1.21.46 +boto3==1.26.147 # via -r requirements/static/ci/tools.in -botocore==1.24.46 +botocore==1.29.147 # via # boto3 # s3transfer -certifi==2022.12.7 +certifi==2023.7.22 # via requests -charset-normalizer==3.0.1 +charset-normalizer==3.2.0 # via requests -commonmark==0.9.1 - # via rich -idna==3.4 +idna==3.2 # via requests jinja2==3.1.2 # via -r requirements/static/ci/tools.in @@ -28,29 +26,33 @@ jmespath==1.0.1 # via # boto3 # botocore +markdown-it-py==2.2.0 + # via rich markupsafe==2.1.2 # via jinja2 -packaging==23.0 +mdurl==0.1.2 + # via markdown-it-py +packaging==22.0 # via -r requirements/static/ci/tools.in pygments==2.13.0 # via rich -python-dateutil==2.8.2 +python-dateutil==2.8.1 # via botocore -python-tools-scripts==0.11.1 +python-tools-scripts==0.18.6 # via -r requirements/static/ci/tools.in -pyyaml==6.0 +pyyaml==6.0.1 # via -r requirements/static/ci/tools.in -requests==2.28.2 +requests==2.31.0 # via python-tools-scripts -rich==12.5.1 +rich==13.3.5 # via python-tools-scripts -s3transfer==0.5.2 +s3transfer==0.6.1 # via boto3 six==1.16.0 # via 
python-dateutil -typing-extensions==4.4.0 +typing-extensions==4.2.0 # via python-tools-scripts -urllib3==1.26.12 +urllib3==1.26.18 # via # botocore # requests diff --git a/requirements/static/ci/py3.9/windows-crypto.txt b/requirements/static/ci/py3.9/windows-crypto.txt index 1fcc84af5ae2..c0ef4e1ec6ca 100644 --- a/requirements/static/ci/py3.9/windows-crypto.txt +++ b/requirements/static/ci/py3.9/windows-crypto.txt @@ -2,7 +2,7 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/windows-crypto.txt --pip-args='--constraint=requirements/static/ci/py3.9/windows.txt' requirements/static/ci/crypto.in +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/windows-crypto.txt requirements/static/ci/crypto.in # m2crypto==0.38.0 # via -r requirements/static/ci/crypto.in diff --git a/requirements/static/ci/py3.9/windows.txt b/requirements/static/ci/py3.9/windows.txt index 8c2bc2144fd1..dcb2c3e4a6b2 100644 --- a/requirements/static/ci/py3.9/windows.txt +++ b/requirements/static/ci/py3.9/windows.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/ci/py3.9/windows.txt --pip-args='--constraint=requirements/static/pkg/py3.9/windows.txt' requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/ci/py3.9/windows.txt requirements/pytest.txt requirements/static/ci/common.in requirements/static/ci/windows.in requirements/static/pkg/windows.in requirements/windows.txt # -aiohttp==3.8.1 +aiohttp==3.9.0 # via etcd3-py aiosignal==1.2.0 # via aiohttp async-timeout==4.0.2 # via aiohttp -attrs==20.3.0 +attrs==23.1.0 # via # aiohttp # jsonschema @@ -22,8 +22,8 @@ attrs==20.3.0 backports.entry-points-selectable==1.1.0 # via virtualenv bcrypt==4.0.1 - # via 
passlib -boto3==1.21.46 ; python_version >= "3.6" + # via -r requirements/static/ci/common.in +boto3==1.21.46 # via # -r requirements/static/ci/common.in # moto @@ -38,43 +38,54 @@ cachetools==3.1.0 # via google-auth cassandra-driver==3.23.0 # via -r requirements/static/ci/common.in -certifi==2022.12.7 +certifi==2023.07.22 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt # kubernetes # requests cffi==1.14.6 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt # clr-loader # cryptography # pygit2 -chardet==3.0.4 - # via requests -charset-normalizer==2.0.12 - # via aiohttp + # pynacl +charset-normalizer==3.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # requests cheetah3==3.2.6.post2 # via -r requirements/static/ci/common.in cheroot==8.5.2 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # cherrypy cherrypy==18.6.1 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt click==7.1.2 # via geomet clr-loader==0.2.4 - # via pythonnet + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # pythonnet clustershell==1.8.3 # via -r requirements/static/ci/common.in colorama==0.4.1 # via pytest contextvars==2.4 - # via -r requirements/base.txt -cryptography==3.4.7 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/base.txt +cryptography==41.0.7 + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/windows.txt # etcd3-py # moto @@ -84,6 +95,7 @@ distlib==0.3.2 # via virtualenv distro==1.5.0 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt # pytest-skip-markers dmidecode==0.9.0 @@ -92,11 +104,9 @@ dnspython==1.16.0 # via # -r requirements/static/ci/common.in # python-etcd -docker-pycreds==0.4.0 
- # via docker -docker==2.7.0 - # via -r requirements/static/ci/common.in -etcd3-py==0.1.6 ; python_version >= "3.6" +docker==6.1.3 + # via -r requirements/pytest.txt +etcd3-py==0.1.6 # via -r requirements/static/ci/common.in exceptiongroup==1.0.4 # via pytest @@ -113,45 +123,64 @@ genshi==0.7.5 geomet==0.1.2 # via cassandra-driver gitdb==4.0.7 - # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # gitpython +gitpython==3.1.37 + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt -google-auth==1.6.3 +google-auth==2.1.0 # via kubernetes -idna==2.8 +idna==3.2 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # etcd3-py # requests # yarl immutables==0.15 - # via contextvars + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # contextvars importlib-metadata==6.0.0 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/windows.txt iniconfig==1.0.1 # via pytest ioloop==0.1a0 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/windows.txt ipaddress==1.0.22 # via kubernetes jaraco.classes==3.2.1 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # jaraco.collections jaraco.collections==3.3.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # cherrypy jaraco.functools==2.0 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # cheroot # jaraco.text # tempora jaraco.text==3.5.0 - # via jaraco.collections + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # jaraco.collections jinja2==3.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt # moto jmespath==1.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r 
requirements/base.txt # -r requirements/static/ci/common.in # boto3 @@ -163,32 +192,38 @@ keyring==5.7.1 kubernetes==3.0.0 # via -r requirements/static/ci/common.in looseversion==1.0.2 - # via -r requirements/base.txt + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/base.txt lxml==4.9.1 - # via -r requirements/windows.txt -mako==1.2.2 # via - # -r requirements/static/ci/common.in + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/windows.txt +mako==1.2.2 + # via -r requirements/static/ci/common.in markupsafe==2.1.2 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt # jinja2 # mako # moto # werkzeug -mock==3.0.5 +mock==5.1.0 # via -r requirements/pytest.txt more-itertools==8.2.0 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/pytest.txt # cheroot # cherrypy # jaraco.classes # jaraco.functools -moto==3.0.1 ; python_version >= "3.6" +moto==3.0.1 # via -r requirements/static/ci/common.in msgpack==1.0.2 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt # pytest-salt-factories multidict==6.0.2 @@ -197,11 +232,13 @@ multidict==6.0.2 # yarl ntlm-auth==1.5.0 # via requests-ntlm -packaging==21.3 +packaging==22.0 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt + # docker # pytest -passlib[bcrypt]==1.7.4 +passlib==1.7.4 # via -r requirements/static/ci/common.in patch==1.16 # via -r requirements/static/ci/windows.in @@ -214,9 +251,12 @@ platformdirs==2.2.0 pluggy==0.13.0 # via pytest portend==2.6 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # cherrypy psutil==5.8.0 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt # pytest-salt-factories # pytest-shell-utilities @@ -225,28 +265,39 @@ pyasn1-modules==0.2.4 # via google-auth pyasn1==0.4.8 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r 
requirements/windows.txt # pyasn1-modules # rsa pycparser==2.21 ; python_version >= "3.9" # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt # cffi pycryptodomex==3.10.1 - # via -r requirements/crypto.txt -pygit2==1.9.1 ; python_version >= "3.7" + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/crypto.txt +pyfakefs==5.3.1 + # via -r requirements/pytest.txt +pygit2==1.13.1 # via -r requirements/static/ci/windows.in pymssql==2.2.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/windows.txt pymysql==1.0.2 - # via -r requirements/windows.txt -pyopenssl==20.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/windows.txt +pynacl==1.5.0 + # via -r requirements/static/ci/common.in +pyopenssl==23.2.0 + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/windows.txt # etcd3-py -pyparsing==3.0.9 - # via packaging pyrsistent==0.17.3 # via jsonschema pytest-custom-exit-code==0.3.0 @@ -256,13 +307,13 @@ pytest-helpers-namespace==2021.4.29 # -r requirements/pytest.txt # pytest-salt-factories # pytest-shell-utilities -pytest-httpserver==1.0.4 +pytest-httpserver==1.0.8 # via -r requirements/pytest.txt -pytest-salt-factories==1.0.0rc21 ; sys_platform == "win32" +pytest-salt-factories==1.0.0rc28 # via -r requirements/pytest.txt -pytest-shell-utilities==1.6.0 +pytest-shell-utilities==1.8.0 # via pytest-salt-factories -pytest-skip-markers==1.2.0 +pytest-skip-markers==1.5.0 # via # pytest-salt-factories # pytest-shell-utilities @@ -271,13 +322,9 @@ pytest-subtests==0.4.0 # via -r requirements/pytest.txt pytest-system-statistics==1.0.2 # via pytest-salt-factories -pytest-tempdir==2019.10.12 - # via - # -r requirements/pytest.txt - # pytest-salt-factories pytest-timeout==1.4.2 # via -r requirements/pytest.txt -pytest==7.2.0 ; python_version > "3.6" 
+pytest==7.2.0 # via # -r requirements/pytest.txt # pytest-custom-exit-code @@ -287,10 +334,10 @@ pytest==7.2.0 ; python_version > "3.6" # pytest-skip-markers # pytest-subtests # pytest-system-statistics - # pytest-tempdir # pytest-timeout python-dateutil==2.8.1 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/windows.txt # botocore # kubernetes @@ -298,37 +345,48 @@ python-dateutil==2.8.1 python-etcd==0.4.5 # via -r requirements/static/ci/common.in python-gnupg==0.4.8 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/windows.txt pythonnet==3.0.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/windows.txt pytz==2022.1 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # moto # tempora pyvmomi==6.7.1.2018.12 # via -r requirements/static/ci/common.in pywin32==305 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/windows.txt # cherrypy + # docker # pytest-skip-markers # wmi pywinrm==0.4.1 # via -r requirements/static/ci/windows.in -pyyaml==5.4.1 +pyyaml==6.0.1 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt # clustershell # kubernetes + # pytest-salt-factories # yamllint -pyzmq==25.0.0 ; sys_platform == "win32" +pyzmq==25.0.2 ; sys_platform == "win32" # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/zeromq.txt # pytest-salt-factories requests-ntlm==1.1.0 # via pywinrm -requests==2.25.1 +requests==2.31.0 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/base.txt # -r requirements/static/ci/common.in # -r requirements/windows.txt @@ -353,21 +411,19 @@ sed==0.3.1 semantic-version==2.9.0 # via etcd3-py setproctitle==1.3.2 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/windows.txt six==1.15.0 # via + 
# -c requirements/static/ci/../pkg/py3.9/windows.txt # cassandra-driver # cheroot - # docker - # docker-pycreds # etcd3-py # genshi # geomet - # google-auth # jsonschema # kubernetes - # mock - # pyopenssl # python-dateutil # pyvmomi # pywinrm @@ -375,15 +431,21 @@ six==1.15.0 # virtualenv # websocket-client smmap==4.0.0 - # via gitdb -sqlparse==0.4.2 + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # gitdb +sqlparse==0.4.4 # via -r requirements/static/ci/common.in strict-rfc3339==0.7 # via -r requirements/static/ci/common.in tempora==4.1.1 - # via portend + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # portend timelib==0.2.5 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/windows.txt toml==0.10.2 # via -r requirements/static/ci/common.in tomli==2.0.1 @@ -392,10 +454,12 @@ typing-extensions==4.2.0 # via # pytest-shell-utilities # pytest-system-statistics -urllib3==1.26.6 +urllib3==1.26.18 # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt # -r requirements/windows.txt # botocore + # docker # kubernetes # python-etcd # requests @@ -409,14 +473,20 @@ websocket-client==0.40.0 # via # docker # kubernetes -werkzeug==2.2.3 +wempy==0.2.1 + # via -r requirements/static/ci/common.in +werkzeug==3.0.1 # via # moto # pytest-httpserver wheel==0.38.4 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/windows.txt wmi==1.5.1 - # via -r requirements/windows.txt + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # -r requirements/windows.txt xmltodict==0.12.0 # via # moto @@ -426,9 +496,13 @@ yamllint==1.26.3 yarl==1.7.2 # via aiohttp zc.lockfile==2.0 - # via cherrypy + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # cherrypy zipp==3.5.0 - # via importlib-metadata + # via + # -c requirements/static/ci/../pkg/py3.9/windows.txt + # importlib-metadata # The following 
packages are considered to be unsafe in a requirements file: # setuptools diff --git a/requirements/static/ci/tools-virustotal.in b/requirements/static/ci/tools-virustotal.in new file mode 100644 index 000000000000..f5830e231078 --- /dev/null +++ b/requirements/static/ci/tools-virustotal.in @@ -0,0 +1,3 @@ +--constraint=../ci/py{py_version}/tools.txt + +virustotal3 diff --git a/requirements/static/ci/tools.in b/requirements/static/ci/tools.in index 08f134ab202a..7bc0163df058 100644 --- a/requirements/static/ci/tools.in +++ b/requirements/static/ci/tools.in @@ -1,5 +1,5 @@ -python-tools-scripts >= 0.11.1 attrs +python-tools-scripts >= 0.18.6 boto3 pyyaml jinja2 diff --git a/requirements/static/ci/windows.in b/requirements/static/ci/windows.in index b2b7857de46a..52afea9a71e6 100644 --- a/requirements/static/ci/windows.in +++ b/requirements/static/ci/windows.in @@ -1,7 +1,9 @@ # This is a compilation of requirements installed on salt-jenkins git.salt state run +--constraint=../pkg/py{py_version}/{platform}.txt + dmidecode patch -pygit2>=1.2.0; python_version >= '3.7' +pygit2>=1.10.1 sed pywinrm>=0.4.1 yamllint diff --git a/requirements/static/pkg/freebsd.in b/requirements/static/pkg/freebsd.in index 87c87c99e390..3780d5b0fd89 100644 --- a/requirements/static/pkg/freebsd.in +++ b/requirements/static/pkg/freebsd.in @@ -1,9 +1,9 @@ # This file only exists to trigger the right static compiled requirements destination # Any non hard dependencies of Salt for FreeBSD can go here cherrypy -backports.ssl_match_hostname>=3.7.0.1; python_version < '3.7' +cryptography>=41.0.3 pycparser>=2.21; python_version >= '3.9' -pyopenssl>=19.0.0 +pyopenssl>=23.2.0 python-dateutil>=2.8.0 python-gnupg>=0.4.4 setproctitle>=1.2.3 diff --git a/requirements/static/pkg/linux.in b/requirements/static/pkg/linux.in index baed44be17c7..937c6d3b2040 100644 --- a/requirements/static/pkg/linux.in +++ b/requirements/static/pkg/linux.in @@ -1,12 +1,12 @@ # This file only exists to trigger the right static 
compiled requirements destination. # Any non hard dependencies of Salt for linux can go here cherrypy -backports.ssl_match_hostname>=3.7.0.1; python_version < '3.7' pycparser>=2.21; python_version >= '3.9' -pyopenssl>=19.0.0 +pyopenssl>=23.2.0 python-dateutil>=2.8.0 python-gnupg>=0.4.4 rpm-vercmp setproctitle>=1.2.3 timelib>=0.2.5 importlib-metadata>=3.3.0 +cryptography>=41.0.3 diff --git a/requirements/static/pkg/py3.10/darwin.txt b/requirements/static/pkg/py3.10/darwin.txt index 3234ecf5e771..94d701749537 100644 --- a/requirements/static/pkg/py3.10/darwin.txt +++ b/requirements/static/pkg/py3.10/darwin.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.10/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.10/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in # apache-libcloud==2.5.0 # via -r requirements/darwin.txt -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -18,17 +18,17 @@ cherrypy==18.6.1 # via -r requirements/darwin.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==3.3.2 +cryptography==41.0.7 # via # -r requirements/darwin.txt # pyopenssl distro==1.5.0 # via -r requirements/base.txt -gitdb==4.0.5 +gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/darwin.txt -idna==2.8 +idna==3.2 # via # -r requirements/darwin.txt # requests @@ -55,13 +55,10 @@ linode-python==1.1.1 # via -r requirements/darwin.txt looseversion==1.0.2 # via -r requirements/base.txt -mako==1.2.2 - # via -r requirements/darwin.txt markupsafe==2.1.2 # via # -r requirements/base.txt # jinja2 - # mako more-itertools==8.2.0 # via # cheroot @@ -84,7 +81,7 @@ pycparser==2.21 # cffi 
pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==19.0.0 +pyopenssl==23.2.0 # via -r requirements/darwin.txt python-dateutil==2.8.0 # via -r requirements/darwin.txt @@ -92,11 +89,11 @@ python-gnupg==0.4.8 # via -r requirements/darwin.txt pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # apache-libcloud @@ -106,16 +103,14 @@ setproctitle==1.3.2 six==1.16.0 # via # cheroot - # cryptography - # pyopenssl # python-dateutil -smmap==3.0.2 +smmap==4.0.0 # via gitdb tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/darwin.txt -urllib3==1.26.6 +urllib3==1.26.18 # via requests vultr==1.0.1 # via -r requirements/darwin.txt diff --git a/requirements/static/pkg/py3.10/freebsd.txt b/requirements/static/pkg/py3.10/freebsd.txt index e34627b28a35..f1ee26c72c13 100644 --- a/requirements/static/pkg/py3.10/freebsd.txt +++ b/requirements/static/pkg/py3.10/freebsd.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.10/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.10/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -16,13 +16,15 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==3.3.2 - # via pyopenssl +cryptography==41.0.7 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl distro==1.5.0 # via # -r requirements/base.txt # -r requirements/static/pkg/freebsd.in -idna==2.8 +idna==3.2 # 
via requests immutables==0.15 # via contextvars @@ -69,7 +71,7 @@ pycparser==2.21 ; python_version >= "3.9" # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==19.1.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 # via -r requirements/static/pkg/freebsd.in @@ -77,26 +79,24 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in six==1.16.0 # via # cheroot - # cryptography # more-itertools - # pyopenssl # python-dateutil tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/static/pkg/freebsd.in -urllib3==1.26.6 +urllib3==1.26.18 # via requests zc.lockfile==1.4 # via cherrypy diff --git a/requirements/static/pkg/py3.10/linux.txt b/requirements/static/pkg/py3.10/linux.txt index f82f067553e3..bb3f6f7406ce 100644 --- a/requirements/static/pkg/py3.10/linux.txt +++ b/requirements/static/pkg/py3.10/linux.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.10/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.10/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -16,11 +16,13 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==3.3.2 - # via pyopenssl +cryptography==41.0.7 + # via + # -r requirements/static/pkg/linux.in + # pyopenssl 
distro==1.5.0 # via -r requirements/base.txt -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars @@ -67,7 +69,7 @@ pycparser==2.21 ; python_version >= "3.9" # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==19.1.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 # via -r requirements/static/pkg/linux.in @@ -75,11 +77,11 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in @@ -88,15 +90,13 @@ setproctitle==1.3.2 six==1.16.0 # via # cheroot - # cryptography # more-itertools - # pyopenssl # python-dateutil tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/static/pkg/linux.in -urllib3==1.26.6 +urllib3==1.26.18 # via requests zc.lockfile==1.4 # via cherrypy diff --git a/requirements/static/pkg/py3.10/windows.txt b/requirements/static/pkg/py3.10/windows.txt index f7713b8b9a05..2c614554fac7 100644 --- a/requirements/static/pkg/py3.10/windows.txt +++ b/requirements/static/pkg/py3.10/windows.txt @@ -2,9 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.10/windows.txt requirements/static/pkg/windows.in requirements/windows.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.10/windows.txt requirements/static/pkg/windows.in requirements/windows.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/windows.txt # requests @@ -13,7 +13,7 @@ cffi==1.14.6 # -r requirements/windows.txt # clr-loader # cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt 
-cryptography==3.4.7 +cryptography==41.0.7 # via # -r requirements/windows.txt # pyopenssl @@ -31,9 +31,9 @@ distro==1.5.0 # via -r requirements/base.txt gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/windows.txt -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars @@ -60,13 +60,10 @@ looseversion==1.0.2 # via -r requirements/base.txt lxml==4.9.1 # via -r requirements/windows.txt -mako==1.2.2 - # via -r requirements/windows.txt markupsafe==2.1.2 # via # -r requirements/base.txt # jinja2 - # mako more-itertools==8.2.0 # via # cheroot @@ -93,7 +90,7 @@ pymssql==2.2.7 # via -r requirements/windows.txt pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==20.0.1 +pyopenssl==23.2.0 # via -r requirements/windows.txt python-dateutil==2.8.1 # via -r requirements/windows.txt @@ -107,11 +104,11 @@ pywin32==305 # via # -r requirements/windows.txt # wmi -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.0.0 ; sys_platform == "win32" +pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/windows.txt @@ -120,7 +117,6 @@ setproctitle==1.3.2 six==1.15.0 # via # cheroot - # pyopenssl # python-dateutil smmap==4.0.0 # via gitdb @@ -128,7 +124,7 @@ tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/windows.txt -urllib3==1.26.6 +urllib3==1.26.18 # via # -r requirements/windows.txt # requests diff --git a/requirements/static/pkg/py3.11/darwin.txt b/requirements/static/pkg/py3.11/darwin.txt new file mode 100644 index 000000000000..5d168e28d84a --- /dev/null +++ b/requirements/static/pkg/py3.11/darwin.txt @@ -0,0 +1,123 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in +# 
+apache-libcloud==2.5.0 + # via -r requirements/darwin.txt +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/darwin.txt +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.7 + # via + # -r requirements/darwin.txt + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/darwin.txt +idna==3.2 + # via + # -r requirements/darwin.txt + # requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/darwin.txt +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +linode-python==1.1.1 + # via -r requirements/darwin.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.2.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.6 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pyasn1==0.4.8 + # via -r requirements/darwin.txt +pycparser==2.21 + # via + # -r requirements/darwin.txt + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/darwin.txt +python-dateutil==2.8.0 + # via -r requirements/darwin.txt +python-gnupg==0.4.8 + # via -r requirements/darwin.txt +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via + # -r requirements/base.txt + # apache-libcloud + # 
vultr +setproctitle==1.3.2 + # via -r requirements/darwin.txt +six==1.16.0 + # via + # cheroot + # python-dateutil +smmap==4.0.0 + # via gitdb +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/darwin.txt +urllib3==1.26.18 + # via requests +vultr==1.0.1 + # via -r requirements/darwin.txt +zc.lockfile==2.0 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.11/freebsd.txt b/requirements/static/pkg/py3.11/freebsd.txt new file mode 100644 index 000000000000..f1bffd3171d6 --- /dev/null +++ b/requirements/static/pkg/py3.11/freebsd.txt @@ -0,0 +1,107 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt +# +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/static/pkg/freebsd.in +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.7 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl +distro==1.5.0 + # via + # -r requirements/base.txt + # -r requirements/static/pkg/freebsd.in +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/static/pkg/freebsd.in +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==5.0.0 
+ # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.4 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pycparser==2.21 ; python_version >= "3.9" + # via + # -r requirements/static/pkg/freebsd.in + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/static/pkg/freebsd.in +python-dateutil==2.8.1 + # via -r requirements/static/pkg/freebsd.in +python-gnupg==0.4.8 + # via -r requirements/static/pkg/freebsd.in +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +setproctitle==1.3.2 + # via -r requirements/static/pkg/freebsd.in +six==1.16.0 + # via + # cheroot + # more-itertools + # python-dateutil +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/static/pkg/freebsd.in +urllib3==1.26.18 + # via requests +zc.lockfile==1.4 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.11/linux.txt b/requirements/static/pkg/py3.11/linux.txt new file mode 100644 index 000000000000..77dcdbad00cf --- /dev/null +++ b/requirements/static/pkg/py3.11/linux.txt @@ -0,0 +1,107 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt +# +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/static/pkg/linux.in +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.7 + # via + # 
-r requirements/static/pkg/linux.in + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/static/pkg/linux.in +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==5.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.4 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pycparser==2.21 ; python_version >= "3.9" + # via + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/static/pkg/linux.in +python-dateutil==2.8.1 + # via -r requirements/static/pkg/linux.in +python-gnupg==0.4.8 + # via -r requirements/static/pkg/linux.in +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +rpm-vercmp==0.1.2 + # via -r requirements/static/pkg/linux.in +setproctitle==1.3.2 + # via -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # cheroot + # more-itertools + # python-dateutil +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/static/pkg/linux.in +urllib3==1.26.18 + # via requests +zc.lockfile==1.4 + # via cherrypy +zipp==3.6.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git 
a/requirements/static/pkg/py3.11/windows.txt b/requirements/static/pkg/py3.11/windows.txt new file mode 100644 index 000000000000..e1416d0caa27 --- /dev/null +++ b/requirements/static/pkg/py3.11/windows.txt @@ -0,0 +1,141 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.11/windows.txt requirements/static/pkg/windows.in requirements/windows.txt +# +certifi==2023.07.22 + # via + # -r requirements/windows.txt + # requests +cffi==1.14.6 + # via + # -r requirements/windows.txt + # clr-loader + # cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/windows.txt +clr-loader==0.2.4 + # via pythonnet +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.7 + # via + # -r requirements/windows.txt + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/windows.txt +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/windows.txt +ioloop==0.1a0 + # via -r requirements/windows.txt +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.3.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.0 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +lxml==4.9.1 + # via -r requirements/windows.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.2.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.6 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pyasn1==0.4.8 + # via -r 
requirements/windows.txt +pycparser==2.21 + # via + # -r requirements/windows.txt + # cffi +pycryptodomex==3.10.1 + # via -r requirements/crypto.txt +pymssql==2.2.7 + # via -r requirements/windows.txt +pymysql==1.0.2 + # via -r requirements/windows.txt +pyopenssl==23.2.0 + # via -r requirements/windows.txt +python-dateutil==2.8.1 + # via -r requirements/windows.txt +python-gnupg==0.4.8 + # via -r requirements/windows.txt +pythonnet==3.0.1 + # via -r requirements/windows.txt +pytz==2022.1 + # via tempora +pywin32==305 + # via + # -r requirements/windows.txt + # wmi +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==25.0.2 ; sys_platform == "win32" + # via -r requirements/zeromq.txt +requests==2.31.0 + # via + # -r requirements/base.txt + # -r requirements/windows.txt +setproctitle==1.3.2 + # via -r requirements/windows.txt +six==1.15.0 + # via + # cheroot + # python-dateutil +smmap==4.0.0 + # via gitdb +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/windows.txt +urllib3==1.26.18 + # via + # -r requirements/windows.txt + # requests +wheel==0.38.4 + # via -r requirements/windows.txt +wmi==1.5.1 + # via -r requirements/windows.txt +zc.lockfile==2.0 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/darwin.txt b/requirements/static/pkg/py3.12/darwin.txt new file mode 100644 index 000000000000..d0461e528bbe --- /dev/null +++ b/requirements/static/pkg/py3.12/darwin.txt @@ -0,0 +1,123 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in +# +apache-libcloud==2.5.0 + # via -r requirements/darwin.txt +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via 
cherrypy +cherrypy==18.6.1 + # via -r requirements/darwin.txt +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.7 + # via + # -r requirements/darwin.txt + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/darwin.txt +idna==3.2 + # via + # -r requirements/darwin.txt + # requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/darwin.txt +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +linode-python==1.1.1 + # via -r requirements/darwin.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.2.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.6 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pyasn1==0.4.8 + # via -r requirements/darwin.txt +pycparser==2.21 + # via + # -r requirements/darwin.txt + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/darwin.txt +python-dateutil==2.8.0 + # via -r requirements/darwin.txt +python-gnupg==0.4.8 + # via -r requirements/darwin.txt +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via + # -r requirements/base.txt + # apache-libcloud + # vultr +setproctitle==1.3.2 + # via -r requirements/darwin.txt +six==1.16.0 + # via + # cheroot + # python-dateutil +smmap==4.0.0 + # via gitdb +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r 
requirements/darwin.txt +urllib3==1.26.18 + # via requests +vultr==1.0.1 + # via -r requirements/darwin.txt +zc.lockfile==2.0 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/freebsd.txt b/requirements/static/pkg/py3.12/freebsd.txt new file mode 100644 index 000000000000..733bfb0984eb --- /dev/null +++ b/requirements/static/pkg/py3.12/freebsd.txt @@ -0,0 +1,107 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt +# +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/static/pkg/freebsd.in +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.7 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl +distro==1.5.0 + # via + # -r requirements/base.txt + # -r requirements/static/pkg/freebsd.in +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/static/pkg/freebsd.in +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==5.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.4 + # via 
cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pycparser==2.21 ; python_version >= "3.9" + # via + # -r requirements/static/pkg/freebsd.in + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/static/pkg/freebsd.in +python-dateutil==2.8.1 + # via -r requirements/static/pkg/freebsd.in +python-gnupg==0.4.8 + # via -r requirements/static/pkg/freebsd.in +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +setproctitle==1.3.2 + # via -r requirements/static/pkg/freebsd.in +six==1.16.0 + # via + # cheroot + # more-itertools + # python-dateutil +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/static/pkg/freebsd.in +urllib3==1.26.18 + # via requests +zc.lockfile==1.4 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/linux.txt b/requirements/static/pkg/py3.12/linux.txt new file mode 100644 index 000000000000..b10d400a8e78 --- /dev/null +++ b/requirements/static/pkg/py3.12/linux.txt @@ -0,0 +1,107 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt +# +certifi==2023.07.22 + # via requests +cffi==1.14.6 + # via cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/static/pkg/linux.in +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.7 + # via + # -r requirements/static/pkg/linux.in + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # 
via -r requirements/static/pkg/linux.in +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.4.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.1 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==5.0.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.4 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pycparser==2.21 ; python_version >= "3.9" + # via + # -r requirements/static/pkg/linux.in + # cffi +pycryptodomex==3.9.8 + # via -r requirements/crypto.txt +pyopenssl==23.2.0 + # via -r requirements/static/pkg/linux.in +python-dateutil==2.8.1 + # via -r requirements/static/pkg/linux.in +python-gnupg==0.4.8 + # via -r requirements/static/pkg/linux.in +pytz==2022.1 + # via tempora +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==23.2.0 + # via -r requirements/zeromq.txt +requests==2.31.0 + # via -r requirements/base.txt +rpm-vercmp==0.1.2 + # via -r requirements/static/pkg/linux.in +setproctitle==1.3.2 + # via -r requirements/static/pkg/linux.in +six==1.16.0 + # via + # cheroot + # more-itertools + # python-dateutil +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/static/pkg/linux.in +urllib3==1.26.18 + # via requests +zc.lockfile==1.4 + # via cherrypy +zipp==3.6.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.12/windows.txt b/requirements/static/pkg/py3.12/windows.txt new file mode 100644 index 000000000000..50f67f958b73 --- /dev/null +++ b/requirements/static/pkg/py3.12/windows.txt @@ -0,0 
+1,141 @@ +# +# This file is autogenerated by pip-compile +# To update, run: +# +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.12/windows.txt requirements/static/pkg/windows.in requirements/windows.txt +# +certifi==2023.07.22 + # via + # -r requirements/windows.txt + # requests +cffi==1.14.6 + # via + # -r requirements/windows.txt + # clr-loader + # cryptography +charset-normalizer==3.2.0 + # via requests +cheroot==8.5.2 + # via cherrypy +cherrypy==18.6.1 + # via -r requirements/windows.txt +clr-loader==0.2.6 + # via pythonnet +contextvars==2.4 + # via -r requirements/base.txt +cryptography==41.0.7 + # via + # -r requirements/windows.txt + # pyopenssl +distro==1.5.0 + # via -r requirements/base.txt +gitdb==4.0.7 + # via gitpython +gitpython==3.1.37 + # via -r requirements/windows.txt +idna==3.2 + # via requests +immutables==0.15 + # via contextvars +importlib-metadata==6.0.0 + # via -r requirements/windows.txt +ioloop==0.1a0 + # via -r requirements/windows.txt +jaraco.classes==3.2.1 + # via jaraco.collections +jaraco.collections==3.3.0 + # via cherrypy +jaraco.functools==2.0 + # via + # cheroot + # jaraco.text + # tempora +jaraco.text==3.5.0 + # via jaraco.collections +jinja2==3.1.2 + # via -r requirements/base.txt +jmespath==1.0.1 + # via -r requirements/base.txt +looseversion==1.0.2 + # via -r requirements/base.txt +lxml==4.9.1 + # via -r requirements/windows.txt +markupsafe==2.1.2 + # via + # -r requirements/base.txt + # jinja2 +more-itertools==8.2.0 + # via + # cheroot + # cherrypy + # jaraco.classes + # jaraco.functools +msgpack==1.0.2 + # via -r requirements/base.txt +packaging==22.0 + # via -r requirements/base.txt +portend==2.6 + # via cherrypy +psutil==5.8.0 + # via -r requirements/base.txt +pyasn1==0.4.8 + # via -r requirements/windows.txt +pycparser==2.21 + # via + # -r requirements/windows.txt + # cffi +pycryptodomex==3.10.1 + # via -r requirements/crypto.txt +pymssql==2.2.7 + # via -r requirements/windows.txt 
+pymysql==1.0.2 + # via -r requirements/windows.txt +pyopenssl==23.2.0 + # via -r requirements/windows.txt +python-dateutil==2.8.1 + # via -r requirements/windows.txt +python-gnupg==0.4.8 + # via -r requirements/windows.txt +pythonnet==3.0.3 + # via -r requirements/windows.txt +pytz==2022.1 + # via tempora +pywin32==306 + # via + # -r requirements/windows.txt + # wmi +pyyaml==6.0.1 + # via -r requirements/base.txt +pyzmq==25.0.2 ; sys_platform == "win32" + # via -r requirements/zeromq.txt +requests==2.31.0 + # via + # -r requirements/base.txt + # -r requirements/windows.txt +setproctitle==1.3.2 + # via -r requirements/windows.txt +six==1.15.0 + # via + # cheroot + # python-dateutil +smmap==4.0.0 + # via gitdb +tempora==4.1.1 + # via portend +timelib==0.2.5 + # via -r requirements/windows.txt +urllib3==1.26.18 + # via + # -r requirements/windows.txt + # requests +wheel==0.38.4 + # via -r requirements/windows.txt +wmi==1.5.1 + # via -r requirements/windows.txt +zc.lockfile==2.0 + # via cherrypy +zipp==3.12.0 + # via importlib-metadata + +# The following packages are considered to be unsafe in a requirements file: +# setuptools diff --git a/requirements/static/pkg/py3.7/freebsd.txt b/requirements/static/pkg/py3.7/freebsd.txt index b1cb9e6f64cd..57ebefb0f2ed 100644 --- a/requirements/static/pkg/py3.7/freebsd.txt +++ b/requirements/static/pkg/py3.7/freebsd.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.7/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.7/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -16,13 +16,15 @@ cherrypy==18.6.1 
# via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==3.3.2 - # via pyopenssl +cryptography==41.0.7 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl distro==1.5.0 # via # -r requirements/base.txt # -r requirements/static/pkg/freebsd.in -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars @@ -67,7 +69,7 @@ pycparser==2.17 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==19.1.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 # via -r requirements/static/pkg/freebsd.in @@ -75,20 +77,18 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in six==1.16.0 # via # cheroot - # cryptography # more-itertools - # pyopenssl # python-dateutil tempora==4.1.1 # via portend @@ -96,7 +96,7 @@ timelib==0.2.5 # via -r requirements/static/pkg/freebsd.in typing-extensions==3.10.0.0 # via importlib-metadata -urllib3==1.26.6 +urllib3==1.26.18 # via requests zc.lockfile==1.4 # via cherrypy diff --git a/requirements/static/pkg/py3.7/linux.txt b/requirements/static/pkg/py3.7/linux.txt index c59877186214..2e83e1b6cb90 100644 --- a/requirements/static/pkg/py3.7/linux.txt +++ b/requirements/static/pkg/py3.7/linux.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.7/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.7/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests 
cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -16,11 +16,13 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==3.3.2 - # via pyopenssl +cryptography==41.0.7 + # via + # -r requirements/static/pkg/linux.in + # pyopenssl distro==1.5.0 # via -r requirements/base.txt -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars @@ -65,7 +67,7 @@ pycparser==2.17 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==19.1.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 # via -r requirements/static/pkg/linux.in @@ -73,11 +75,11 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in @@ -86,9 +88,7 @@ setproctitle==1.3.2 six==1.16.0 # via # cheroot - # cryptography # more-itertools - # pyopenssl # python-dateutil tempora==4.1.1 # via portend @@ -96,7 +96,7 @@ timelib==0.2.5 # via -r requirements/static/pkg/linux.in typing-extensions==3.10.0.0 # via importlib-metadata -urllib3==1.26.6 +urllib3==1.26.18 # via requests zc.lockfile==1.4 # via cherrypy diff --git a/requirements/static/pkg/py3.7/windows.txt b/requirements/static/pkg/py3.7/windows.txt index 795acec69967..4a206c610efe 100644 --- a/requirements/static/pkg/py3.7/windows.txt +++ b/requirements/static/pkg/py3.7/windows.txt @@ -2,9 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.7/windows.txt requirements/static/pkg/windows.in requirements/windows.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.7/windows.txt 
requirements/static/pkg/windows.in requirements/windows.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/windows.txt # requests @@ -13,7 +13,7 @@ cffi==1.14.6 # -r requirements/windows.txt # clr-loader # cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==3.4.7 +cryptography==41.0.7 # via # -r requirements/windows.txt # pyopenssl @@ -31,16 +31,14 @@ distro==1.5.0 # via -r requirements/base.txt gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/windows.txt -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars importlib-metadata==4.6.4 - # via - # -r requirements/windows.txt - # mako + # via -r requirements/windows.txt ioloop==0.1a0 # via -r requirements/windows.txt jaraco.classes==3.2.1 @@ -62,13 +60,10 @@ looseversion==1.0.2 # via -r requirements/base.txt lxml==4.9.1 # via -r requirements/windows.txt -mako==1.2.2 - # via -r requirements/windows.txt markupsafe==2.1.2 # via # -r requirements/base.txt # jinja2 - # mako more-itertools==8.2.0 # via # cheroot @@ -95,7 +90,7 @@ pymssql==2.2.1 # via -r requirements/windows.txt pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==20.0.1 +pyopenssl==23.2.0 # via -r requirements/windows.txt python-dateutil==2.8.1 # via -r requirements/windows.txt @@ -110,11 +105,11 @@ pywin32==305 # -r requirements/windows.txt # cherrypy # wmi -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.0.0 ; sys_platform == "win32" +pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/windows.txt @@ -123,7 +118,6 @@ setproctitle==1.3.2 six==1.15.0 # via # cheroot - # pyopenssl # python-dateutil smmap==4.0.0 # via gitdb @@ -135,7 +129,7 @@ 
typing-extensions==4.4.0 # via # gitpython # importlib-metadata -urllib3==1.26.6 +urllib3==1.26.18 # via # -r requirements/windows.txt # requests diff --git a/requirements/static/pkg/py3.8/freebsd.txt b/requirements/static/pkg/py3.8/freebsd.txt index 8e46fd2742e7..5906646aa085 100644 --- a/requirements/static/pkg/py3.8/freebsd.txt +++ b/requirements/static/pkg/py3.8/freebsd.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.8/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.8/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -16,13 +16,15 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==3.3.2 - # via pyopenssl +cryptography==41.0.7 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl distro==1.5.0 # via # -r requirements/base.txt # -r requirements/static/pkg/freebsd.in -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars @@ -67,7 +69,7 @@ pycparser==2.17 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==19.1.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 # via -r requirements/static/pkg/freebsd.in @@ -75,26 +77,24 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in six==1.16.0 # via # 
cheroot - # cryptography # more-itertools - # pyopenssl # python-dateutil tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/static/pkg/freebsd.in -urllib3==1.26.6 +urllib3==1.26.18 # via requests zc.lockfile==1.4 # via cherrypy diff --git a/requirements/static/pkg/py3.8/linux.txt b/requirements/static/pkg/py3.8/linux.txt index e33e43c56d3d..e72f036b84af 100644 --- a/requirements/static/pkg/py3.8/linux.txt +++ b/requirements/static/pkg/py3.8/linux.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.8/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.8/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -16,11 +16,13 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==3.3.2 - # via pyopenssl +cryptography==41.0.7 + # via + # -r requirements/static/pkg/linux.in + # pyopenssl distro==1.5.0 # via -r requirements/base.txt -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars @@ -65,7 +67,7 @@ pycparser==2.17 # via cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==19.1.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 # via -r requirements/static/pkg/linux.in @@ -73,11 +75,11 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r 
requirements/static/pkg/linux.in @@ -86,15 +88,13 @@ setproctitle==1.3.2 six==1.16.0 # via # cheroot - # cryptography # more-itertools - # pyopenssl # python-dateutil tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/static/pkg/linux.in -urllib3==1.26.6 +urllib3==1.26.18 # via requests zc.lockfile==1.4 # via cherrypy diff --git a/requirements/static/pkg/py3.8/windows.txt b/requirements/static/pkg/py3.8/windows.txt index cec5b3f4da4c..f174826068ab 100644 --- a/requirements/static/pkg/py3.8/windows.txt +++ b/requirements/static/pkg/py3.8/windows.txt @@ -2,9 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.8/windows.txt requirements/static/pkg/windows.in requirements/windows.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.8/windows.txt requirements/static/pkg/windows.in requirements/windows.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/windows.txt # requests @@ -13,7 +13,7 @@ cffi==1.14.6 # -r requirements/windows.txt # clr-loader # cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==3.4.7 +cryptography==41.0.7 # via # -r requirements/windows.txt # pyopenssl @@ -31,9 +31,9 @@ distro==1.5.0 # via -r requirements/base.txt gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/windows.txt -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars @@ -60,13 +60,10 @@ looseversion==1.0.2 # via -r requirements/base.txt lxml==4.9.1 # via -r requirements/windows.txt -mako==1.2.2 - # via -r requirements/windows.txt markupsafe==2.1.2 # via # -r requirements/base.txt # jinja2 - # mako more-itertools==8.2.0 # via # cheroot @@ -93,7 +90,7 @@ pymssql==2.2.1 # via -r requirements/windows.txt 
pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==20.0.1 +pyopenssl==23.2.0 # via -r requirements/windows.txt python-dateutil==2.8.1 # via -r requirements/windows.txt @@ -108,11 +105,11 @@ pywin32==305 # -r requirements/windows.txt # cherrypy # wmi -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.0.0 ; sys_platform == "win32" +pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/windows.txt @@ -121,7 +118,6 @@ setproctitle==1.3.2 six==1.15.0 # via # cheroot - # pyopenssl # python-dateutil smmap==4.0.0 # via gitdb @@ -129,7 +125,7 @@ tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/windows.txt -urllib3==1.26.6 +urllib3==1.26.18 # via # -r requirements/windows.txt # requests diff --git a/requirements/static/pkg/py3.9/darwin.txt b/requirements/static/pkg/py3.9/darwin.txt index b7484906ee06..97affcd929c4 100644 --- a/requirements/static/pkg/py3.9/darwin.txt +++ b/requirements/static/pkg/py3.9/darwin.txt @@ -2,15 +2,15 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.9/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.9/darwin.txt requirements/darwin.txt requirements/static/pkg/darwin.in # apache-libcloud==2.5.0 # via -r requirements/darwin.txt -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -18,17 +18,17 @@ cherrypy==18.6.1 # via -r requirements/darwin.txt contextvars==2.4 # via -r requirements/base.txt -cryptography==3.3.2 +cryptography==41.0.7 # via # -r requirements/darwin.txt # pyopenssl distro==1.5.0 # via -r requirements/base.txt -gitdb==4.0.5 +gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= 
"3.7" +gitpython==3.1.37 # via -r requirements/darwin.txt -idna==2.8 +idna==3.2 # via # -r requirements/darwin.txt # requests @@ -55,13 +55,10 @@ linode-python==1.1.1 # via -r requirements/darwin.txt looseversion==1.0.2 # via -r requirements/base.txt -mako==1.2.2 - # via -r requirements/darwin.txt markupsafe==2.1.2 # via # -r requirements/base.txt # jinja2 - # mako more-itertools==8.2.0 # via # cheroot @@ -84,7 +81,7 @@ pycparser==2.21 # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==19.0.0 +pyopenssl==23.2.0 # via -r requirements/darwin.txt python-dateutil==2.8.0 # via -r requirements/darwin.txt @@ -92,11 +89,11 @@ python-gnupg==0.4.8 # via -r requirements/darwin.txt pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # apache-libcloud @@ -106,16 +103,14 @@ setproctitle==1.3.2 six==1.16.0 # via # cheroot - # cryptography - # pyopenssl # python-dateutil -smmap==3.0.2 +smmap==4.0.0 # via gitdb tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/darwin.txt -urllib3==1.26.6 +urllib3==1.26.18 # via requests vultr==1.0.1 # via -r requirements/darwin.txt diff --git a/requirements/static/pkg/py3.9/freebsd.txt b/requirements/static/pkg/py3.9/freebsd.txt index 2d9583f4d9a5..7f65b8ce4b5b 100644 --- a/requirements/static/pkg/py3.9/freebsd.txt +++ b/requirements/static/pkg/py3.9/freebsd.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.9/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.9/freebsd.txt requirements/base.txt requirements/static/pkg/freebsd.in requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography 
-chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -16,13 +16,15 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/freebsd.in contextvars==2.4 # via -r requirements/base.txt -cryptography==3.3.2 - # via pyopenssl +cryptography==41.0.7 + # via + # -r requirements/static/pkg/freebsd.in + # pyopenssl distro==1.5.0 # via # -r requirements/base.txt # -r requirements/static/pkg/freebsd.in -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars @@ -69,7 +71,7 @@ pycparser==2.21 ; python_version >= "3.9" # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==19.1.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/freebsd.in python-dateutil==2.8.1 # via -r requirements/static/pkg/freebsd.in @@ -77,26 +79,24 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/freebsd.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt setproctitle==1.3.2 # via -r requirements/static/pkg/freebsd.in six==1.16.0 # via # cheroot - # cryptography # more-itertools - # pyopenssl # python-dateutil tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/static/pkg/freebsd.in -urllib3==1.26.6 +urllib3==1.26.18 # via requests zc.lockfile==1.4 # via cherrypy diff --git a/requirements/static/pkg/py3.9/linux.txt b/requirements/static/pkg/py3.9/linux.txt index fb9a9fc775c0..0c934fd6bc6f 100644 --- a/requirements/static/pkg/py3.9/linux.txt +++ b/requirements/static/pkg/py3.9/linux.txt @@ -2,13 +2,13 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.9/linux.txt requirements/base.txt requirements/static/pkg/linux.in requirements/zeromq.txt +# pip-compile --no-emit-index-url --output-file=requirements/static/pkg/py3.9/linux.txt requirements/base.txt requirements/static/pkg/linux.in 
requirements/zeromq.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via requests cffi==1.14.6 # via cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -16,11 +16,13 @@ cherrypy==18.6.1 # via -r requirements/static/pkg/linux.in contextvars==2.4 # via -r requirements/base.txt -cryptography==3.3.2 - # via pyopenssl +cryptography==41.0.7 + # via + # -r requirements/static/pkg/linux.in + # pyopenssl distro==1.5.0 # via -r requirements/base.txt -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars @@ -67,7 +69,7 @@ pycparser==2.21 ; python_version >= "3.9" # cffi pycryptodomex==3.9.8 # via -r requirements/crypto.txt -pyopenssl==19.1.0 +pyopenssl==23.2.0 # via -r requirements/static/pkg/linux.in python-dateutil==2.8.1 # via -r requirements/static/pkg/linux.in @@ -75,11 +77,11 @@ python-gnupg==0.4.8 # via -r requirements/static/pkg/linux.in pytz==2022.1 # via tempora -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt pyzmq==23.2.0 # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via -r requirements/base.txt rpm-vercmp==0.1.2 # via -r requirements/static/pkg/linux.in @@ -88,15 +90,13 @@ setproctitle==1.3.2 six==1.16.0 # via # cheroot - # cryptography # more-itertools - # pyopenssl # python-dateutil tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/static/pkg/linux.in -urllib3==1.26.6 +urllib3==1.26.18 # via requests zc.lockfile==1.4 # via cherrypy diff --git a/requirements/static/pkg/py3.9/windows.txt b/requirements/static/pkg/py3.9/windows.txt index 79418f826020..9d6759d0a3f6 100644 --- a/requirements/static/pkg/py3.9/windows.txt +++ b/requirements/static/pkg/py3.9/windows.txt @@ -2,9 +2,9 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --output-file=requirements/static/pkg/py3.9/windows.txt requirements/static/pkg/windows.in requirements/windows.txt +# pip-compile --no-emit-index-url 
--output-file=requirements/static/pkg/py3.9/windows.txt requirements/static/pkg/windows.in requirements/windows.txt # -certifi==2022.12.7 +certifi==2023.07.22 # via # -r requirements/windows.txt # requests @@ -13,7 +13,7 @@ cffi==1.14.6 # -r requirements/windows.txt # clr-loader # cryptography -chardet==3.0.4 +charset-normalizer==3.2.0 # via requests cheroot==8.5.2 # via cherrypy @@ -23,7 +23,7 @@ clr-loader==0.2.4 # via pythonnet contextvars==2.4 # via -r requirements/base.txt -cryptography==3.4.7 +cryptography==41.0.7 # via # -r requirements/windows.txt # pyopenssl @@ -31,9 +31,9 @@ distro==1.5.0 # via -r requirements/base.txt gitdb==4.0.7 # via gitpython -gitpython==3.1.30 ; python_version >= "3.7" +gitpython==3.1.37 # via -r requirements/windows.txt -idna==2.8 +idna==3.2 # via requests immutables==0.15 # via contextvars @@ -60,13 +60,10 @@ looseversion==1.0.2 # via -r requirements/base.txt lxml==4.9.1 # via -r requirements/windows.txt -mako==1.2.2 - # via -r requirements/windows.txt markupsafe==2.1.2 # via # -r requirements/base.txt # jinja2 - # mako more-itertools==8.2.0 # via # cheroot @@ -93,7 +90,7 @@ pymssql==2.2.1 # via -r requirements/windows.txt pymysql==1.0.2 # via -r requirements/windows.txt -pyopenssl==20.0.1 +pyopenssl==23.2.0 # via -r requirements/windows.txt python-dateutil==2.8.1 # via -r requirements/windows.txt @@ -108,11 +105,11 @@ pywin32==305 # -r requirements/windows.txt # cherrypy # wmi -pyyaml==5.4.1 +pyyaml==6.0.1 # via -r requirements/base.txt -pyzmq==25.0.0 ; sys_platform == "win32" +pyzmq==25.0.2 ; sys_platform == "win32" # via -r requirements/zeromq.txt -requests==2.25.1 +requests==2.31.0 # via # -r requirements/base.txt # -r requirements/windows.txt @@ -121,7 +118,6 @@ setproctitle==1.3.2 six==1.15.0 # via # cheroot - # pyopenssl # python-dateutil smmap==4.0.0 # via gitdb @@ -129,7 +125,7 @@ tempora==4.1.1 # via portend timelib==0.2.5 # via -r requirements/windows.txt -urllib3==1.26.6 +urllib3==1.26.18 # via # -r 
requirements/windows.txt # requests diff --git a/requirements/windows.txt b/requirements/windows.txt index fddac463c2a0..19ae0c889df6 100644 --- a/requirements/windows.txt +++ b/requirements/windows.txt @@ -6,20 +6,18 @@ pywin32>=305 wmi>=1.5.1 pythonnet>=3.0.1 -backports.ssl-match-hostname>=3.7.0.1; python_version < '3.7' certifi>=2022.12.07 cffi>=1.14.5 cherrypy>=18.6.1 -cryptography>=3.4.7 -gitpython>=3.1.30; python_version >= '3.7' +cryptography>=41.0.3 +gitpython>=3.1.37 ioloop>=0.1a0 lxml>=4.6.3 -mako>=1.1.4 pyasn1>=0.4.8 pycparser>=2.21 pymssql>=2.2.1 pymysql>=1.0.2 -pyopenssl>=20.0.1 +pyopenssl>=23.2.0 python-dateutil>=2.8.1 python-gnupg>=0.4.7 requests>=2.25.1 diff --git a/requirements/zeromq.txt b/requirements/zeromq.txt index 2fce952a9f12..1e9a815c1b61 100644 --- a/requirements/zeromq.txt +++ b/requirements/zeromq.txt @@ -2,4 +2,4 @@ -r crypto.txt pyzmq>=20.0.0 -pyzmq==25.0.0 ; sys_platform == "win32" +pyzmq==25.0.2 ; sys_platform == "win32" diff --git a/salt/__init__.py b/salt/__init__.py index 9c7eaffcdefa..a7c32e159a46 100644 --- a/salt/__init__.py +++ b/salt/__init__.py @@ -3,10 +3,11 @@ """ import importlib +import os import sys import warnings -if sys.version_info < (3,): +if sys.version_info < (3,): # pragma: no cover sys.stderr.write( "\n\nAfter the Sodium release, 3001, Salt no longer supports Python 2. Exiting.\n\n" ) @@ -21,29 +22,68 @@ def find_module(self, module_name, package_path=None): if USE_VENDORED_TORNADO: if module_name.startswith("tornado"): return self - else: + else: # pragma: no cover if module_name.startswith("salt.ext.tornado"): return self return None - def load_module(self, name): + def create_module(self, spec): if USE_VENDORED_TORNADO: - mod = importlib.import_module("salt.ext.{}".format(name)) - else: + mod = importlib.import_module("salt.ext.{}".format(spec.name)) + else: # pragma: no cover # Remove 'salt.ext.' 
from the module - mod = importlib.import_module(name[9:]) - sys.modules[name] = mod + mod = importlib.import_module(spec.name[9:]) + sys.modules[spec.name] = mod return mod + def exec_module(self, module): + return None + + +class NaclImporter: + """ + Import hook to force PyNaCl to perform dlopen on libsodium with the + RTLD_DEEPBIND flag. This is to work around an issue where pyzmq does a dlopen + with RTLD_GLOBAL which then causes calls to libsodium to resolve to + tweetnacl when it's been bundled with pyzmq. + + See: https://github.com/zeromq/pyzmq/issues/1878 + """ + + loading = False + + def find_module(self, module_name, package_path=None): + if not NaclImporter.loading and module_name.startswith("nacl"): + NaclImporter.loading = True + return self + return None + def create_module(self, spec): - return self.load_module(spec.name) + dlopen = hasattr(sys, "getdlopenflags") + if dlopen: + dlflags = sys.getdlopenflags() + # Use RTDL_DEEPBIND in case pyzmq was compiled with ZMQ_USE_TWEETNACL. This is + # needed because pyzmq imports libzmq with RTLD_GLOBAL. + if hasattr(os, "RTLD_DEEPBIND"): + flags = os.RTLD_DEEPBIND | dlflags + else: + flags = dlflags + sys.setdlopenflags(flags) + try: + mod = importlib.import_module(spec.name) + finally: + if dlopen: + sys.setdlopenflags(dlflags) + NaclImporter.loading = False + sys.modules[spec.name] = mod + return mod def exec_module(self, module): return None # Try our importer first -sys.meta_path = [TornadoImporter()] + sys.meta_path +sys.meta_path = [TornadoImporter(), NaclImporter()] + sys.meta_path # All salt related deprecation warnings should be shown once each! 
@@ -72,6 +112,23 @@ def exec_module(self, module): module="_distutils_hack", ) +warnings.filterwarnings( + "ignore", + message="invalid escape sequence.*", + category=DeprecationWarning, +) + +warnings.filterwarnings( + "ignore", + "Deprecated call to `pkg_resources.declare_namespace.*", + category=DeprecationWarning, +) +warnings.filterwarnings( + "ignore", + ".*pkg_resources is deprecated as an API.*", + category=DeprecationWarning, +) + def __define_global_system_encoding_variable__(): import sys @@ -94,14 +151,14 @@ def __define_global_system_encoding_variable__(): import locale try: - encoding = locale.getdefaultlocale()[-1] - except ValueError: - # A bad locale setting was most likely found: - # https://github.com/saltstack/salt/issues/26063 - pass + encoding = locale.getencoding() + except AttributeError: + # Python < 3.11 + encoding = locale.getpreferredencoding(do_setlocale=True) # This is now garbage collectable del locale + if not encoding: # This is most likely ascii which is not the best but we were # unable to find a better encoding. If this fails, we fall all diff --git a/salt/_compat.py b/salt/_compat.py index ed404e877b0f..7d20691f5949 100644 --- a/salt/_compat.py +++ b/salt/_compat.py @@ -4,6 +4,8 @@ # pylint: disable=unused-import import sys +# pragma: no cover + # The ipaddress module included in Salt is from Python 3.9.5. # When running from Py3.9.5+ use the standard library module, use ours otherwise if sys.version_info >= (3, 9, 5): diff --git a/salt/_logging/handlers.py b/salt/_logging/handlers.py index f4b0b6fec3d1..7492b55b8fdd 100644 --- a/salt/_logging/handlers.py +++ b/salt/_logging/handlers.py @@ -88,7 +88,7 @@ class DeferredStreamHandler(StreamHandler): If anything goes wrong before logging is properly setup, all stored messages will be flushed to the handler's stream, ie, written to console. - .. versionadded:: 3005.0 + .. 
versionadded:: 3005 """ def __init__(self, stream, max_queue_size=10000): diff --git a/salt/auth/__init__.py b/salt/auth/__init__.py index b87e2aff0df9..df2eebe4f85d 100644 --- a/salt/auth/__init__.py +++ b/salt/auth/__init__.py @@ -323,6 +323,7 @@ def authenticate_key(self, load, key): failure. """ error_msg = 'Authentication failure of type "user" occurred.' + auth_key = load.pop("key", None) if auth_key is None: log.warning(error_msg) @@ -331,28 +332,33 @@ def authenticate_key(self, load, key): if "user" in load: auth_user = AuthUser(load["user"]) if auth_user.is_sudo(): - # If someone sudos check to make sure there is no ACL's around their username - if auth_key != key[self.opts.get("user", "root")]: - log.warning(error_msg) - return False - return auth_user.sudo_name() + for check_key in key: + if auth_key == key[check_key]: + return auth_user.sudo_name() + return False elif ( load["user"] == self.opts.get("user", "root") or load["user"] == "root" ): - if auth_key != key[self.opts.get("user", "root")]: - log.warning( - "Master runs as %r, but user in payload is %r", - self.opts.get("user", "root"), - load["user"], - ) - log.warning(error_msg) - return False + for check_key in key: + if auth_key == key[check_key]: + return True + log.warning( + "Master runs as %r, but user in payload is %r", + self.opts.get("user", "root"), + load["user"], + ) + log.warning(error_msg) + return False + elif auth_user.is_running_user(): if auth_key != key.get(load["user"]): log.warning(error_msg) return False elif auth_key == key.get("root"): pass + elif auth_key == key.get("salt"): + # there is nologin for salt + pass else: if load["user"] in key: # User is authorised, check key and check perms @@ -364,9 +370,13 @@ def authenticate_key(self, load, key): log.warning(error_msg) return False else: - if auth_key != key[salt.utils.user.get_user()]: - log.warning(error_msg) - return False + for check_key in key: + if auth_key == key[check_key]: + return True + + log.warning(error_msg) 
+ return False + return True def get_auth_list(self, load, token=None): diff --git a/salt/auth/pam.py b/salt/auth/pam.py index f0397c1062f1..12af29bbdb80 100644 --- a/salt/auth/pam.py +++ b/salt/auth/pam.py @@ -24,15 +24,6 @@ The Python interface to PAM does not support authenticating as ``root``. -.. note:: Using PAM groups with SSSD groups on python2. - - To use sssd with the PAM eauth module and groups the `pysss` module is - needed. On RedHat/CentOS this is `python-sss`. - - This should not be needed with python >= 3.3, because the `os` modules has the - `getgrouplist` function. - - .. note:: This module executes itself in a subprocess in order to user the system python and pam libraries. We do this to avoid openssl version conflicts when running under a salt onedir build. diff --git a/salt/beacons/smartos_imgadm.py b/salt/beacons/smartos_imgadm.py index d0460789f96d..2432a54e79fd 100644 --- a/salt/beacons/smartos_imgadm.py +++ b/salt/beacons/smartos_imgadm.py @@ -105,6 +105,3 @@ def beacon(config): IMGADM_STATE["first_run"] = False return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/beacons/smartos_vmadm.py b/salt/beacons/smartos_vmadm.py index 163aed8a38b8..fd48253785bc 100644 --- a/salt/beacons/smartos_vmadm.py +++ b/salt/beacons/smartos_vmadm.py @@ -132,6 +132,3 @@ def beacon(config): VMADM_STATE["first_run"] = False return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/cache/consul.py b/salt/cache/consul.py index 14c08cc4d25a..547e2fe4e2df 100644 --- a/salt/cache/consul.py +++ b/salt/cache/consul.py @@ -3,7 +3,7 @@ .. versionadded:: 2016.11.2 -.. versionchanged:: 3005.0 +.. versionchanged:: 3005 Timestamp/cache updated support added. 
diff --git a/salt/channel/client.py b/salt/channel/client.py index e5b073ccdba1..0ca3cb7b76d8 100644 --- a/salt/channel/client.py +++ b/salt/channel/client.py @@ -40,6 +40,9 @@ log = logging.getLogger(__name__) +REQUEST_CHANNEL_TIMEOUT = 60 +REQUEST_CHANNEL_TRIES = 3 + class ReqChannel: """ @@ -121,6 +124,9 @@ def factory(cls, opts, **kwargs): if io_loop is None: io_loop = salt.ext.tornado.ioloop.IOLoop.current() + timeout = opts.get("request_channel_timeout", REQUEST_CHANNEL_TIMEOUT) + tries = opts.get("request_channel_tries", REQUEST_CHANNEL_TRIES) + crypt = kwargs.get("crypt", "aes") if crypt != "clear": # we don't need to worry about auth as a kwarg, since its a singleton @@ -129,9 +135,17 @@ def factory(cls, opts, **kwargs): auth = None transport = salt.transport.request_client(opts, io_loop=io_loop) - return cls(opts, transport, auth) + return cls(opts, transport, auth, tries=tries, timeout=timeout) - def __init__(self, opts, transport, auth, **kwargs): + def __init__( + self, + opts, + transport, + auth, + timeout=REQUEST_CHANNEL_TIMEOUT, + tries=REQUEST_CHANNEL_TRIES, + **kwargs + ): self.opts = dict(opts) self.transport = transport self.auth = auth @@ -139,6 +153,8 @@ def __init__(self, opts, transport, auth, **kwargs): if self.auth: self.master_pubkey_path = os.path.join(self.opts["pki_dir"], self.auth.mpub) self._closing = False + self.timeout = timeout + self.tries = tries @property def crypt(self): @@ -157,28 +173,54 @@ def _package_load(self, load): "version": 2, } + @salt.ext.tornado.gen.coroutine + def _send_with_retry(self, load, tries, timeout): + _try = 1 + while True: + try: + ret = yield self.transport.send( + load, + timeout=timeout, + ) + break + except Exception as exc: # pylint: disable=broad-except + log.trace("Failed to send msg %r", exc) + if _try >= tries: + raise + else: + _try += 1 + continue + raise salt.ext.tornado.gen.Return(ret) + @salt.ext.tornado.gen.coroutine def crypted_transfer_decode_dictentry( self, load, dictkey=None, - 
timeout=60, + timeout=None, + tries=None, ): + if timeout is None: + timeout = self.timeout + if tries is None: + tries = self.tries nonce = uuid.uuid4().hex load["nonce"] = nonce if not self.auth.authenticated: yield self.auth.authenticate() - ret = yield self.transport.send( + ret = yield self._send_with_retry( self._package_load(self.auth.crypticle.dumps(load)), - timeout=timeout, + tries, + timeout, ) key = self.auth.get_keys() if "key" not in ret: # Reauth in the case our key is deleted on the master side. yield self.auth.authenticate() - ret = yield self.transport.send( + ret = yield self._send_with_retry( self._package_load(self.auth.crypticle.dumps(load)), - timeout=timeout, + tries, + timeout, ) if HAS_M2: aes = key.private_decrypt(ret["key"], RSA.pkcs1_oaep_padding) @@ -210,7 +252,7 @@ def verify_signature(self, data, sig): return salt.crypt.verify_signature(self.master_pubkey_path, data, sig) @salt.ext.tornado.gen.coroutine - def _crypted_transfer(self, load, timeout=60, raw=False): + def _crypted_transfer(self, load, timeout, raw=False): """ Send a load across the wire, with encryption @@ -257,7 +299,7 @@ def _do_transfer(): raise salt.ext.tornado.gen.Return(ret) @salt.ext.tornado.gen.coroutine - def _uncrypted_transfer(self, load, timeout=60): + def _uncrypted_transfer(self, load, timeout): """ Send a load across the wire in cleartext @@ -276,7 +318,7 @@ def connect(self): yield self.transport.connect() @salt.ext.tornado.gen.coroutine - def send(self, load, tries=3, timeout=60, raw=False): + def send(self, load, tries=None, timeout=None, raw=False): """ Send a request, return a future which will complete when we send the message @@ -284,6 +326,10 @@ def send(self, load, tries=3, timeout=60, raw=False): :param int tries: The number of times to make before failure :param int timeout: The number of seconds on a response before failing """ + if timeout is None: + timeout = self.timeout + if tries is None: + tries = self.tries _try = 1 while True: try: @@ 
-506,19 +552,16 @@ def connect_callback(self, result): "data": data, "tag": tag, } - req_channel = AsyncReqChannel.factory(self.opts) - try: - yield req_channel.send(load, timeout=60) - except salt.exceptions.SaltReqTimeoutError: - log.info( - "fire_master failed: master could not be contacted. Request timed" - " out." - ) - except Exception: # pylint: disable=broad-except - log.info("fire_master failed", exc_info=True) - finally: - # SyncWrapper will call either close() or destroy(), whichever is available - del req_channel + with AsyncReqChannel.factory(self.opts) as channel: + try: + yield channel.send(load, timeout=60) + except salt.exceptions.SaltReqTimeoutError: + log.info( + "fire_master failed: master could not be contacted. Request timed" + " out." + ) + except Exception: # pylint: disable=broad-except + log.info("fire_master failed", exc_info=True) else: self._reconnected = True except Exception as exc: # pylint: disable=broad-except diff --git a/salt/channel/server.py b/salt/channel/server.py index a2117f2934d9..b6d51fef0857 100644 --- a/salt/channel/server.py +++ b/salt/channel/server.py @@ -22,6 +22,7 @@ import salt.utils.platform import salt.utils.stringutils import salt.utils.verify +from salt.exceptions import SaltDeserializationError from salt.utils.cache import CacheCli try: @@ -252,6 +253,15 @@ def _update_aes(self): return False def _decode_payload(self, payload): + # Sometimes msgpack deserialization of random bytes could be successful, + # so we need to ensure payload in good shape to process this function. 
+ if ( + not isinstance(payload, dict) + or "enc" not in payload + or "load" not in payload + ): + raise SaltDeserializationError("bad load received on socket!") + # we need to decrypt it if payload["enc"] == "aes": try: diff --git a/salt/cli/daemons.py b/salt/cli/daemons.py index 864884e92a39..ecc05c919ef9 100644 --- a/salt/cli/daemons.py +++ b/salt/cli/daemons.py @@ -563,7 +563,6 @@ def prepare(self): verify_env( [ self.config["pki_dir"], - self.config["cachedir"], self.config["sock_dir"], self.config["extension_modules"], ], diff --git a/salt/cli/run.py b/salt/cli/run.py index 93387479bca0..fba70fffbecc 100644 --- a/salt/cli/run.py +++ b/salt/cli/run.py @@ -31,13 +31,19 @@ def run(self): if check_user(self.config["user"]): pr = salt.utils.profile.activate_profile(profiling_enabled) try: - ret = runner.run() + ret = runner.run(full_return=True) # In older versions ret['data']['retcode'] was used # for signaling the return code. This has been # changed for the orchestrate runner, but external # runners might still use it. For this reason, we # also check ret['data']['retcode'] if # ret['retcode'] is not available. 
+ if ( + isinstance(ret, dict) + and "return" in ret + and "retcode" not in ret + ): + ret = ret["return"] if isinstance(ret, dict) and "retcode" in ret: self.exit(ret["retcode"]) elif isinstance(ret, dict) and "retcode" in ret.get("data", {}): diff --git a/salt/cli/ssh.py b/salt/cli/ssh.py index 6048cb5f58fb..78522a044a9b 100644 --- a/salt/cli/ssh.py +++ b/salt/cli/ssh.py @@ -16,4 +16,7 @@ def run(self): self.parse_args() ssh = salt.client.ssh.SSH(self.config) - ssh.run() + try: + ssh.run() + finally: + ssh.fsclient.destroy() diff --git a/salt/client/__init__.py b/salt/client/__init__.py index 7ce8963b8f64..307ce8a0ad41 100644 --- a/salt/client/__init__.py +++ b/salt/client/__init__.py @@ -299,7 +299,7 @@ def gather_job_info(self, jid, tgt, tgt_type, listen=True, **kwargs): tgt_type=tgt_type, timeout=timeout, listen=listen, - **kwargs + **kwargs, ) if "jid" in pub_data: @@ -365,7 +365,7 @@ def run_job( jid="", kwarg=None, listen=False, - **kwargs + **kwargs, ): """ Asynchronously send a command to connected minions @@ -393,7 +393,7 @@ def run_job( jid=jid, timeout=self._get_timeout(timeout), listen=listen, - **kwargs + **kwargs, ) except SaltClientError: # Re-raise error with specific message @@ -429,7 +429,7 @@ def run_job_async( kwarg=None, listen=True, io_loop=None, - **kwargs + **kwargs, ): """ Asynchronously send a command to connected minions @@ -458,7 +458,7 @@ def run_job_async( timeout=self._get_timeout(timeout), io_loop=io_loop, listen=listen, - **kwargs + **kwargs, ) except SaltClientError: # Re-raise error with specific message @@ -511,7 +511,7 @@ def cmd_subset( cli=False, progress=False, full_return=False, - **kwargs + **kwargs, ): """ Execute a command on a random subset of the targeted systems @@ -553,7 +553,7 @@ def cmd_subset( kwarg=kwarg, progress=progress, full_return=full_return, - **kwargs + **kwargs, ) def cmd_batch( @@ -565,7 +565,7 @@ def cmd_batch( ret="", kwarg=None, batch="10%", - **kwargs + **kwargs, ): """ Iteratively execute a 
command on subsets of minions at a time @@ -641,7 +641,7 @@ def cmd( jid="", full_return=False, kwarg=None, - **kwargs + **kwargs, ): """ Synchronously execute a command on targeted minions @@ -759,7 +759,7 @@ def cmd( jid, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -772,7 +772,7 @@ def cmd( self._get_timeout(timeout), tgt, tgt_type, - **kwargs + **kwargs, ): if fn_ret: @@ -797,7 +797,7 @@ def cmd_cli( verbose=False, kwarg=None, progress=False, - **kwargs + **kwargs, ): """ Used by the :command:`salt` CLI. This method returns minion returns as @@ -821,7 +821,7 @@ def cmd_cli( timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not self.pub_data: yield self.pub_data @@ -835,7 +835,7 @@ def cmd_cli( tgt_type, verbose, progress, - **kwargs + **kwargs, ): if not fn_ret: @@ -866,7 +866,7 @@ def cmd_iter( tgt_type="glob", ret="", kwarg=None, - **kwargs + **kwargs, ): """ Yields the individual minion returns as they come in @@ -901,7 +901,7 @@ def cmd_iter( timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -915,7 +915,7 @@ def cmd_iter( timeout=self._get_timeout(timeout), tgt=tgt, tgt_type=tgt_type, - **kwargs + **kwargs, ): if not fn_ret: continue @@ -936,7 +936,7 @@ def cmd_iter_no_block( kwarg=None, show_jid=False, verbose=False, - **kwargs + **kwargs, ): """ Yields the individual minion returns as they come in, or None @@ -972,7 +972,7 @@ def cmd_iter_no_block( timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -985,7 +985,7 @@ def cmd_iter_no_block( tgt=tgt, tgt_type=tgt_type, block=False, - **kwargs + **kwargs, ): if fn_ret and any([show_jid, verbose]): for minion in fn_ret: @@ -1007,7 +1007,7 @@ def cmd_full_return( ret="", verbose=False, kwarg=None, - **kwargs + **kwargs, ): """ Execute a salt command and return @@ -1024,7 +1024,7 @@ def cmd_full_return( timeout, kwarg=kwarg, listen=True, - **kwargs + **kwargs, ) if not pub_data: @@ -1046,7 +1046,7 @@ def 
get_cli_returns( tgt_type="glob", verbose=False, show_jid=False, - **kwargs + **kwargs, ): """ Starts a watcher looking at the return data for a specified JID @@ -1123,7 +1123,7 @@ def get_iter_returns( tgt_type="glob", expect_minions=False, block=True, - **kwargs + **kwargs, ): """ Watch the event system and return job data as it comes in @@ -1202,7 +1202,13 @@ def get_iter_returns( if "missing" in raw.get("data", {}): missing.update(raw["data"]["missing"]) continue + + # Anything below this point is expected to be a job return event. + if not raw["tag"].startswith(f"salt/job/{jid}/ret"): + log.debug("Skipping non return event: %s", raw["tag"]) + continue if "return" not in raw["data"]: + log.warning("Malformed event return: %s", raw["tag"]) continue if kwargs.get("raw", False): found.add(raw["data"]["id"]) @@ -1628,7 +1634,7 @@ def get_cli_event_returns( progress=False, show_timeout=False, show_jid=False, - **kwargs + **kwargs, ): """ Get the returns for the command line interface via the event system @@ -1658,7 +1664,7 @@ def get_cli_event_returns( expect_minions=( kwargs.pop("expect_minions", False) or verbose or show_timeout ), - **kwargs + **kwargs, ): log.debug("return event: %s", ret) return_count = return_count + 1 @@ -1851,7 +1857,7 @@ def pub( jid="", timeout=5, listen=False, - **kwargs + **kwargs, ): """ Take the required arguments and publish the given command. @@ -1953,7 +1959,7 @@ def pub_async( timeout=5, io_loop=None, listen=True, - **kwargs + **kwargs, ): """ Take the required arguments and publish the given command. 
diff --git a/salt/client/mixins.py b/salt/client/mixins.py index 9cefe54cd64e..c6b479e106b3 100644 --- a/salt/client/mixins.py +++ b/salt/client/mixins.py @@ -246,6 +246,8 @@ def cmd( self.functions[fun], arglist, pub_data ) low = {"fun": fun, "arg": args, "kwarg": kwargs} + if "user" in pub_data: + low["__user__"] = pub_data["user"] return self.low(fun, low, print_event=print_event, full_return=full_return) @property @@ -410,6 +412,7 @@ def low(self, fun, low, print_event=True, full_return=False): traceback.format_exc(), ) data["success"] = False + data["retcode"] = 1 if self.store_job: try: @@ -510,7 +513,17 @@ def _proc_function_remote( @classmethod def _proc_function( - cls, *, instance, opts, fun, low, user, tag, jid, daemonize=True + cls, + *, + instance, + opts, + fun, + low, + user, + tag, + jid, + daemonize=True, + full_return=False ): """ Run this method in a multiprocess target to execute the function @@ -535,7 +548,7 @@ def _proc_function( low["__user__"] = user low["__tag__"] = tag - return instance.low(fun, low) + return instance.low(fun, low, full_return=full_return) def cmd_async(self, low): """ diff --git a/salt/client/ssh/__init__.py b/salt/client/ssh/__init__.py index 81e5b580bbe9..e24c0b453449 100644 --- a/salt/client/ssh/__init__.py +++ b/salt/client/ssh/__init__.py @@ -11,9 +11,11 @@ import logging import multiprocessing import os +import pathlib import queue import re import shlex +import shutil import subprocess import sys import tarfile @@ -304,7 +306,7 @@ def __init__(self, opts): } if self.opts.get("rand_thin_dir"): self.defaults["thin_dir"] = os.path.join( - "/var/tmp", ".{}".format(uuid.uuid4().hex[:6]) + "/var/tmp", f".{uuid.uuid4().hex[:6]}" ) self.opts["ssh_wipe"] = "True" self.returners = salt.loader.returners(self.opts, {}) @@ -454,9 +456,9 @@ def get_pubkey(self): priv = self.opts.get( "ssh_priv", os.path.join(self.opts["pki_dir"], "ssh", "salt-ssh.rsa") ) - pub = "{}.pub".format(priv) + pub = f"{priv}.pub" with 
salt.utils.files.fopen(pub, "r") as fp_: - return "{} rsa root@master".format(fp_.read().split()[1]) + return f"{fp_.read().split()[1]} rsa root@master" def key_deploy(self, host, ret): """ @@ -467,7 +469,14 @@ def key_deploy(self, host, ret): if target.get("passwd", False) or self.opts["ssh_passwd"]: self._key_deploy_run(host, target, False) return ret - if ret[host].get("stderr", "").count("Permission denied"): + stderr = ret[host].get("stderr", "") + # -failed to upload file- is detecting scp errors + # Errors to ignore when Permission denied is in the stderr. For example + # scp can get a permission denied on the target host, but they where + # able to accurate authenticate against the box + ignore_err = ["failed to upload file"] + check_err = [x for x in ignore_err if stderr.count(x)] + if "Permission denied" in stderr and not check_err: target = self.targets[host] # permission denied, attempt to auto deploy ssh key print( @@ -500,7 +509,7 @@ def _key_deploy_run(self, host, target, re_run=True): mods=self.mods, fsclient=self.fsclient, thin=self.thin, - **target + **target, ) if salt.utils.path.which("ssh-copy-id"): # we have ssh-copy-id, use it! 
@@ -516,7 +525,7 @@ def _key_deploy_run(self, host, target, re_run=True): mods=self.mods, fsclient=self.fsclient, thin=self.thin, - **target + **target, ) stdout, stderr, retcode = single.cmd_block() try: @@ -543,15 +552,32 @@ def handle_routine(self, que, opts, host, target, mine=False): fsclient=self.fsclient, thin=self.thin, mine=mine, - **target + **target, ) ret = {"id": single.id} stdout, stderr, retcode = single.run() + try: + retcode = int(retcode) + except (TypeError, ValueError): + log.warning(f"Got an invalid retcode for host '{host}': '{retcode}'") + retcode = 1 # This job is done, yield try: data = salt.utils.json.find_json(stdout) if len(data) < 2 and "local" in data: ret["ret"] = data["local"] + try: + # Ensure a reported local retcode is kept + remote_retcode = data["local"]["retcode"] + try: + retcode = int(remote_retcode) + except (TypeError, ValueError): + log.warning( + f"Host '{host}' reported an invalid retcode: '{remote_retcode}'" + ) + retcode = max(retcode, 1) + except (KeyError, TypeError): + pass else: ret["ret"] = { "stdout": stdout, @@ -564,7 +590,7 @@ def handle_routine(self, que, opts, host, target, mine=False): "stderr": stderr, "retcode": retcode, } - que.put(ret) + que.put((ret, retcode)) def handle_ssh(self, mine=False): """ @@ -608,7 +634,7 @@ def handle_ssh(self, mine=False): "fun": "", "id": host, } - yield {host: no_ret} + yield {host: no_ret}, 1 continue args = ( que, @@ -622,11 +648,12 @@ def handle_ssh(self, mine=False): running[host] = {"thread": routine} continue ret = {} + retcode = 0 try: - ret = que.get(False) + ret, retcode = que.get(False) if "id" in ret: returned.add(ret["id"]) - yield {ret["id"]: ret["ret"]} + yield {ret["id"]: ret["ret"]}, retcode except queue.Empty: pass for host in running: @@ -636,10 +663,10 @@ def handle_ssh(self, mine=False): # last checked try: while True: - ret = que.get(False) + ret, retcode = que.get(False) if "id" in ret: returned.add(ret["id"]) - yield {ret["id"]: ret["ret"]} + yield 
{ret["id"]: ret["ret"]}, retcode except queue.Empty: pass @@ -650,7 +677,7 @@ def handle_ssh(self, mine=False): ) ret = {"id": host, "ret": error} log.error(error) - yield {ret["id"]: ret["ret"]} + yield {ret["id"]: ret["ret"]}, 1 running[host]["thread"].join() rets.add(host) for host in rets: @@ -705,8 +732,8 @@ def run_iter(self, mine=False, jid=None): jid, job_load ) - for ret in self.handle_ssh(mine=mine): - host = next(iter(ret.keys())) + for ret, _ in self.handle_ssh(mine=mine): + host = next(iter(ret)) self.cache_job(jid, host, ret[host], fun) if self.event: id_, data = next(iter(ret.items())) @@ -792,22 +819,19 @@ def run(self, jid=None): ) if self.opts.get("verbose"): - msg = "Executing job with jid {}".format(jid) + msg = f"Executing job with jid {jid}" print(msg) print("-" * len(msg) + "\n") print("") sret = {} outputter = self.opts.get("output", "nested") final_exit = 0 - for ret in self.handle_ssh(): - host = next(iter(ret.keys())) - if isinstance(ret[host], dict): - host_ret = ret[host].get("retcode", 0) - if host_ret != 0: - final_exit = 1 - else: - # Error on host - final_exit = 1 + for ret, retcode in self.handle_ssh(): + host = next(iter(ret)) + if not isinstance(retcode, int): + log.warning(f"Host '{host}' returned an invalid retcode: {retcode}") + retcode = 1 + final_exit = max(final_exit, retcode) self.cache_job(jid, host, ret[host], fun) ret = self.key_deploy(host, ret) @@ -883,7 +907,7 @@ def __init__( remote_port_forwards=None, winrm=False, ssh_options=None, - **kwargs + **kwargs, ): # Get mine setting and mine_functions if defined in kwargs (from roster) self.mine = mine @@ -1007,19 +1031,36 @@ def run_ssh_pre_flight(self): """ Run our pre_flight script before running any ssh commands """ - script = os.path.join(tempfile.gettempdir(), self.ssh_pre_file) - - self.shell.send(self.ssh_pre_flight, script) - - return self.execute_script(script, script_args=self.ssh_pre_flight_args) + with tempfile.NamedTemporaryFile() as temp: + # ensure we use 
copyfile to not copy the file attributes + # we want to ensure we use the perms set by the secure + # NamedTemporaryFile + try: + shutil.copyfile(self.ssh_pre_flight, temp.name) + except OSError as err: + return ( + "", + "Could not copy pre flight script to temporary path", + 1, + ) + target_script = f".{pathlib.Path(temp.name).name}" + log.trace("Copying the pre flight script to target") + stdout, stderr, retcode = self.shell.send(temp.name, target_script) + if retcode != 0: + # We could not copy the script to the target + log.error("Could not copy the pre flight script to target") + return stdout, stderr, retcode + + log.trace("Executing the pre flight script on target") + return self.execute_script( + target_script, script_args=self.ssh_pre_flight_args + ) def check_thin_dir(self): """ check if the thindir exists on the remote machine """ - stdout, stderr, retcode = self.shell.exec_cmd( - "test -d {}".format(self.thin_dir) - ) + stdout, stderr, retcode = self.shell.exec_cmd(f"test -d {self.thin_dir}") if retcode != 0: return False return True @@ -1131,7 +1172,7 @@ def run_wfunc(self): self.id, fsclient=self.fsclient, minion_opts=self.minion_opts, - **self.target + **self.target, ) opts_pkg = pre_wrapper["test.opts_pkg"]() # pylint: disable=E1102 @@ -1170,9 +1211,11 @@ def run_wfunc(self): for grain in self.target["grains"]: opts_pkg["grains"][grain] = self.target["grains"][grain] + # Pillar compilation needs the master opts primarily, + # same as during regular operation. 
popts = {} - popts.update(opts_pkg["__master_opts__"]) popts.update(opts_pkg) + popts.update(opts_pkg["__master_opts__"]) pillar = salt.pillar.Pillar( popts, opts_pkg["grains"], @@ -1217,7 +1260,7 @@ def run_wfunc(self): self.id, fsclient=self.fsclient, minion_opts=self.minion_opts, - **self.target + **self.target, ) wrapper.fsclient.opts["cachedir"] = opts["cachedir"] self.wfuncs = salt.loader.ssh_wrapper(opts, wrapper, self.context) @@ -1265,7 +1308,7 @@ def run_wfunc(self): else: result = self.wfuncs[self.fun](*self.args, **self.kwargs) except TypeError as exc: - result = "TypeError encountered executing {}: {}".format(self.fun, exc) + result = f"TypeError encountered executing {self.fun}: {exc}" log.error(result, exc_info_on_loglevel=logging.DEBUG) retcode = 1 except Exception as exc: # pylint: disable=broad-except @@ -1274,6 +1317,10 @@ def run_wfunc(self): ) log.error(result, exc_info_on_loglevel=logging.DEBUG) retcode = 1 + + # Ensure retcode from wrappers is respected, especially state render exceptions + retcode = max(retcode, self.context.get("retcode", 0)) + # Mimic the json data-structure that "salt-call --local" will # emit (as seen in ssh_py_shim.py) if isinstance(result, dict) and "local" in result: @@ -1288,7 +1335,7 @@ def _cmd_str(self): """ if self.target.get("sudo"): sudo = ( - "sudo -p '{}'".format(salt.client.ssh.shell.SUDO_PROMPT) + f"sudo -p '{salt.client.ssh.shell.SUDO_PROMPT}'" if self.target.get("passwd") else "sudo" ) @@ -1360,20 +1407,18 @@ def execute_script(self, script, extension="py", pre_dir="", script_args=None): script_args = shlex.split(str(script_args)) args = " {}".format(" ".join([shlex.quote(str(el)) for el in script_args])) if extension == "ps1": - ret = self.shell.exec_cmd('"powershell {}"'.format(script)) + ret = self.shell.exec_cmd(f'"powershell {script}"') else: if not self.winrm: - ret = self.shell.exec_cmd( - "/bin/sh '{}{}'{}".format(pre_dir, script, args) - ) + ret = self.shell.exec_cmd(f"/bin/sh 
'{pre_dir}{script}'{args}") else: ret = saltwinshell.call_python(self, script) # Remove file from target system if not self.winrm: - self.shell.exec_cmd("rm '{}{}'".format(pre_dir, script)) + self.shell.exec_cmd(f"rm '{pre_dir}{script}'") else: - self.shell.exec_cmd("del {}".format(script)) + self.shell.exec_cmd(f"del {script}") return ret @@ -1388,18 +1433,20 @@ def shim_cmd(self, cmd_str, extension="py"): return self.shell.exec_cmd(cmd_str) # Write the shim to a temporary file in the default temp directory - with tempfile.NamedTemporaryFile( - mode="w+b", prefix="shim_", delete=False - ) as shim_tmp_file: + with tempfile.NamedTemporaryFile(mode="w+b", delete=False) as shim_tmp_file: shim_tmp_file.write(salt.utils.stringutils.to_bytes(cmd_str)) # Copy shim to target system, under $HOME/. - target_shim_file = ".{}.{}".format( - binascii.hexlify(os.urandom(6)).decode("ascii"), extension - ) + target_shim_file = f".{pathlib.Path(shim_tmp_file.name).name}" + if self.winrm: target_shim_file = saltwinshell.get_target_shim_file(self, target_shim_file) - self.shell.send(shim_tmp_file.name, target_shim_file, makedirs=True) + stdout, stderr, retcode = self.shell.send( + shim_tmp_file.name, target_shim_file, makedirs=True + ) + if retcode != 0: + log.error("Could not copy the shim script to target") + return stdout, stderr, retcode # Remove our shim file try: @@ -1461,7 +1508,7 @@ def cmd_block(self, is_retry=False): while re.search(RSTR_RE, stderr): stderr = re.split(RSTR_RE, stderr, 1)[1].strip() else: - return "ERROR: {}".format(error), stderr, retcode + return f"ERROR: {error}", stderr, retcode # FIXME: this discards output from ssh_shim if the shim succeeds. It should # always save the shim output regardless of shim success or failure. @@ -1521,7 +1568,7 @@ def cmd_block(self, is_retry=False): # If RSTR is not seen in both stdout and stderr then there # was a thin deployment problem. 
return ( - "ERROR: Failure deploying ext_mods: {}".format(stdout), + f"ERROR: Failure deploying ext_mods: {stdout}", stderr, retcode, ) @@ -1629,7 +1676,7 @@ def module_refresh(self): return -def lowstate_file_refs(chunks): +def lowstate_file_refs(chunks): # pragma: no cover """ Create a list of file ref objects to reconcile """ @@ -1682,50 +1729,49 @@ def mod_data(fsclient): "returners", ] ret = {} - envs = fsclient.envs() - ver_base = "" - for env in envs: - files = fsclient.file_list(env) - for ref in sync_refs: - mods_data = {} - pref = "_{}".format(ref) - for fn_ in sorted(files): - if fn_.startswith(pref): - if fn_.endswith((".py", ".so", ".pyx")): - full = salt.utils.url.create(fn_) - mod_path = fsclient.cache_file(full, env) - if not os.path.isfile(mod_path): - continue - mods_data[os.path.basename(fn_)] = mod_path - chunk = salt.utils.hashutils.get_hash(mod_path) - ver_base += chunk - if mods_data: - if ref in ret: - ret[ref].update(mods_data) - else: - ret[ref] = mods_data - if not ret: - return {} - - ver_base = salt.utils.stringutils.to_bytes(ver_base) - - ver = hashlib.sha1(ver_base).hexdigest() - ext_tar_path = os.path.join( - fsclient.opts["cachedir"], "ext_mods.{}.tgz".format(ver) - ) - mods = {"version": ver, "file": ext_tar_path} - if os.path.isfile(ext_tar_path): + with fsclient: + envs = fsclient.envs() + ver_base = "" + for env in envs: + files = fsclient.file_list(env) + for ref in sync_refs: + mods_data = {} + pref = f"_{ref}" + for fn_ in sorted(files): + if fn_.startswith(pref): + if fn_.endswith((".py", ".so", ".pyx")): + full = salt.utils.url.create(fn_) + mod_path = fsclient.cache_file(full, env) + if not os.path.isfile(mod_path): + continue + mods_data[os.path.basename(fn_)] = mod_path + chunk = salt.utils.hashutils.get_hash(mod_path) + ver_base += chunk + if mods_data: + if ref in ret: + ret[ref].update(mods_data) + else: + ret[ref] = mods_data + if not ret: + return {} + + ver_base = salt.utils.stringutils.to_bytes(ver_base) + + ver = 
hashlib.sha1(ver_base).hexdigest() + ext_tar_path = os.path.join(fsclient.opts["cachedir"], f"ext_mods.{ver}.tgz") + mods = {"version": ver, "file": ext_tar_path} + if os.path.isfile(ext_tar_path): + return mods + tfp = tarfile.open(ext_tar_path, "w:gz") + verfile = os.path.join(fsclient.opts["cachedir"], "ext_mods.ver") + with salt.utils.files.fopen(verfile, "w+") as fp_: + fp_.write(ver) + tfp.add(verfile, "ext_version") + for ref in ret: + for fn_ in ret[ref]: + tfp.add(ret[ref][fn_], os.path.join(ref, fn_)) + tfp.close() return mods - tfp = tarfile.open(ext_tar_path, "w:gz") - verfile = os.path.join(fsclient.opts["cachedir"], "ext_mods.ver") - with salt.utils.files.fopen(verfile, "w+") as fp_: - fp_.write(ver) - tfp.add(verfile, "ext_version") - for ref in ret: - for fn_ in ret[ref]: - tfp.add(ret[ref][fn_], os.path.join(ref, fn_)) - tfp.close() - return mods def ssh_version(): @@ -1761,7 +1807,7 @@ def _convert_args(args): for key in list(arg.keys()): if key == "__kwarg__": continue - converted.append("{}={}".format(key, arg[key])) + converted.append(f"{key}={arg[key]}") else: converted.append(arg) return converted diff --git a/salt/client/ssh/client.py b/salt/client/ssh/client.py index be9247cb157e..8727ce23c3c2 100644 --- a/salt/client/ssh/client.py +++ b/salt/client/ssh/client.py @@ -39,6 +39,10 @@ def __init__( # Salt API should never offer a custom roster! self.opts["__disable_custom_roster"] = disable_custom_roster + # Pillar compilation and nested SSH calls require the correct config_dir + # in __opts__, otherwise we will use the SSH minion's one later. 
+ if "config_dir" not in self.opts: + self.opts["config_dir"] = os.path.dirname(c_path) def sanitize_kwargs(self, kwargs): roster_vals = [ diff --git a/salt/client/ssh/shell.py b/salt/client/ssh/shell.py index cfa82d13c2d4..2df328ed1f5a 100644 --- a/salt/client/ssh/shell.py +++ b/salt/client/ssh/shell.py @@ -464,6 +464,19 @@ def _run_cmd(self, cmd, key_accept=False, passwd_retries=3): if stdout: old_stdout = stdout time.sleep(0.01) - return ret_stdout, ret_stderr, term.exitstatus finally: term.close(terminate=True, kill=True) + # Ensure term.close is called before querying the exitstatus, otherwise + # it might still be None. + ret_status = term.exitstatus + if ret_status is None: + if term.signalstatus is not None: + # The process died because of an unhandled signal, report + # a non-zero exitcode bash-style. + ret_status = 128 + term.signalstatus + else: + log.warning( + "VT reported both exitstatus and signalstatus as None. " + "This is likely a bug." + ) + return ret_stdout, ret_stderr, ret_status diff --git a/salt/client/ssh/ssh_py_shim.py b/salt/client/ssh/ssh_py_shim.py index b77749f49532..9b8f9e0f6581 100644 --- a/salt/client/ssh/ssh_py_shim.py +++ b/salt/client/ssh/ssh_py_shim.py @@ -67,14 +67,14 @@ def get_system_encoding(): import locale try: - encoding = locale.getdefaultlocale()[-1] - except ValueError: - # A bad locale setting was most likely found: - # https://github.com/saltstack/salt/issues/26063 - pass + encoding = locale.getencoding() + except AttributeError: + # Python < 3.11 + encoding = locale.getpreferredencoding(do_setlocale=True) # This is now garbage collectable del locale + if not encoding: # This is most likely ascii which is not the best but we were # unable to find a better encoding. 
If this fails, we fall all diff --git a/salt/client/ssh/state.py b/salt/client/ssh/state.py index 4ee62a293a00..ccc72198b651 100644 --- a/salt/client/ssh/state.py +++ b/salt/client/ssh/state.py @@ -31,10 +31,17 @@ class SSHState(salt.state.State): Create a State object which wraps the SSH functions for state operations """ - def __init__(self, opts, pillar=None, wrapper=None, context=None): + def __init__( + self, + opts, + pillar_override=None, + wrapper=None, + context=None, + initial_pillar=None, + ): self.wrapper = wrapper self.context = context - super().__init__(opts, pillar) + super().__init__(opts, pillar_override, initial_pillar=initial_pillar) def load_modules(self, data=None, proxy=None): """ @@ -49,6 +56,28 @@ def load_modules(self, data=None, proxy=None): ) self.rend = salt.loader.render(self.opts, self.functions) + def _gather_pillar(self): + """ + The opts used during pillar rendering should contain the master + opts in the root namespace. self.opts is the modified minion opts, + containing the original master opts in __master_opts__. + """ + _opts = self.opts + popts = {} + # Pillar compilation needs the master opts primarily, + # same as during regular operation. + popts.update(_opts) + popts.update(_opts.get("__master_opts__", {})) + # But, salt.state.State takes the parameters for get_pillar from + # the opts, so we need to ensure they are correct for the minion. 
+ popts["id"] = _opts["id"] + popts["saltenv"] = _opts["saltenv"] + popts["pillarenv"] = _opts.get("pillarenv") + self.opts = popts + pillar = super()._gather_pillar() + self.opts = _opts + return pillar + def check_refresh(self, data, ret): """ Stub out check_refresh @@ -69,10 +98,24 @@ class SSHHighState(salt.state.BaseHighState): stack = [] - def __init__(self, opts, pillar=None, wrapper=None, fsclient=None, context=None): + def __init__( + self, + opts, + pillar_override=None, + wrapper=None, + fsclient=None, + context=None, + initial_pillar=None, + ): self.client = fsclient salt.state.BaseHighState.__init__(self, opts) - self.state = SSHState(opts, pillar, wrapper, context=context) + self.state = SSHState( + opts, + pillar_override, + wrapper, + context=context, + initial_pillar=initial_pillar, + ) self.matchers = salt.loader.matchers(self.opts) self.tops = salt.loader.tops(self.opts) diff --git a/salt/client/ssh/wrapper/pillar.py b/salt/client/ssh/wrapper/pillar.py index 98fcb66a9ff3..bc1b625d5cb1 100644 --- a/salt/client/ssh/wrapper/pillar.py +++ b/salt/client/ssh/wrapper/pillar.py @@ -17,7 +17,7 @@ def get(key, default="", merge=False, delimiter=DEFAULT_TARGET_DELIM): """ - .. versionadded:: 0.14 + .. versionadded:: 0.14.0 Attempt to retrieve the named value from pillar, if the named value is not available return the passed default. The default return is an empty string. 
diff --git a/salt/client/ssh/wrapper/saltcheck.py b/salt/client/ssh/wrapper/saltcheck.py index d47b5cf68833..b0b945938094 100644 --- a/salt/client/ssh/wrapper/saltcheck.py +++ b/salt/client/ssh/wrapper/saltcheck.py @@ -9,6 +9,7 @@ import tempfile from contextlib import closing +import salt.fileclient import salt.utils.files import salt.utils.json import salt.utils.url @@ -28,65 +29,62 @@ def update_master_cache(states, saltenv="base"): # Setup for copying states to gendir gendir = tempfile.mkdtemp() trans_tar = salt.utils.files.mkstemp() - if "cp.fileclient_{}".format(id(__opts__)) not in __context__: - __context__[ - "cp.fileclient_{}".format(id(__opts__)) - ] = salt.fileclient.get_file_client(__opts__) - - # generate cp.list_states output and save to gendir - cp_output = salt.utils.json.dumps(__salt__["cp.list_states"]()) - cp_output_file = os.path.join(gendir, "cp_output.txt") - with salt.utils.files.fopen(cp_output_file, "w") as fp: - fp.write(cp_output) - - # cp state directories to gendir - already_processed = [] - sls_list = salt.utils.args.split_input(states) - for state_name in sls_list: - # generate low data for each state and save to gendir - state_low_file = os.path.join(gendir, state_name + ".low") - state_low_output = salt.utils.json.dumps( - __salt__["state.show_low_sls"](state_name) - ) - with salt.utils.files.fopen(state_low_file, "w") as fp: - fp.write(state_low_output) - - state_name = state_name.replace(".", os.sep) - if state_name in already_processed: - log.debug("Already cached state for %s", state_name) - else: - file_copy_file = os.path.join(gendir, state_name + ".copy") - log.debug("copying %s to %s", state_name, gendir) - qualified_name = salt.utils.url.create(state_name, saltenv) - # Duplicate cp.get_dir to gendir - copy_result = __context__["cp.fileclient_{}".format(id(__opts__))].get_dir( - qualified_name, gendir, saltenv + with salt.fileclient.get_file_client(__opts__) as cp_fileclient: + + # generate cp.list_states output and save to 
gendir + cp_output = salt.utils.json.dumps(__salt__["cp.list_states"]()) + cp_output_file = os.path.join(gendir, "cp_output.txt") + with salt.utils.files.fopen(cp_output_file, "w") as fp: + fp.write(cp_output) + + # cp state directories to gendir + already_processed = [] + sls_list = salt.utils.args.split_input(states) + for state_name in sls_list: + # generate low data for each state and save to gendir + state_low_file = os.path.join(gendir, state_name + ".low") + state_low_output = salt.utils.json.dumps( + __salt__["state.show_low_sls"](state_name) ) - if copy_result: - copy_result = [dir.replace(gendir, state_cache) for dir in copy_result] - copy_result_output = salt.utils.json.dumps(copy_result) - with salt.utils.files.fopen(file_copy_file, "w") as fp: - fp.write(copy_result_output) - already_processed.append(state_name) + with salt.utils.files.fopen(state_low_file, "w") as fp: + fp.write(state_low_output) + + state_name = state_name.replace(".", os.sep) + if state_name in already_processed: + log.debug("Already cached state for %s", state_name) else: - # If files were not copied, assume state.file.sls was given and just copy state - state_name = os.path.dirname(state_name) file_copy_file = os.path.join(gendir, state_name + ".copy") - if state_name in already_processed: - log.debug("Already cached state for %s", state_name) + log.debug("copying %s to %s", state_name, gendir) + qualified_name = salt.utils.url.create(state_name, saltenv) + # Duplicate cp.get_dir to gendir + copy_result = cp_fileclient.get_dir(qualified_name, gendir, saltenv) + if copy_result: + copy_result = [ + dir.replace(gendir, state_cache) for dir in copy_result + ] + copy_result_output = salt.utils.json.dumps(copy_result) + with salt.utils.files.fopen(file_copy_file, "w") as fp: + fp.write(copy_result_output) + already_processed.append(state_name) else: - qualified_name = salt.utils.url.create(state_name, saltenv) - copy_result = __context__[ - "cp.fileclient_{}".format(id(__opts__)) - 
].get_dir(qualified_name, gendir, saltenv) - if copy_result: - copy_result = [ - dir.replace(gendir, state_cache) for dir in copy_result - ] - copy_result_output = salt.utils.json.dumps(copy_result) - with salt.utils.files.fopen(file_copy_file, "w") as fp: - fp.write(copy_result_output) - already_processed.append(state_name) + # If files were not copied, assume state.file.sls was given and just copy state + state_name = os.path.dirname(state_name) + file_copy_file = os.path.join(gendir, state_name + ".copy") + if state_name in already_processed: + log.debug("Already cached state for %s", state_name) + else: + qualified_name = salt.utils.url.create(state_name, saltenv) + copy_result = cp_fileclient.get_dir( + qualified_name, gendir, saltenv + ) + if copy_result: + copy_result = [ + dir.replace(gendir, state_cache) for dir in copy_result + ] + copy_result_output = salt.utils.json.dumps(copy_result) + with salt.utils.files.fopen(file_copy_file, "w") as fp: + fp.write(copy_result_output) + already_processed.append(state_name) # turn gendir into tarball and remove gendir try: diff --git a/salt/client/ssh/wrapper/state.py b/salt/client/ssh/wrapper/state.py index 5bfc1ecd049d..aa61e07f81e8 100644 --- a/salt/client/ssh/wrapper/state.py +++ b/salt/client/ssh/wrapper/state.py @@ -8,6 +8,7 @@ import salt.client.ssh.shell import salt.client.ssh.state +import salt.defaults.exitcodes import salt.loader import salt.minion import salt.roster @@ -27,7 +28,7 @@ log = logging.getLogger(__name__) -def _ssh_state(chunks, st_kwargs, kwargs, test=False): +def _ssh_state(chunks, st_kwargs, kwargs, pillar, test=False): """ Function to run a state with the given chunk via salt-ssh """ @@ -42,7 +43,7 @@ def _ssh_state(chunks, st_kwargs, kwargs, test=False): __context__["fileclient"], chunks, file_refs, - __pillar__.value(), + pillar, st_kwargs["id_"], ) trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, __opts__["hash_type"]) @@ -54,7 +55,7 @@ def _ssh_state(chunks, st_kwargs, kwargs, 
test=False): cmd, fsclient=__context__["fileclient"], minion_opts=__salt__.minion_opts, - **st_kwargs + **st_kwargs, ) single.shell.send(trans_tar, "{}/salt_state.tgz".format(__opts__["thin_dir"])) stdout, stderr, _ = single.cmd_block() @@ -84,14 +85,14 @@ def _set_retcode(ret, highstate=None): """ # Set default retcode to 0 - __context__["retcode"] = 0 + __context__["retcode"] = salt.defaults.exitcodes.EX_OK if isinstance(ret, list): - __context__["retcode"] = 1 + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return if not salt.utils.state.check_result(ret, highstate=highstate): - __context__["retcode"] = 2 + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_FAILURE def _check_pillar(kwargs, pillar=None): @@ -172,16 +173,30 @@ def sls(mods, saltenv="base", test=None, exclude=None, **kwargs): """ st_kwargs = __salt__.kwargs __opts__["grains"] = __grains__.value() - __pillar__.update(kwargs.get("pillar", {})) opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) opts["test"] = _get_test_value(test, **kwargs) + initial_pillar = _get_initial_pillar(opts) + pillar_override = kwargs.get("pillar") with salt.client.ssh.state.SSHHighState( opts, - __pillar__.value(), + pillar_override, __salt__.value(), __context__["fileclient"], context=__context__.value(), + initial_pillar=initial_pillar, ) as st_: + if not _check_pillar(kwargs, st_.opts["pillar"]): + __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE + err = ["Pillar failed to render with the following messages:"] + err += st_.opts["pillar"]["_errors"] + return err + try: + pillar = st_.opts["pillar"].value() + except AttributeError: + pillar = st_.opts["pillar"] + if pillar_override is not None or initial_pillar is None: + # Ensure other wrappers use the correct pillar + __pillar__.update(pillar) st_.push_active() mods = _parse_mods(mods) high_data, errors = st_.render_highstate( @@ -198,12 +213,14 @@ def sls(mods, saltenv="base", test=None, exclude=None, **kwargs): 
errors += ext_errors errors += st_.state.verify_high(high_data) if errors: + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors high_data, req_in_errors = st_.state.requisite_in(high_data) errors += req_in_errors high_data = st_.state.apply_exclude(high_data) # Verify that the high data is structurally sound if errors: + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors # Compile and verify the raw chunks chunks = st_.state.compile_high_data(high_data) @@ -223,7 +240,7 @@ def sls(mods, saltenv="base", test=None, exclude=None, **kwargs): __context__["fileclient"], chunks, file_refs, - __pillar__.value(), + pillar, st_kwargs["id_"], roster_grains, ) @@ -236,7 +253,7 @@ def sls(mods, saltenv="base", test=None, exclude=None, **kwargs): cmd, fsclient=__context__["fileclient"], minion_opts=__salt__.minion_opts, - **st_kwargs + **st_kwargs, ) single.shell.send(trans_tar, "{}/salt_state.tgz".format(opts["thin_dir"])) stdout, stderr, _ = single.cmd_block() @@ -316,17 +333,12 @@ def _check_queue(queue, kwargs): else: conflict = running(concurrent=kwargs.get("concurrent", False)) if conflict: - __context__["retcode"] = 1 + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return conflict def _get_initial_pillar(opts): - return ( - __pillar__ - if __opts__["__cli"] == "salt-call" - and opts["pillarenv"] == __opts__["pillarenv"] - else None - ) + return __pillar__.value() if opts["pillarenv"] == __opts__["pillarenv"] else None def low(data, **kwargs): @@ -345,10 +357,11 @@ def low(data, **kwargs): chunks = [data] with salt.client.ssh.state.SSHHighState( __opts__, - __pillar__.value(), + None, __salt__.value(), __context__["fileclient"], context=__context__.value(), + initial_pillar=__pillar__.value(), ) as st_: for chunk in chunks: chunk["__id__"] = ( @@ -384,7 +397,7 @@ def low(data, **kwargs): cmd, fsclient=__context__["fileclient"], minion_opts=__salt__.minion_opts, - **st_kwargs 
+ **st_kwargs, ) single.shell.send(trans_tar, "{}/salt_state.tgz".format(__opts__["thin_dir"])) stdout, stderr, _ = single.cmd_block() @@ -432,17 +445,26 @@ def high(data, **kwargs): salt '*' state.high '{"vim": {"pkg": ["installed"]}}' """ - __pillar__.update(kwargs.get("pillar", {})) st_kwargs = __salt__.kwargs __opts__["grains"] = __grains__.value() opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) + pillar_override = kwargs.get("pillar") + initial_pillar = _get_initial_pillar(opts) with salt.client.ssh.state.SSHHighState( opts, - __pillar__.value(), + pillar_override, __salt__.value(), __context__["fileclient"], context=__context__.value(), + initial_pillar=initial_pillar, ) as st_: + try: + pillar = st_.opts["pillar"].value() + except AttributeError: + pillar = st_.opts["pillar"] + if pillar_override is not None or initial_pillar is None: + # Ensure other wrappers use the correct pillar + __pillar__.update(pillar) st_.push_active() chunks = st_.state.compile_high_data(data) file_refs = salt.client.ssh.state.lowstate_file_refs( @@ -461,7 +483,7 @@ def high(data, **kwargs): __context__["fileclient"], chunks, file_refs, - __pillar__.value(), + pillar, st_kwargs["id_"], roster_grains, ) @@ -474,7 +496,7 @@ def high(data, **kwargs): cmd, fsclient=__context__["fileclient"], minion_opts=__salt__.minion_opts, - **st_kwargs + **st_kwargs, ) single.shell.send(trans_tar, "{}/salt_state.tgz".format(opts["thin_dir"])) stdout, stderr, _ = single.cmd_block() @@ -550,7 +572,7 @@ def request(mods=None, **kwargs): try: if salt.utils.platform.is_windows(): # Make sure cache file isn't read-only - __salt__["cmd.run"]('attrib -R "{}"'.format(notify_path)) + __salt__["cmd.run"](f'attrib -R "{notify_path}"') with salt.utils.files.fopen(notify_path, "w+b") as fp_: salt.payload.dump(req, fp_) except OSError: @@ -614,7 +636,7 @@ def clear_request(name=None): try: if salt.utils.platform.is_windows(): # Make sure cache file isn't read-only - __salt__["cmd.run"]('attrib -R 
"{}"'.format(notify_path)) + __salt__["cmd.run"](f'attrib -R "{notify_path}"') with salt.utils.files.fopen(notify_path, "w+b") as fp_: salt.payload.dump(req, fp_) except OSError: @@ -669,18 +691,32 @@ def highstate(test=None, **kwargs): salt '*' state.highstate exclude=sls_to_exclude salt '*' state.highstate exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]" """ - __pillar__.update(kwargs.get("pillar", {})) st_kwargs = __salt__.kwargs __opts__["grains"] = __grains__.value() opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) opts["test"] = _get_test_value(test, **kwargs) + pillar_override = kwargs.get("pillar") + initial_pillar = _get_initial_pillar(opts) with salt.client.ssh.state.SSHHighState( opts, - __pillar__.value(), + pillar_override, __salt__.value(), __context__["fileclient"], context=__context__.value(), + initial_pillar=initial_pillar, ) as st_: + if not _check_pillar(kwargs, st_.opts["pillar"]): + __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE + err = ["Pillar failed to render with the following messages:"] + err += st_.opts["pillar"]["_errors"] + return err + try: + pillar = st_.opts["pillar"].value() + except AttributeError: + pillar = st_.opts["pillar"] + if pillar_override is not None or initial_pillar is None: + # Ensure other wrappers use the correct pillar + __pillar__.update(pillar) st_.push_active() chunks = st_.compile_low_chunks(context=__context__.value()) file_refs = salt.client.ssh.state.lowstate_file_refs( @@ -692,7 +728,7 @@ def highstate(test=None, **kwargs): # Check for errors for chunk in chunks: if not isinstance(chunk, dict): - __context__["retcode"] = 1 + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return chunks roster = salt.roster.Roster(opts, opts.get("roster", "flat")) @@ -704,7 +740,7 @@ def highstate(test=None, **kwargs): __context__["fileclient"], chunks, file_refs, - __pillar__.value(), + pillar, st_kwargs["id_"], roster_grains, ) @@ -717,7 +753,7 @@ def 
highstate(test=None, **kwargs): cmd, fsclient=__context__["fileclient"], minion_opts=__salt__.minion_opts, - **st_kwargs + **st_kwargs, ) single.shell.send(trans_tar, "{}/salt_state.tgz".format(opts["thin_dir"])) stdout, stderr, _ = single.cmd_block() @@ -751,24 +787,40 @@ def top(topfn, test=None, **kwargs): salt '*' state.top reverse_top.sls exclude=sls_to_exclude salt '*' state.top reverse_top.sls exclude="[{'id': 'id_to_exclude'}, {'sls': 'sls_to_exclude'}]" """ - __pillar__.update(kwargs.get("pillar", {})) st_kwargs = __salt__.kwargs __opts__["grains"] = __grains__.value() opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) - if salt.utils.args.test_mode(test=test, **kwargs): - opts["test"] = True - else: - opts["test"] = __opts__.get("test", None) + opts["test"] = _get_test_value(test, **kwargs) + pillar_override = kwargs.get("pillar") + initial_pillar = _get_initial_pillar(opts) with salt.client.ssh.state.SSHHighState( opts, - __pillar__.value(), + pillar_override, __salt__.value(), __context__["fileclient"], context=__context__.value(), + initial_pillar=initial_pillar, ) as st_: + if not _check_pillar(kwargs, st_.opts["pillar"]): + __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE + err = ["Pillar failed to render with the following messages:"] + err += st_.opts["pillar"]["_errors"] + return err + try: + pillar = st_.opts["pillar"].value() + except AttributeError: + pillar = st_.opts["pillar"] + if pillar_override is not None or initial_pillar is None: + # Ensure other wrappers use the correct pillar + __pillar__.update(pillar) st_.opts["state_top"] = os.path.join("salt://", topfn) st_.push_active() chunks = st_.compile_low_chunks(context=__context__.value()) + # Check for errors + for chunk in chunks: + if not isinstance(chunk, dict): + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR + return chunks file_refs = salt.client.ssh.state.lowstate_file_refs( chunks, _merge_extra_filerefs( @@ -785,7 +837,7 @@ def 
top(topfn, test=None, **kwargs): __context__["fileclient"], chunks, file_refs, - __pillar__.value(), + pillar, st_kwargs["id_"], roster_grains, ) @@ -798,7 +850,7 @@ def top(topfn, test=None, **kwargs): cmd, fsclient=__context__["fileclient"], minion_opts=__salt__.minion_opts, - **st_kwargs + **st_kwargs, ) single.shell.send(trans_tar, "{}/salt_state.tgz".format(opts["thin_dir"])) stdout, stderr, _ = single.cmd_block() @@ -832,15 +884,34 @@ def show_highstate(**kwargs): """ __opts__["grains"] = __grains__.value() opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) + pillar_override = kwargs.get("pillar") + initial_pillar = _get_initial_pillar(opts) with salt.client.ssh.state.SSHHighState( opts, - __pillar__, + pillar_override, __salt__, __context__["fileclient"], context=__context__.value(), + initial_pillar=initial_pillar, ) as st_: + if not _check_pillar(kwargs, st_.opts["pillar"]): + __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE + err = ["Pillar failed to render with the following messages:"] + err += st_.opts["pillar"]["_errors"] + return err + try: + pillar = st_.opts["pillar"].value() + except AttributeError: + pillar = st_.opts["pillar"] + if pillar_override is not None or initial_pillar is None: + # Ensure other wrappers use the correct pillar + __pillar__.update(pillar) st_.push_active() chunks = st_.compile_highstate(context=__context__.value()) + # Check for errors + if not isinstance(chunks, dict): + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR + return chunks _cleanup_slsmod_high_data(chunks) return chunks @@ -859,11 +930,17 @@ def show_lowstate(**kwargs): opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) with salt.client.ssh.state.SSHHighState( opts, - __pillar__.value(), + None, __salt__, __context__["fileclient"], context=__context__.value(), + initial_pillar=_get_initial_pillar(opts), ) as st_: + if not _check_pillar(kwargs, st_.opts["pillar"]): + __context__["retcode"] = 
salt.defaults.exitcodes.EX_PILLAR_FAILURE + err = ["Pillar failed to render with the following messages:"] + err += st_.opts["pillar"]["_errors"] + return err st_.push_active() chunks = st_.compile_low_chunks(context=__context__.value()) _cleanup_slsmod_low_data(chunks) @@ -902,7 +979,6 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs): salt '*' state.sls_id my_state my_module,a_common_module """ - __pillar__.update(kwargs.get("pillar", {})) st_kwargs = __salt__.kwargs conflict = _check_queue(queue, kwargs) if conflict is not None: @@ -916,20 +992,30 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs): if opts["saltenv"] is None: opts["saltenv"] = "base" + pillar_override = kwargs.get("pillar") + initial_pillar = _get_initial_pillar(opts) with salt.client.ssh.state.SSHHighState( __opts__, - __pillar__.value(), + pillar_override, __salt__, __context__["fileclient"], context=__context__.value(), + initial_pillar=initial_pillar, ) as st_: if not _check_pillar(kwargs, st_.opts["pillar"]): - __context__["retcode"] = 5 + __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE err = ["Pillar failed to render with the following messages:"] err += __pillar__["_errors"] return err + try: + pillar = st_.opts["pillar"].value() + except AttributeError: + pillar = st_.opts["pillar"] + if pillar_override is not None or initial_pillar is None: + # Ensure other wrappers use the correct pillar + __pillar__.update(pillar) split_mods = _parse_mods(mods) st_.push_active() high_, errors = st_.render_highstate( @@ -943,7 +1029,7 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs): # but it is required to get the unit tests to pass. 
errors.extend(req_in_errors) if errors: - __context__["retcode"] = 1 + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors chunks = st_.state.compile_high_data(high_) chunk = [x for x in chunks if x.get("__id__", "") == id_] @@ -955,7 +1041,7 @@ def sls_id(id_, mods, test=None, queue=False, **kwargs): ) ) - ret = _ssh_state(chunk, st_kwargs, kwargs, test=test) + ret = _ssh_state(chunk, st_kwargs, kwargs, pillar, test=test) _set_retcode(ret, highstate=highstate) # Work around Windows multiprocessing bug, set __opts__['test'] back to # value from before this function was run. @@ -974,20 +1060,31 @@ def show_sls(mods, saltenv="base", test=None, **kwargs): salt '*' state.show_sls core,edit.vim dev """ - __pillar__.update(kwargs.get("pillar", {})) __opts__["grains"] = __grains__.value() opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) - if salt.utils.args.test_mode(test=test, **kwargs): - opts["test"] = True - else: - opts["test"] = __opts__.get("test", None) + opts["test"] = _get_test_value(test, **kwargs) + pillar_override = kwargs.get("pillar") + initial_pillar = _get_initial_pillar(opts) with salt.client.ssh.state.SSHHighState( opts, - __pillar__.value(), + pillar_override, __salt__, __context__["fileclient"], context=__context__.value(), + initial_pillar=initial_pillar, ) as st_: + if not _check_pillar(kwargs, st_.opts["pillar"]): + __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE + err = ["Pillar failed to render with the following messages:"] + err += st_.opts["pillar"]["_errors"] + return err + try: + pillar = st_.opts["pillar"].value() + except AttributeError: + pillar = st_.opts["pillar"] + if pillar_override is not None or initial_pillar is None: + # Ensure other wrappers use the correct pillar + __pillar__.update(pillar) st_.push_active() mods = _parse_mods(mods) high_data, errors = st_.render_highstate( @@ -997,12 +1094,14 @@ def show_sls(mods, saltenv="base", test=None, **kwargs): errors += 
ext_errors errors += st_.state.verify_high(high_data) if errors: + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors high_data, req_in_errors = st_.state.requisite_in(high_data) errors += req_in_errors high_data = st_.state.apply_exclude(high_data) # Verify that the high data is structurally sound if errors: + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors _cleanup_slsmod_high_data(high_data) return high_data @@ -1021,21 +1120,31 @@ def show_low_sls(mods, saltenv="base", test=None, **kwargs): salt '*' state.show_low_sls core,edit.vim dev """ - __pillar__.update(kwargs.get("pillar", {})) __opts__["grains"] = __grains__.value() - opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) - if salt.utils.args.test_mode(test=test, **kwargs): - opts["test"] = True - else: - opts["test"] = __opts__.get("test", None) + opts["test"] = _get_test_value(test, **kwargs) + pillar_override = kwargs.get("pillar") + initial_pillar = _get_initial_pillar(opts) with salt.client.ssh.state.SSHHighState( opts, - __pillar__.value(), + pillar_override, __salt__, __context__["fileclient"], context=__context__.value(), + initial_pillar=initial_pillar, ) as st_: + if not _check_pillar(kwargs, st_.opts["pillar"]): + __context__["retcode"] = salt.defaults.exitcodes.EX_PILLAR_FAILURE + err = ["Pillar failed to render with the following messages:"] + err += st_.opts["pillar"]["_errors"] + return err + try: + pillar = st_.opts["pillar"].value() + except AttributeError: + pillar = st_.opts["pillar"] + if pillar_override is not None or initial_pillar is None: + # Ensure other wrappers use the correct pillar + __pillar__.update(pillar) st_.push_active() mods = _parse_mods(mods) high_data, errors = st_.render_highstate( @@ -1045,12 +1154,14 @@ def show_low_sls(mods, saltenv="base", test=None, **kwargs): errors += ext_errors errors += st_.state.verify_high(high_data) if errors: + __context__["retcode"] = 
salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors high_data, req_in_errors = st_.state.requisite_in(high_data) errors += req_in_errors high_data = st_.state.apply_exclude(high_data) # Verify that the high data is structurally sound if errors: + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors ret = st_.state.compile_high_data(high_data) _cleanup_slsmod_low_data(ret) @@ -1071,15 +1182,17 @@ def show_top(**kwargs): opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) with salt.client.ssh.state.SSHHighState( opts, - __pillar__.value(), + None, __salt__, __context__["fileclient"], context=__context__.value(), + initial_pillar=_get_initial_pillar(opts), ) as st_: top_data = st_.get_top(context=__context__.value()) errors = [] errors += st_.verify_tops(top_data) if errors: + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return errors matches = st_.top_matches(top_data) return matches @@ -1110,7 +1223,7 @@ def single(fun, name, test=None, **kwargs): # state.fun -> [state, fun] comps = fun.split(".") if len(comps) < 2: - __context__["retcode"] = 1 + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return "Invalid function passed" # Create the low chunk, using kwargs as a base @@ -1119,21 +1232,26 @@ def single(fun, name, test=None, **kwargs): opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) # Set test mode - if salt.utils.args.test_mode(test=test, **kwargs): - opts["test"] = True - else: - opts["test"] = __opts__.get("test", None) + opts["test"] = _get_test_value(test, **kwargs) # Get the override pillar data - __pillar__.update(kwargs.get("pillar", {})) + # This needs to be removed from the kwargs, they are called + # as a lowstate with one item, not a single chunk + pillar_override = kwargs.pop("pillar", None) # Create the State environment - st_ = salt.client.ssh.state.SSHState(opts, __pillar__) + st_ = salt.client.ssh.state.SSHState( + opts, pillar_override, 
initial_pillar=_get_initial_pillar(opts) + ) + try: + pillar = st_.opts["pillar"].value() + except AttributeError: + pillar = st_.opts["pillar"] # Verify the low chunk err = st_.verify_data(kwargs) if err: - __context__["retcode"] = 1 + __context__["retcode"] = salt.defaults.exitcodes.EX_STATE_COMPILER_ERROR return err # Must be a list of low-chunks @@ -1156,7 +1274,7 @@ def single(fun, name, test=None, **kwargs): __context__["fileclient"], chunks, file_refs, - __pillar__.value(), + pillar, st_kwargs["id_"], roster_grains, ) @@ -1175,7 +1293,7 @@ def single(fun, name, test=None, **kwargs): cmd, fsclient=__context__["fileclient"], minion_opts=__salt__.minion_opts, - **st_kwargs + **st_kwargs, ) # Copy the tar down diff --git a/salt/cloud/__init__.py b/salt/cloud/__init__.py index 2b21483599d3..a4d11eed59a9 100644 --- a/salt/cloud/__init__.py +++ b/salt/cloud/__init__.py @@ -1427,7 +1427,8 @@ def run_profile(self, profile, names, vm_overrides=None): raise SaltCloudSystemExit("Failed to deploy VM") continue if self.opts.get("show_deploy_args", False) is False: - ret[name].pop("deploy_kwargs", None) + if isinstance(ret[name], dict): + ret[name].pop("deploy_kwargs", None) except (SaltCloudSystemExit, SaltCloudConfigError) as exc: if len(names) == 1: raise diff --git a/salt/cloud/clouds/clc.py b/salt/cloud/clouds/clc.py index f2912531accc..c111c49a4813 100644 --- a/salt/cloud/clouds/clc.py +++ b/salt/cloud/clouds/clc.py @@ -2,7 +2,7 @@ CenturyLink Cloud Module ======================== -.. versionadded:: 2018.3 +.. versionadded:: 2018.3.0 The CLC cloud module allows you to manage CLC Via the CLC SDK. 
diff --git a/salt/cloud/deploy/bootstrap-salt.sh b/salt/cloud/deploy/bootstrap-salt.sh index 13f4471dcf4e..f66aeea3a8a2 100644 --- a/salt/cloud/deploy/bootstrap-salt.sh +++ b/salt/cloud/deploy/bootstrap-salt.sh @@ -23,7 +23,7 @@ #====================================================================================================================== set -o nounset # Treat unset variables as an error -__ScriptVersion="2022.10.04" +__ScriptVersion="2023.11.07" __ScriptName="bootstrap-salt.sh" __ScriptFullName="$0" @@ -224,7 +224,6 @@ _KEEP_TEMP_FILES=${BS_KEEP_TEMP_FILES:-$BS_FALSE} _TEMP_CONFIG_DIR="null" _SALTSTACK_REPO_URL="https://github.com/saltstack/salt.git" _SALT_REPO_URL=${_SALTSTACK_REPO_URL} -_DOWNSTREAM_PKG_REPO=$BS_FALSE _TEMP_KEYS_DIR="null" _SLEEP="${__DEFAULT_SLEEP}" _INSTALL_MASTER=$BS_FALSE @@ -269,6 +268,7 @@ _CUSTOM_MINION_CONFIG="null" _QUIET_GIT_INSTALLATION=$BS_FALSE _REPO_URL="repo.saltproject.io" _ONEDIR_DIR="salt" +_ONEDIR_NIGHTLY_DIR="salt-dev/${_ONEDIR_DIR}" _PY_EXE="python3" _INSTALL_PY="$BS_FALSE" _TORNADO_MAX_PY3_VERSION="5.0" @@ -276,6 +276,9 @@ _POST_NEON_INSTALL=$BS_FALSE _MINIMUM_PIP_VERSION="9.0.1" _MINIMUM_SETUPTOOLS_VERSION="9.1" _POST_NEON_PIP_INSTALL_ARGS="--prefix=/usr" +_PIP_DOWNLOAD_ARGS="" +_QUICK_START="$BS_FALSE" +_AUTO_ACCEPT_MINION_KEYS="$BS_FALSE" # Defaults for install arguments ITYPE="stable" @@ -309,21 +312,31 @@ __usage() { - onedir_rc Install latest onedir RC release. - onedir_rc [version] Install a specific version. Only supported for onedir RC packages available at repo.saltproject.io + - old-stable Install latest old stable release. + - old-stable [branch] Install latest version on a branch. Only supported + for packages available at repo.saltproject.io + - old-stable [version] Install a specific version. 
Only supported for + packages available at repo.saltproject.io + To pin a 3xxx minor version, specify it as 3xxx.0 Examples: - ${__ScriptName} - ${__ScriptName} stable - - ${__ScriptName} stable 2017.7 - - ${__ScriptName} stable 2017.7.2 + - ${__ScriptName} stable 3006 + - ${__ScriptName} stable 3006.1 - ${__ScriptName} testing - ${__ScriptName} git - ${__ScriptName} git 2017.7 - ${__ScriptName} git v2017.7.2 - ${__ScriptName} git 06f249901a2e2f1ed310d58ea3921a129f214358 - ${__ScriptName} onedir - - ${__ScriptName} onedir 3005 + - ${__ScriptName} onedir 3006 - ${__ScriptName} onedir_rc - - ${__ScriptName} onedir_rc 3005 + - ${__ScriptName} onedir_rc 3006 + - ${__ScriptName} old-stable + - ${__ScriptName} old-stable 3005 + - ${__ScriptName} old-stable 3005.1 + Options: -a Pip install all Python pkg dependencies for Salt. Requires -V to install @@ -384,11 +397,13 @@ __usage() { resort method. NOTE: This only works for functions which actually implement pip based installations. -q Quiet salt installation from git (setup.py install -q) + -Q Quickstart, install the Salt master and the Salt minion. + And automatically accept the minion key. -R Specify a custom repository URL. Assumes the custom repository URL points to a repository that mirrors Salt packages located at repo.saltproject.io. The option passed with -R replaces the "repo.saltproject.io". If -R is passed, -r is also set. Currently only - works on CentOS/RHEL and Debian based distributions. + works on CentOS/RHEL and Debian based distributions and macOS. -s Sleep time used when waiting for daemons to start, restart and when checking for the services running. Default: ${__DEFAULT_SLEEP} -S Also install salt-syndic @@ -399,12 +414,9 @@ __usage() { -v Display script version -V Install Salt into virtualenv (only available for Ubuntu based distributions) - -w Install packages from downstream package repository rather than - upstream, saltstack package repository. This is currently only - implemented for SUSE. 
-x Changes the Python version used to install Salt. For CentOS 6 git installations python2.7 is supported. - Fedora git installation, CentOS 7, Debian 9, Ubuntu 16.04 and 18.04 support python3. + Fedora git installation, CentOS 7, Ubuntu 18.04 support python3. -X Do not start daemons after installation -y Installs a different python version on host. Currently this has only been tested with CentOS 6 and is considered experimental. This will install the @@ -418,7 +430,7 @@ EOT } # ---------- end of function __usage ---------- -while getopts ':hvnDc:g:Gyx:wk:s:MSNXCPFUKIA:i:Lp:dH:bflV:J:j:rR:aq' opt +while getopts ':hvnDc:g:Gyx:k:s:MSNXCPFUKIA:i:Lp:dH:bflV:J:j:rR:aqQ' opt do case "${opt}" in @@ -434,7 +446,6 @@ do echowarn "No need to provide this option anymore, now it is a default behavior." ;; - w ) _DOWNSTREAM_PKG_REPO=$BS_TRUE ;; k ) _TEMP_KEYS_DIR="$OPTARG" ;; s ) _SLEEP=$OPTARG ;; M ) _INSTALL_MASTER=$BS_TRUE ;; @@ -463,6 +474,7 @@ do J ) _CUSTOM_MASTER_CONFIG=$OPTARG ;; j ) _CUSTOM_MINION_CONFIG=$OPTARG ;; q ) _QUIET_GIT_INSTALLATION=$BS_TRUE ;; + Q ) _QUICK_START=$BS_TRUE ;; x ) _PY_EXE="$OPTARG" ;; y ) _INSTALL_PY="$BS_TRUE" ;; @@ -593,7 +605,7 @@ if [ "$#" -gt 0 ];then fi # Check installation type -if [ "$(echo "$ITYPE" | grep -E '(stable|testing|git|onedir|onedir_rc)')" = "" ]; then +if [ "$(echo "$ITYPE" | grep -E '(stable|testing|git|onedir|onedir_rc|old-stable)')" = "" ]; then echoerror "Installation type \"$ITYPE\" is not known..." 
exit 1 fi @@ -613,20 +625,45 @@ if [ "$ITYPE" = "git" ]; then # If doing stable install, check if version specified elif [ "$ITYPE" = "stable" ]; then if [ "$#" -eq 0 ];then - STABLE_REV="latest" + ONEDIR_REV="latest" + _ONEDIR_REV="latest" + ITYPE="onedir" + else + if [ "$(echo "$1" | grep -E '^(nightly|latest|3005|3006)$')" != "" ]; then + ONEDIR_REV="$1" + _ONEDIR_REV="$1" + ITYPE="onedir" + shift + elif [ "$(echo "$1" | grep -E '^([3-9][0-5]{2}[5-9](\.[0-9]*)?)')" != "" ]; then + ONEDIR_REV="minor/$1" + _ONEDIR_REV="$1" + ITYPE="onedir" + shift + else + echo "Unknown stable version: $1 (valid: 3005, 3006, latest)" + exit 1 + fi + fi + +# If doing old-stable install, check if version specified +elif [ "$ITYPE" = "old-stable" ]; then + if [ "$#" -eq 0 ];then + ITYPE="stable" else - if [ "$(echo "$1" | grep -E '^(latest|1\.6|1\.7|2014\.1|2014\.7|2015\.5|2015\.8|2016\.3|2016\.11|2017\.7|2018\.3|2019\.2|3000|3001|3002|3003|3004|3005)$')" != "" ]; then + if [ "$(echo "$1" | grep -E '^(3003|3004|3005)$')" != "" ]; then STABLE_REV="$1" + ITYPE="stable" shift - elif [ "$(echo "$1" | grep -E '^(2[0-9]*\.[0-9]*\.[0-9]*|[3-9][0-9]{3}(\.[0-9]*)?)$')" != "" ]; then + elif [ "$(echo "$1" | grep -E '^([3-9][0-5]{3}(\.[0-9]*)?)$')" != "" ]; then # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix + ITYPE="stable" STABLE_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/') if [ "$(uname)" != "Darwin" ]; then STABLE_REV="archive/$STABLE_REV" fi shift else - echo "Unknown stable version: $1 (valid: 1.6, 1.7, 2014.1, 2014.7, 2015.5, 2015.8, 2016.3, 2016.11, 2017.7, 2018.3, 2019.2, 3000, 3001, 3002, 3003, 3004, 3005, latest, \$MAJOR.\$MINOR.\$PATCH until 2019.2, \$MAJOR or \$MAJOR.\$PATCH starting from 3000)" + echo "Unknown old stable version: $1 (valid: 3003, 3004, 3005)" exit 1 fi fi @@ -635,16 +672,19 @@ elif [ "$ITYPE" = "onedir" ]; then if [ "$#" -eq 0 ];then ONEDIR_REV="latest" else - if [ "$(echo "$1" | grep -E '^(latest|3005)$')" != 
"" ]; then + if [ "$(echo "$1" | grep -E '^(nightly|latest|3005|3006)$')" != "" ]; then ONEDIR_REV="$1" shift - elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}(\.[0-9]*)?)')" != "" ]; then - # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix - ONEDIR_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/') + elif [ "$(echo "$1" | grep -E '^(3005(\.[0-9]*)?)')" != "" ]; then + # Handle the 3005.0 version as 3005 archive (pin to minor) and strip the fake ".0" suffix + ONEDIR_REV=$(echo "$1" | sed -E 's/^(3005)\.0$/\1/') ONEDIR_REV="minor/$ONEDIR_REV" shift + elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}(\.[0-9]*)?)')" != "" ]; then + ONEDIR_REV="minor/$1" + shift else - echo "Unknown stable version: $1 (valid: 3005, latest.)" + echo "Unknown onedir version: $1 (valid: 3005, 3006, latest, nightly.)" exit 1 fi fi @@ -667,13 +707,43 @@ elif [ "$ITYPE" = "onedir_rc" ]; then #ONEDIR_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/') ONEDIR_REV="minor/$1" shift + elif [ "$(echo "$1" | grep -E '^([3-9][0-9]{3}\.[0-9]?rc[0-9]$)')" != "" ]; then + # Handle the 3xxx.0 version as 3xxx archive (pin to minor) and strip the fake ".0" suffix + #ONEDIR_REV=$(echo "$1" | sed -E 's/^([3-9][0-9]{3})\.0$/\1/') + ONEDIR_REV="minor/$1" + shift else - echo "Unknown stable version: $1 (valid: 3005-1, latest.)" + echo "Unknown onedir_rc version: $1 (valid: 3005-1, latest.)" exit 1 fi fi fi +# Doing a quick start, so install master +# set master address to 127.0.0.1 +if [ "$_QUICK_START" -eq "$BS_TRUE" ]; then + # make install type is stable + ITYPE="stable" + + # make sure the revision is latest + STABLE_REV="latest" + ONEDIR_REV="latest" + + # make sure we're installing the master + _INSTALL_MASTER=$BS_TRUE + + # override incase install minion + # is set to false + _INSTALL_MINION=$BS_TRUE + + # Set master address to loopback IP + _SALT_MASTER_ADDRESS="127.0.0.1" + + # Auto accept the minion key + # when the install is done. 
+ _AUTO_ACCEPT_MINION_KEYS=$BS_TRUE +fi + # Check for any unparsed arguments. Should be an error. if [ "$#" -gt 0 ]; then __usage @@ -877,6 +947,18 @@ __fetch_verify() { return 1 } +#--- FUNCTION ------------------------------------------------------------------------------------------------------- +# NAME: __check_url_exists +# DESCRIPTION: Checks if a URL exists +#---------------------------------------------------------------------------------------------------------------------- +__check_url_exists() { + _URL="$1" + if curl --output /dev/null --silent --fail "${_URL}"; then + return 0 + else + return 1 + fi +} #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __gather_hardware_info # DESCRIPTION: Discover hardware information @@ -1365,7 +1447,7 @@ __gather_system_info() { #---------------------------------------------------------------------------------------------------------------------- # shellcheck disable=SC2034 __ubuntu_derivatives_translation() { - UBUNTU_DERIVATIVES="(trisquel|linuxmint|linaro|elementary_os|neon)" + UBUNTU_DERIVATIVES="(trisquel|linuxmint|linaro|elementary_os|neon|pop)" # Mappings trisquel_6_ubuntu_base="12.04" linuxmint_13_ubuntu_base="12.04" @@ -1378,6 +1460,8 @@ __ubuntu_derivatives_translation() { neon_16_ubuntu_base="16.04" neon_18_ubuntu_base="18.04" neon_20_ubuntu_base="20.04" + neon_22_ubuntu_base="22.04" + pop_22_ubuntu_base="22.04" # Translate Ubuntu derivatives to their base Ubuntu version match=$(echo "$DISTRO_NAME_L" | grep -E ${UBUNTU_DERIVATIVES}) @@ -1437,9 +1521,13 @@ __check_dpkg_architecture() { if [ "$_CUSTOM_REPO_URL" != "null" ]; then warn_msg="Support for arm64 is experimental, make sure the custom repository used has the expected structure and contents." 
else - # Saltstack official repository does not yet have arm64 metadata, - # use arm64 repositories on arm64, since all pkgs are arch-independent - __REPO_ARCH="arm64" + # Saltstack official repository has arm64 metadata beginning with Debian 11, + # use amd64 repositories on arm64 for anything older, since all pkgs are arch-independent + if [ "$DISTRO_NAME_L" = "debian" ] && [ "$DISTRO_MAJOR_VERSION" -lt 11 ]; then + __REPO_ARCH="amd64" + else + __REPO_ARCH="arm64" + fi __REPO_ARCH_DEB="deb [signed-by=/usr/share/keyrings/salt-archive-keyring.gpg arch=$__REPO_ARCH]" warn_msg="Support for arm64 packages is experimental and might rely on architecture-independent packages from the amd64 repository." fi @@ -1621,6 +1709,14 @@ __debian_codename_translation() { "11") DISTRO_CODENAME="bullseye" ;; + "12") + DISTRO_CODENAME="bookworm" + # FIXME - TEMPORARY + # use bullseye packages until bookworm packages are available + DISTRO_CODENAME="bullseye" + DISTRO_MAJOR_VERSION=11 + rv=11 + ;; *) DISTRO_CODENAME="stretch" ;; @@ -1914,10 +2010,6 @@ if [ "$ITYPE" = "git" ]; then if [ "$__NEW_VS_TAG_REGEX_MATCH" = "MATCH" ]; then _POST_NEON_INSTALL=$BS_TRUE __TAG_REGEX_MATCH="${__NEW_VS_TAG_REGEX_MATCH}" - if [ "$(echo "${GIT_REV}" | cut -c -1)" != "v" ]; then - # We do this to properly clone tags - GIT_REV="v${GIT_REV}" - fi echodebug "Post Neon Tag Regex Match On: ${GIT_REV}" else __TAG_REGEX_MATCH=$(echo "${GIT_REV}" | sed -E 's/^(v?[0-9]{1,4}\.[0-9]{1,2})(\.[0-9]{1,2})?.*$/MATCH/') @@ -1929,10 +2021,6 @@ if [ "$ITYPE" = "git" ]; then if [ "$__NEW_VS_TAG_REGEX_MATCH" = "MATCH" ]; then _POST_NEON_INSTALL=$BS_TRUE __TAG_REGEX_MATCH="${__NEW_VS_TAG_REGEX_MATCH}" - if [ "$(echo "${GIT_REV}" | cut -c -1)" != "v" ]; then - # We do this to properly clone tags - GIT_REV="v${GIT_REV}" - fi echodebug "Post Neon Tag Regex Match On: ${GIT_REV}" else __TAG_REGEX_MATCH=$(echo "${GIT_REV}" | sed 's/^.*\(v\?[[:digit:]]\{1,4\}\.[[:digit:]]\{1,2\}\)\(\.[[:digit:]]\{1,2\}\)\?.*$/MATCH/') @@ -2095,20 
+2183,13 @@ __rpm_import_gpg() { #---------------------------------------------------------------------------------------------------------------------- __yum_install_noinput() { - ENABLE_EPEL_CMD="" - # Skip Amazon Linux for the first round, since EPEL is no longer required. - # See issue #724 - if [ $_DISABLE_REPOS -eq $BS_FALSE ] && [ "$DISTRO_NAME_L" != "amazon_linux_ami" ]; then - ENABLE_EPEL_CMD="--enablerepo=${_EPEL_REPO}" - fi - if [ "$DISTRO_NAME_L" = "oracle_linux" ]; then # We need to install one package at a time because --enablerepo=X disables ALL OTHER REPOS!!!! for package in "${@}"; do - yum -y install "${package}" || yum -y install "${package}" ${ENABLE_EPEL_CMD} || return $? + yum -y install "${package}" || yum -y install "${package}" || return $? done else - yum -y install "${@}" ${ENABLE_EPEL_CMD} || return $? + yum -y install "${@}" || return $? fi } # ---------- end of function __yum_install_noinput ---------- @@ -2121,6 +2202,15 @@ __dnf_install_noinput() { dnf -y install "${@}" || return $? } # ---------- end of function __dnf_install_noinput ---------- +#--- FUNCTION ------------------------------------------------------------------------------------------------------- +# NAME: __tdnf_install_noinput +# DESCRIPTION: (DRY) tdnf install with noinput options +#---------------------------------------------------------------------------------------------------------------------- +__tdnf_install_noinput() { + + tdnf -y install "${@}" || return $? +} # ---------- end of function __tdnf_install_noinput ---------- + #--- FUNCTION ------------------------------------------------------------------------------------------------------- # NAME: __git_clone_and_checkout # DESCRIPTION: (DRY) Helper function to clone and checkout salt to a @@ -2646,7 +2736,7 @@ __activate_virtualenv() { # NAME: __install_pip_pkgs # DESCRIPTION: Return 0 or 1 if successfully able to install pip packages. Can provide a different python version to # install pip packages with. 
If $py_ver is not specified it will use the default python version. -# PARAMETERS: pkgs, py_ver +# PARAMETERS: pkgs, py_ver, upgrade #---------------------------------------------------------------------------------------------------------------------- __install_pip_pkgs() { @@ -2815,15 +2905,15 @@ EOM fi echodebug "Running '${_pip_cmd} install wheel ${_setuptools_dep}'" - ${_pip_cmd} install ${_POST_NEON_PIP_INSTALL_ARGS} wheel "${_setuptools_dep}" + ${_pip_cmd} install --upgrade ${_POST_NEON_PIP_INSTALL_ARGS} wheel "${_setuptools_dep}" echoinfo "Installing salt using ${_py_exe}" cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1 mkdir /tmp/git/deps echoinfo "Downloading Salt Dependencies from PyPi" - echodebug "Running '${_pip_cmd} download -d /tmp/git/deps .'" - ${_pip_cmd} download -d /tmp/git/deps . || (echo "Failed to download salt dependencies" && return 1) + echodebug "Running '${_pip_cmd} download -d /tmp/git/deps ${_PIP_DOWNLOAD_ARGS} .'" + ${_pip_cmd} download -d /tmp/git/deps ${_PIP_DOWNLOAD_ARGS} . 
|| (echo "Failed to download salt dependencies" && return 1) echoinfo "Installing Downloaded Salt Dependencies" echodebug "Running '${_pip_cmd} install --ignore-installed ${_POST_NEON_PIP_INSTALL_ARGS} /tmp/git/deps/*'" @@ -3057,9 +3147,19 @@ __install_saltstack_ubuntu_onedir_repository() { # SaltStack's stable Ubuntu repository: SALTSTACK_UBUNTU_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/${ONEDIR_REV}/" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + SALTSTACK_UBUNTU_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/ubuntu/${UBUNTU_VERSION}/${__REPO_ARCH}/" + fi echo "$__REPO_ARCH_DEB $SALTSTACK_UBUNTU_URL $UBUNTU_CODENAME main" > /etc/apt/sources.list.d/salt.list - __apt_key_fetch "${SALTSTACK_UBUNTU_URL}salt-archive-keyring.gpg" || return 1 + if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ]; then + __apt_key_fetch "${SALTSTACK_UBUNTU_URL}salt-archive-keyring.gpg" || return 1 + elif [ "$(echo "${ONEDIR_REV}" | grep -E '(latest|nightly)')" != "" ]; then + __apt_key_fetch "${SALTSTACK_UBUNTU_URL}salt-archive-keyring.gpg" || \ + __apt_key_fetch "${SALTSTACK_UBUNTU_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1 + else + __apt_key_fetch "${SALTSTACK_UBUNTU_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1 + fi __wait_for_apt apt-get update || return 1 } @@ -3318,7 +3418,15 @@ install_ubuntu_git() { _POST_NEON_PIP_INSTALL_ARGS="" __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1 cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1 - sed -i 's:/usr/bin:/usr/local/bin:g' pkg/*.service + + # Account for new path for services files in later releases + if [ -d "pkg/common" ]; then + _SERVICE_DIR="pkg/common" + else + _SERVICE_DIR="pkg" + fi + + sed -i 's:/usr/bin:/usr/local/bin:g' ${_SERVICE_DIR}/*.service return 0 fi @@ -3390,8 +3498,15 @@ install_ubuntu_git_post() { [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ 
"$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg" + fi + if [ -f /bin/systemctl ] && [ "$DISTRO_MAJOR_VERSION" -ge 16 ]; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" # Skip salt-api since the service should be opt-in and not necessarily started on boot [ $fname = "api" ] && continue @@ -3406,8 +3521,8 @@ install_ubuntu_git_post() { if [ ! -f $_upstart_conf ]; then # upstart does not know about our service, let's copy the proper file echowarn "Upstart does not appear to know about salt-$fname" - echodebug "Copying ${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-$fname.upstart to $_upstart_conf" - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.upstart" "$_upstart_conf" + echodebug "Copying ${_SERVICE_DIR}/salt-$fname.upstart to $_upstart_conf" + __copyfile "${_SERVICE_DIR}/salt-${fname}.upstart" "$_upstart_conf" # Set service to know about virtualenv if [ "${_VIRTUALENV_DIR}" != "null" ]; then echo "SALT_USE_VIRTUALENV=${_VIRTUALENV_DIR}" > /etc/default/salt-${fname} @@ -3579,9 +3694,19 @@ __install_saltstack_debian_onedir_repository() { # amd64 is just a part of repository URI, 32-bit pkgs are hosted under the same location SALTSTACK_DEBIAN_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/debian/${DEBIAN_RELEASE}/${__REPO_ARCH}/${ONEDIR_REV}/" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + SALTSTACK_DEBIAN_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/debian/${DEBIAN_RELEASE}/${__REPO_ARCH}/" + fi echo "$__REPO_ARCH_DEB $SALTSTACK_DEBIAN_URL $DEBIAN_CODENAME main" > "/etc/apt/sources.list.d/salt.list" - 
__apt_key_fetch "${SALTSTACK_DEBIAN_URL}salt-archive-keyring.gpg" || return 1 + if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ]; then + __apt_key_fetch "${SALTSTACK_DEBIAN_URL}salt-archive-keyring.gpg" || return 1 + elif [ "$(echo "${ONEDIR_REV}" | grep -E '(latest|nightly)')" != "" ]; then + __apt_key_fetch "${SALTSTACK_DEBIAN_URL}salt-archive-keyring.gpg" || \ + __apt_key_fetch "${SALTSTACK_DEBIAN_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1 + else + __apt_key_fetch "${SALTSTACK_DEBIAN_URL}SALT-PROJECT-GPG-PUBKEY-2023.gpg" || return 1 + fi __wait_for_apt apt-get update || return 1 } @@ -3940,7 +4065,15 @@ install_debian_git() { _POST_NEON_PIP_INSTALL_ARGS="" __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1 cd "${_SALT_GIT_CHECKOUT_DIR}" || return 1 - sed -i 's:/usr/bin:/usr/local/bin:g' pkg/*.service + + # Account for new path for services files in later releases + if [ -d "pkg/common" ]; then + _SERVICE_DIR="pkg/common" + else + _SERVICE_DIR="pkg" + fi + + sed -i 's:/usr/bin:/usr/local/bin:g' ${_SERVICE_DIR}/*.service return 0 fi @@ -3999,16 +4132,23 @@ install_debian_git_post() { [ "$fname" = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ "$fname" = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg" + fi + # Configure SystemD for Debian 8 "Jessie" and later if [ -f /bin/systemctl ]; then if [ ! 
-f /lib/systemd/system/salt-${fname}.service ] || \ { [ -f /lib/systemd/system/salt-${fname}.service ] && [ $_FORCE_OVERWRITE -eq $BS_TRUE ]; }; then - if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" ]; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" /lib/systemd/system - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.environment" "/etc/default/salt-${fname}" + if [ -f "${_SERVICE_DIR}/salt-${fname}.service" ]; then + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" /lib/systemd/system + __copyfile "${_SERVICE_DIR}/salt-${fname}.environment" "/etc/default/salt-${fname}" else # workaround before adding Debian-specific unit files to the Salt main repo - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" /lib/systemd/system + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" /lib/systemd/system sed -i -e '/^Type/ s/notify/simple/' /lib/systemd/system/salt-${fname}.service fi fi @@ -4103,6 +4243,41 @@ install_debian_check_services() { # Fedora Install Functions # +__install_saltstack_fedora_onedir_repository() { + if [ "$ITYPE" = "stable" ]; then + REPO_REV="$ONEDIR_REV" + else + REPO_REV="latest" + fi + + __PY_VERSION_REPO="yum" + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + __PY_VERSION_REPO="py3" + fi + + GPG_KEY="SALT-PROJECT-GPG-PUBKEY-2023.pub" + + REPO_FILE="/etc/yum.repos.d/salt.repo" + + if [ ! 
-s "$REPO_FILE" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then + FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/fedora/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${ONEDIR_REV}" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/fedora/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/" + fi + + __fetch_url "${REPO_FILE}" "${FETCH_URL}.repo" + + __rpm_import_gpg "${FETCH_URL}/${GPG_KEY}" || return 1 + + yum clean metadata || return 1 + elif [ "$REPO_REV" != "latest" ]; then + echowarn "salt.repo already exists, ignoring salt version argument." + echowarn "Use -F (forced overwrite) to install $REPO_REV." + fi + + return 0 +} + install_fedora_deps() { if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then dnf -y update || return 1 @@ -4308,12 +4483,18 @@ install_fedora_git_post() { [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm" + fi + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" # Salt executables are located under `/usr/local/bin/` on Fedora 36+ - if [ "${DISTRO_VERSION}" -ge 36 ]; then - sed -i -e 's:/usr/bin/:/usr/local/bin/:g' /lib/systemd/system/salt-*.service - fi + #if [ "${DISTRO_VERSION}" -ge 36 ]; then + # sed -i -e 's:/usr/bin/:/usr/local/bin/:g' /lib/systemd/system/salt-*.service + #fi # Skip salt-api since the service should be opt-in and not necessarily started on boot [ $fname = "api" ] && continue @@ -4361,40 +4542,99 @@ install_fedora_check_services() { return 0 } -# 
-# Ended Fedora Install Functions -# -####################################################################################################################### -####################################################################################################################### -# -# CentOS Install Functions -# -__install_epel_repository() { - if [ ${_EPEL_REPOS_INSTALLED} -eq $BS_TRUE ]; then - return 0 +install_fedora_onedir_deps() { + + if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then + yum -y update || return 1 fi - # Check if epel repo is already enabled and flag it accordingly - if yum repolist | grep -q "^[!]\\?${_EPEL_REPO}/"; then - _EPEL_REPOS_INSTALLED=$BS_TRUE - return 0 + if [ "$_DISABLE_REPOS" -eq "$BS_TRUE" ] && [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + echowarn "Detected -r or -R option while installing Salt packages for Python 3." + echowarn "Python 3 packages for older Salt releases requires the EPEL repository to be installed." + echowarn "Installing the EPEL repository automatically is disabled when using the -r or -R options." + fi + + if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ]; then + __install_saltstack_fedora_onedir_repository || return 1 + fi + + # If -R was passed, we need to configure custom repo url with rsync-ed packages + # Which is still handled in __install_saltstack_rhel_repository. This call has + # its own check in case -r was passed without -R. 
+ if [ "$_CUSTOM_REPO_URL" != "null" ]; then + __install_saltstack_fedora_onedir_repository || return 1 + fi + + if [ "$DISTRO_MAJOR_VERSION" -ge 8 ]; then + __PACKAGES="dnf-utils chkconfig" + else + __PACKAGES="yum-utils chkconfig" fi - # Download latest 'epel-next-release' package for the distro version directly - epel_next_repo_url="${HTTP_VAL}://dl.fedoraproject.org/pub/epel/epel-next-release-latest-${DISTRO_MAJOR_VERSION}.noarch.rpm" + __PACKAGES="${__PACKAGES} procps" - # Download latest 'epel-release' package for the distro version directly - epel_repo_url="${HTTP_VAL}://dl.fedoraproject.org/pub/epel/epel-release-latest-${DISTRO_MAJOR_VERSION}.noarch.rpm" + # shellcheck disable=SC2086 + __yum_install_noinput ${__PACKAGES} || return 1 + + if [ "${_EXTRA_PACKAGES}" != "" ]; then + echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}" + # shellcheck disable=SC2086 + __yum_install_noinput ${_EXTRA_PACKAGES} || return 1 + fi + + return 0 + +} + + +install_fedora_onedir() { + STABLE_REV=$ONEDIR_REV + #install_fedora_stable || return 1 + + __PACKAGES="" + + if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-cloud" + fi + if [ "$_INSTALL_MASTER" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-master" + fi + if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then + __PACKAGES="${__PACKAGES} salt-minion" + fi + if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-syndic" + fi + + # shellcheck disable=SC2086 + __yum_install_noinput ${__PACKAGES} || return 1 - yum -y install "${epel_next_repo_url}" "${epel_repo_url}" + return 0 +} - _EPEL_REPOS_INSTALLED=$BS_TRUE +install_fedora_onedir_post() { + STABLE_REV=$ONEDIR_REV + install_fedora_stable_post || return 1 return 0 } +# +# Ended Fedora Install Functions +# +####################################################################################################################### 
+####################################################################################################################### +# +# CentOS Install Functions +# __install_saltstack_rhel_repository() { + if [ "${DISTRO_MAJOR_VERSION}" -ge 9 ]; then + echoerror "Old stable repository unavailable on RH variants greater than or equal to 9" + echoerror "Use the stable install type." + exit 1 + fi + if [ "$ITYPE" = "stable" ]; then repo_rev="$STABLE_REV" else @@ -4465,10 +4705,17 @@ __install_saltstack_rhel_onedir_repository() { # Avoid using '$releasever' variable for yum. # Instead, this should work correctly on all RHEL variants. base_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/\$basearch/${ONEDIR_REV}/" - if [ "${DISTRO_MAJOR_VERSION}" -eq 9 ]; then - gpg_key="SALTSTACK-GPG-KEY2.pub" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + base_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/\$basearch/" + fi + if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ] || [ "${ONEDIR_REV}" = "nightly" ]; then + if [ "${DISTRO_MAJOR_VERSION}" -eq 9 ]; then + gpg_key="SALTSTACK-GPG-KEY2.pub" + else + gpg_key="SALTSTACK-GPG-KEY.pub" + fi else - gpg_key="SALTSTACK-GPG-KEY.pub" + gpg_key="SALT-PROJECT-GPG-PUBKEY-2023.pub" fi gpg_key_urls="" @@ -4491,6 +4738,9 @@ enabled_metadata=1 _eof fetch_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${ONEDIR_REV}/" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + fetch_url="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/redhat/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/" + fi for key in $gpg_key; do __rpm_import_gpg "${fetch_url}${key}" || return 1 done @@ -4516,7 +4766,6 @@ install_centos_stable_deps() { fi if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ]; then - __install_epel_repository || return 1 __install_saltstack_rhel_repository || return 1 fi @@ -4558,6 
+4807,8 @@ install_centos_stable_deps() { fi fi + __PACKAGES="${__PACKAGES} procps" + # shellcheck disable=SC2086 __yum_install_noinput ${__PACKAGES} || return 1 @@ -4590,6 +4841,13 @@ install_centos_stable() { # shellcheck disable=SC2086 __yum_install_noinput ${__PACKAGES} || return 1 + # Workaround for 3.11 broken on CentOS Stream 8.x + # Re-install Python 3.6 + _py_version=$(${_PY_EXE} -c "import sys; print('{0}.{1}'.format(*sys.version_info))") + if [ "$DISTRO_MAJOR_VERSION" -eq 8 ] && [ "${_py_version}" = "3.11" ]; then + __yum_install_noinput python3 + fi + return 0 } @@ -4625,7 +4883,14 @@ install_centos_stable_post() { } install_centos_git_deps() { - install_centos_stable_deps || return 1 + # First try stable deps then fall back to onedir deps if that one fails + # if we're installing on a Red Hat based host that doesn't have the classic + # package repos available. + # Set ONEDIR_REV to STABLE_REV in case we + # end up calling install_centos_onedir_deps + ONEDIR_REV=${STABLE_REV} + install_centos_onedir_deps || \ + return 1 if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then __yum_install_noinput ca-certificates || return 1 @@ -4785,10 +5050,16 @@ install_centos_git_post() { [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_FILE="${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" + else + _SERVICE_FILE="${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm/salt-${fname}.service" + fi if [ -f /bin/systemctl ]; then if [ ! 
-f "/usr/lib/systemd/system/salt-${fname}.service" ] || \ { [ -f "/usr/lib/systemd/system/salt-${fname}.service" ] && [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; }; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm/salt-${fname}.service" /usr/lib/systemd/system + __copyfile "${_SERVICE_FILE}" /usr/lib/systemd/system fi SYSTEMD_RELOAD=$BS_TRUE @@ -4820,7 +5091,6 @@ install_centos_onedir_deps() { fi if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ]; then - __install_epel_repository || return 1 __install_saltstack_rhel_onedir_repository || return 1 fi @@ -4837,6 +5107,8 @@ install_centos_onedir_deps() { __PACKAGES="yum-utils chkconfig" fi + __PACKAGES="${__PACKAGES} procps" + # shellcheck disable=SC2086 __yum_install_noinput ${__PACKAGES} || return 1 @@ -5344,6 +5616,11 @@ install_oracle_linux_git_post() { return 0 } +install_oracle_linux_onedir_post() { + install_centos_onedir_post || return 1 + return 0 +} + install_oracle_linux_testing_post() { install_centos_testing_post || return 1 return 0 @@ -5417,6 +5694,11 @@ install_almalinux_git_post() { return 0 } +install_almalinux_onedir_post() { + install_centos_onedir_post || return 1 + return 0 +} + install_almalinux_testing_post() { install_centos_testing_post || return 1 return 0 @@ -5490,6 +5772,11 @@ install_rocky_linux_git_post() { return 0 } +install_rocky_linux_onedir_post() { + install_centos_onedir_post || return 1 + return 0 +} + install_rocky_linux_testing_post() { install_centos_testing_post || return 1 return 0 @@ -5563,6 +5850,11 @@ install_scientific_linux_git_post() { return 0 } +install_scientific_linux_onedir_post() { + install_centos_onedir_post || return 1 + return 0 +} + install_scientific_linux_testing_post() { install_centos_testing_post || return 1 return 0 @@ -6206,9 +6498,17 @@ install_amazon_linux_ami_2_onedir_deps() { fi base_url="$HTTP_VAL://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/amazon/2/\$basearch/$repo_rev/" - gpg_key="${base_url}SALTSTACK-GPG-KEY.pub,${base_url}base/RPM-GPG-KEY-CentOS-7" 
- if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + if [ "${ONEDIR_REV}" = "nightly" ] ; then + base_url="$HTTP_VAL://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/amazon/2/\$basearch/" + fi + + if [ "$(echo "${ONEDIR_REV}" | grep -E '(3004|3005)')" != "" ] || [ "${ONEDIR_REV}" = "nightly" ]; then + gpg_key="${base_url}SALTSTACK-GPG-KEY.pub,${base_url}base/RPM-GPG-KEY-CentOS-7" + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then gpg_key="${base_url}SALTSTACK-GPG-KEY.pub" + fi + else + gpg_key="${base_url}SALT-PROJECT-GPG-PUBKEY-2023.pub" fi # This should prob be refactored to use __install_saltstack_rhel_repository() @@ -6427,6 +6727,10 @@ install_arch_linux_git_deps() { return 0 } +install_arch_linux_onedir_deps() { + install_arch_linux_stable_deps || return 1 +} + install_arch_linux_stable() { # Pacman does not resolve dependencies on outdated versions # They always need to be updated @@ -6445,6 +6749,8 @@ install_arch_linux_stable() { install_arch_linux_git() { + _POST_NEON_PIP_INSTALL_ARGS="${_POST_NEON_PIP_INSTALL_ARGS} --use-pep517" + _PIP_DOWNLOAD_ARGS="${_PIP_DOWNLOAD_ARGS} --use-pep517" if [ "${_POST_NEON_INSTALL}" -eq $BS_TRUE ]; then __install_salt_from_repo_post_neon "${_PY_EXE}" || return 1 return 0 @@ -6502,8 +6808,15 @@ install_arch_linux_git_post() { [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm" + fi + if [ -f /usr/bin/systemctl ]; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" # Skip salt-api since 
the service should be opt-in and not necessarily started on boot [ $fname = "api" ] && continue @@ -6570,6 +6883,18 @@ install_arch_check_services() { return 0 } + +install_arch_linux_onedir() { + install_arch_linux_stable || return 1 + + return 0 +} + +install_arch_linux_onedir_post() { + install_arch_linux_post || return 1 + + return 0 +} # # Ended Arch Install Functions # @@ -6577,53 +6902,374 @@ install_arch_check_services() { ####################################################################################################################### # -# FreeBSD Install Functions +# Photon OS Install Functions # -# Using a separate conf step to head for idempotent install... -__configure_freebsd_pkg_details() { - _SALT_ETC_DIR="/usr/local/etc/salt" - _PKI_DIR=${_SALT_ETC_DIR}/pki - _POST_NEON_PIP_INSTALL_ARGS="--prefix=/usr/local" -} +__install_saltstack_photon_onedir_repository() { + if [ "$ITYPE" = "stable" ]; then + REPO_REV="$ONEDIR_REV" + else + REPO_REV="latest" + fi -install_freebsd_deps() { - __configure_freebsd_pkg_details - pkg install -y pkg -} + __PY_VERSION_REPO="yum" + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + __PY_VERSION_REPO="py3" + fi -install_freebsd_git_deps() { - install_freebsd_deps || return 1 + REPO_FILE="/etc/yum.repos.d/salt.repo" - if ! __check_command_exists git; then - /usr/local/sbin/pkg install -y git || return 1 - fi - __git_clone_and_checkout || return 1 + if [ ! 
-s "$REPO_FILE" ] || [ "$_FORCE_OVERWRITE" -eq $BS_TRUE ]; then + FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/photon/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/${ONEDIR_REV}" + if [ "${ONEDIR_REV}" = "nightly" ] ; then + FETCH_URL="${HTTP_VAL}://${_REPO_URL}/${_ONEDIR_NIGHTLY_DIR}/${__PY_VERSION_REPO}/photon/${DISTRO_MAJOR_VERSION}/${CPU_ARCH_L}/" + fi - if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then + __fetch_url "${REPO_FILE}" "${FETCH_URL}.repo" - SALT_DEPENDENCIES=$(/usr/local/sbin/pkg rquery %dn py39-salt) - # shellcheck disable=SC2086 - /usr/local/sbin/pkg install -y ${SALT_DEPENDENCIES} python || return 1 + GPG_KEY="SALT-PROJECT-GPG-PUBKEY-2023.pub" - /usr/local/sbin/pkg install -y py39-requests || return 1 - /usr/local/sbin/pkg install -y py39-tornado4 || return 1 + __rpm_import_gpg "${FETCH_URL}/${GPG_KEY}" || return 1 - else - /usr/local/sbin/pkg install -y python py39-pip py39-setuptools libzmq4 libunwind || return 1 + tdnf makecache || return 1 + elif [ "$REPO_REV" != "latest" ]; then + echowarn "salt.repo already exists, ignoring salt version argument." + echowarn "Use -F (forced overwrite) to install $REPO_REV." fi - echodebug "Adapting paths to FreeBSD" - # The list of files was taken from Salt's BSD port Makefile - for file in doc/man/salt-key.1 doc/man/salt-cp.1 doc/man/salt-minion.1 \ - doc/man/salt-syndic.1 doc/man/salt-master.1 doc/man/salt-run.1 \ - doc/man/salt.7 doc/man/salt.1 doc/man/salt-call.1; do - [ ! 
-f $file ] && continue - echodebug "Patching ${file}" - sed -in -e "s|/etc/salt|${_SALT_ETC_DIR}|" \ - -e "s|/srv/salt|${_SALT_ETC_DIR}/states|" \ - -e "s|/srv/pillar|${_SALT_ETC_DIR}/pillar|" ${file} - done + return 0 +} + +install_photon_deps() { + if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then + tdnf -y update || return 1 + fi + + __PACKAGES="${__PACKAGES:=}" + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -lt 3 ]; then + echoerror "There are no Python 2 stable packages for Fedora, only Py3 packages" + return 1 + fi + + PY_PKG_VER=3 + + __PACKAGES="${__PACKAGES} libyaml procps-ng python${PY_PKG_VER}-crypto python${PY_PKG_VER}-jinja2" + __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-msgpack python${PY_PKG_VER}-requests python${PY_PKG_VER}-zmq" + __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-pip python${PY_PKG_VER}-m2crypto python${PY_PKG_VER}-pyyaml" + __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-systemd" + if [ "${_EXTRA_PACKAGES}" != "" ]; then + echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}" + fi + + # shellcheck disable=SC2086 + __tdnf_install_noinput ${__PACKAGES} ${_EXTRA_PACKAGES} || return 1 + + return 0 +} + +install_photon_stable_post() { + for fname in api master minion syndic; do + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + # Skip if not meant to be installed + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service) + sleep 1 + systemctl daemon-reload + done +} + +install_photon_git_deps() { + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + # Packages are named python3- + PY_PKG_VER=3 + else + PY_PKG_VER=2 + fi + + __PACKAGES="" + if 
! __check_command_exists ps; then + __PACKAGES="${__PACKAGES} procps-ng" + fi + if ! __check_command_exists git; then + __PACKAGES="${__PACKAGES} git" + fi + + if [ -n "${__PACKAGES}" ]; then + # shellcheck disable=SC2086 + __tdnf_install_noinput ${__PACKAGES} || return 1 + __PACKAGES="" + fi + + __git_clone_and_checkout || return 1 + + if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then + + if [ "$_INSECURE_DL" -eq $BS_FALSE ] && [ "${_SALT_REPO_URL%%://*}" = "https" ]; then + __PACKAGES="${__PACKAGES} ca-certificates" + fi + if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ]; then + __PACKAGES="${__PACKAGES} python${PY_PKG_VER}-libcloud python${PY_PKG_VER}-netaddr" + fi + + install_photon_deps || return 1 + + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + if __check_command_exists python3; then + __python="python3" + fi + elif [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 2 ]; then + if __check_command_exists python2; then + __python="python2" + fi + else + if ! __check_command_exists python; then + echoerror "Unable to find a python binary?!" 
+ return 1 + fi + # Let's hope it's the right one + __python="python" + fi + + grep tornado "${_SALT_GIT_CHECKOUT_DIR}/requirements/base.txt" | while IFS=' + ' read -r dep; do + echodebug "Running '${__python}' -m pip install '${dep}'" + "${__python}" -m pip install "${dep}" || return 1 + done + else + __PACKAGES="python${PY_PKG_VER}-devel python${PY_PKG_VER}-pip python${PY_PKG_VER}-setuptools gcc glibc-devel linux-devel.x86_64" + # shellcheck disable=SC2086 + __tdnf_install_noinput ${__PACKAGES} || return 1 + fi + + if [ "${DISTRO_MAJOR_VERSION}" -gt 3 ]; then + # Need newer version of setuptools on Photon + _setuptools_dep="setuptools>=${_MINIMUM_SETUPTOOLS_VERSION}" + echodebug "Running '${_PY_EXE} -m pip --upgrade install ${_setuptools_dep}'" + ${_PY_EXE} -m pip install --upgrade "${_setuptools_dep}" + fi + + # Let's trigger config_salt() + if [ "$_TEMP_CONFIG_DIR" = "null" ]; then + _TEMP_CONFIG_DIR="${_SALT_GIT_CHECKOUT_DIR}/conf/" + CONFIG_SALT_FUNC="config_salt" + fi + + return 0 +} + +install_photon_git() { + if [ "${_PY_EXE}" != "" ]; then + _PYEXE=${_PY_EXE} + echoinfo "Using the following python version: ${_PY_EXE} to install salt" + else + _PYEXE='python2' + fi + + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/salt/syspaths.py" ]; then + ${_PYEXE} setup.py --salt-config-dir="$_SALT_ETC_DIR" --salt-cache-dir="${_SALT_CACHE_DIR}" ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1 + else + ${_PYEXE} setup.py ${SETUP_PY_INSTALL_ARGS} install --prefix=/usr || return 1 + fi + return 0 +} + +install_photon_git_post() { + for fname in api master minion syndic; do + # Skip if not meant to be installed + [ $fname = "api" ] && \ + ([ "$_INSTALL_MASTER" -eq $BS_FALSE ] || ! 
__check_command_exists "salt-${fname}") && continue + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/rpm" + fi + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + + # Salt executables are located under `/usr/local/bin/` on Fedora 36+ + #if [ "${DISTRO_VERSION}" -ge 36 ]; then + # sed -i -e 's:/usr/bin/:/usr/local/bin/:g' /lib/systemd/system/salt-*.service + #fi + + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + systemctl is-enabled salt-$fname.service || (systemctl preset salt-$fname.service && systemctl enable salt-$fname.service) + sleep 1 + systemctl daemon-reload + done +} + +install_photon_restart_daemons() { + [ $_START_DAEMONS -eq $BS_FALSE ] && return + + for fname in api master minion syndic; do + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + # Skip if not meant to be installed + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + systemctl stop salt-$fname > /dev/null 2>&1 + systemctl start salt-$fname.service && continue + echodebug "Failed to start salt-$fname using systemd" + if [ "$_ECHO_DEBUG" -eq $BS_TRUE ]; then + systemctl status salt-$fname.service + journalctl -xe + fi + done +} + +install_photon_check_services() { + for fname in api master minion 
syndic; do + # Skip salt-api since the service should be opt-in and not necessarily started on boot + [ $fname = "api" ] && continue + + # Skip if not meant to be installed + [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue + [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue + [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + + __check_services_systemd salt-$fname || return 1 + done + + return 0 +} + +install_photon_onedir_deps() { + + if [ "$_UPGRADE_SYS" -eq $BS_TRUE ]; then + tdnf -y update || return 1 + fi + + if [ "$_DISABLE_REPOS" -eq "$BS_TRUE" ] && [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + echowarn "Detected -r or -R option while installing Salt packages for Python 3." + echowarn "Python 3 packages for older Salt releases requires the EPEL repository to be installed." + echowarn "Installing the EPEL repository automatically is disabled when using the -r or -R options." + fi + + if [ "$_DISABLE_REPOS" -eq "$BS_FALSE" ]; then + __install_saltstack_photon_onedir_repository || return 1 + fi + + # If -R was passed, we need to configure custom repo url with rsync-ed packages + # Which is still handled in __install_saltstack_rhel_repository. This call has + # its own check in case -r was passed without -R. 
+ if [ "$_CUSTOM_REPO_URL" != "null" ]; then + __install_saltstack_photon_onedir_repository || return 1 + fi + + __PACKAGES="procps-ng" + + # shellcheck disable=SC2086 + __tdnf_install_noinput ${__PACKAGES} || return 1 + + if [ "${_EXTRA_PACKAGES}" != "" ]; then + echoinfo "Installing the following extra packages as requested: ${_EXTRA_PACKAGES}" + # shellcheck disable=SC2086 + __tdnf_install_noinput ${_EXTRA_PACKAGES} || return 1 + fi + + return 0 + +} + + +install_photon_onedir() { + STABLE_REV=$ONEDIR_REV + + __PACKAGES="" + + if [ "$_INSTALL_CLOUD" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-cloud" + fi + if [ "$_INSTALL_MASTER" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-master" + fi + if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then + __PACKAGES="${__PACKAGES} salt-minion" + fi + if [ "$_INSTALL_SYNDIC" -eq $BS_TRUE ];then + __PACKAGES="${__PACKAGES} salt-syndic" + fi + + # shellcheck disable=SC2086 + __tdnf_install_noinput ${__PACKAGES} || return 1 + + return 0 +} + +install_photon_onedir_post() { + STABLE_REV=$ONEDIR_REV + install_photon_stable_post || return 1 + + return 0 +} +# +# Ended Fedora Install Functions +# +####################################################################################################################### + +####################################################################################################################### +# +# FreeBSD Install Functions +# + +# Using a separate conf step to head for idempotent install... +__configure_freebsd_pkg_details() { + _SALT_ETC_DIR="/usr/local/etc/salt" + _PKI_DIR=${_SALT_ETC_DIR}/pki + _POST_NEON_PIP_INSTALL_ARGS="--prefix=/usr/local" +} + +install_freebsd_deps() { + __configure_freebsd_pkg_details + pkg install -y pkg +} + +install_freebsd_git_deps() { + install_freebsd_deps || return 1 + + if ! 
__check_command_exists git; then + /usr/local/sbin/pkg install -y git || return 1 + fi + __git_clone_and_checkout || return 1 + + if [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then + + SALT_DEPENDENCIES=$(/usr/local/sbin/pkg rquery %dn py39-salt) + # shellcheck disable=SC2086 + /usr/local/sbin/pkg install -y ${SALT_DEPENDENCIES} python || return 1 + + /usr/local/sbin/pkg install -y py39-requests || return 1 + /usr/local/sbin/pkg install -y py39-tornado4 || return 1 + + else + /usr/local/sbin/pkg install -y python py39-pip py39-setuptools libzmq4 libunwind || return 1 + fi + + echodebug "Adapting paths to FreeBSD" + # The list of files was taken from Salt's BSD port Makefile + for file in doc/man/salt-key.1 doc/man/salt-cp.1 doc/man/salt-minion.1 \ + doc/man/salt-syndic.1 doc/man/salt-master.1 doc/man/salt-run.1 \ + doc/man/salt.7 doc/man/salt.1 doc/man/salt-call.1; do + [ ! -f $file ] && continue + echodebug "Patching ${file}" + sed -in -e "s|/etc/salt|${_SALT_ETC_DIR}|" \ + -e "s|/srv/salt|${_SALT_ETC_DIR}/states|" \ + -e "s|/srv/pillar|${_SALT_ETC_DIR}/pillar|" ${file} + done if [ ! 
-f salt/syspaths.py ]; then # We still can't provide the system paths, salt 0.16.x # Let's patch salt's source and adapt paths to what's expected on FreeBSD @@ -6748,6 +7394,15 @@ install_freebsd_restart_daemons() { service salt_$fname start done } + +install_freebsd_onedir() { +# +# call install_freebsd_stable +# + install_freebsd_stable || return 1 + + return 0 +} # # Ended FreeBSD Install Functions # @@ -6866,6 +7521,14 @@ install_openbsd_restart_daemons() { return 0 } +install_openbsd_onedir() { +# +# Call install_openbsd_stable +# + install_openbsd_stable || return 1 + + return 0 +} # # Ended OpenBSD Install Functions # @@ -7066,6 +7729,14 @@ install_smartos_restart_daemons() { return 0 } +install_smartos_onedir() { +# +# call install_smartos_stable +# + install_smartos_stable || return 1 + + return 0 +} # # Ended SmartOS Install Functions # @@ -7090,13 +7761,8 @@ __set_suse_pkg_repo() { DISTRO_REPO="SLE_${DISTRO_MAJOR_VERSION}_SP${SUSE_PATCHLEVEL}" fi - if [ "$_DOWNSTREAM_PKG_REPO" -eq $BS_TRUE ]; then - suse_pkg_url_base="https://download.opensuse.org/repositories/systemsmanagement:/saltstack" - suse_pkg_url_path="${DISTRO_REPO}/systemsmanagement:saltstack.repo" - else - suse_pkg_url_base="${HTTP_VAL}://repo.saltproject.io/opensuse" - suse_pkg_url_path="${DISTRO_REPO}/systemsmanagement:saltstack:products.repo" - fi + suse_pkg_url_base="https://download.opensuse.org/repositories/systemsmanagement:/saltstack" + suse_pkg_url_path="${DISTRO_REPO}/systemsmanagement:saltstack.repo" SUSE_PKG_URL="$suse_pkg_url_base/$suse_pkg_url_path" } @@ -7116,7 +7782,7 @@ __version_lte() { zypper --non-interactive install --auto-agree-with-licenses python || return 1 fi - if [ "$(python -c 'import sys; V1=tuple([int(i) for i in sys.argv[1].split(".")]); V2=tuple([int(i) for i in sys.argv[2].split(".")]); print V1<=V2' "$1" "$2")" = "True" ]; then + if [ "$(${_PY_EXE} -c 'import sys; V1=tuple([int(i) for i in sys.argv[1].split(".")]); V2=tuple([int(i) for i in 
sys.argv[2].split(".")]); print(V1<=V2)' "$1" "$2")" = "True" ]; then __ZYPPER_REQUIRES_REPLACE_FILES=${BS_TRUE} else __ZYPPER_REQUIRES_REPLACE_FILES=${BS_FALSE} @@ -7233,7 +7899,7 @@ install_opensuse_git_deps() { fi # Check for Tumbleweed elif [ "${DISTRO_MAJOR_VERSION}" -ge 20210101 ]; then - __PACKAGES="python3-pip gcc-c++ python310-pyzmq-devel" + __PACKAGES="python3-pip gcc-c++ python3-pyzmq-devel" else __PACKAGES="python-pip python-setuptools gcc" fi @@ -7250,6 +7916,10 @@ install_opensuse_git_deps() { return 0 } +install_opensuse_onedir_deps() { + install_opensuse_stable_deps || return 1 +} + install_opensuse_stable() { __PACKAGES="" @@ -7282,6 +7952,10 @@ install_opensuse_git() { return 0 } +install_opensuse_onedir() { + install_opensuse_stable || return 1 +} + install_opensuse_stable_post() { for fname in api master minion syndic; do # Skip salt-api since the service should be opt-in and not necessarily started on boot @@ -7326,10 +8000,17 @@ install_opensuse_git_post() { use_usr_lib=$BS_TRUE fi + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/" + fi + if [ "${use_usr_lib}" -eq $BS_TRUE ]; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" "/usr/lib/systemd/system/salt-${fname}.service" + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/usr/lib/systemd/system/salt-${fname}.service" else - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" fi continue @@ -7344,6 +8025,10 @@ install_opensuse_git_post() { return 0 } +install_opensuse_onedir_post() { + install_opensuse_stable_post || return 1 +} + install_opensuse_restart_daemons() { [ $_START_DAEMONS -eq $BS_FALSE ] && return @@ -7503,6 
+8188,11 @@ install_opensuse_15_git() { return 0 } +install_opensuse_15_onedir_deps() { + __opensuse_prep_install || return 1 + return 0 +} + # # End of openSUSE Leap 15 # @@ -7532,6 +8222,13 @@ install_suse_15_git_deps() { return 0 } +install_suse_15_onedir_deps() { + __opensuse_prep_install || return 1 + install_opensuse_15_onedir_deps || return 1 + + return 0 +} + install_suse_15_stable() { install_opensuse_stable || return 1 return 0 @@ -7542,6 +8239,11 @@ install_suse_15_git() { return 0 } +install_suse_15_onedir() { + install_opensuse_stable || return 1 + return 0 +} + install_suse_15_stable_post() { install_opensuse_stable_post || return 1 return 0 @@ -7552,6 +8254,11 @@ install_suse_15_git_post() { return 0 } +install_suse_15_onedir_post() { + install_opensuse_stable_post || return 1 + return 0 +} + install_suse_15_restart_daemons() { install_opensuse_restart_daemons || return 1 return 0 @@ -7634,6 +8341,11 @@ install_suse_12_git_deps() { return 0 } +install_suse_12_onedir_deps() { + install_suse_12_stable_deps || return 1 + return 0 +} + install_suse_12_stable() { install_opensuse_stable || return 1 return 0 @@ -7644,6 +8356,11 @@ install_suse_12_git() { return 0 } +install_suse_12_onedir() { + install_opensuse_stable || return 1 + return 0 +} + install_suse_12_stable_post() { install_opensuse_stable_post || return 1 return 0 @@ -7654,6 +8371,11 @@ install_suse_12_git_post() { return 0 } +install_suse_12_onedir_post() { + install_opensuse_stable_post || return 1 + return 0 +} + install_suse_12_restart_daemons() { install_opensuse_restart_daemons || return 1 return 0 @@ -7730,6 +8452,11 @@ install_suse_11_git_deps() { return 0 } +install_suse_11_onedir_deps() { + install_suse_11_stable_deps || return 1 + return 0 +} + install_suse_11_stable() { install_opensuse_stable || return 1 return 0 @@ -7740,6 +8467,11 @@ install_suse_11_git() { return 0 } +install_suse_11_onedir() { + install_opensuse_stable || return 1 + return 0 +} + install_suse_11_stable_post() { 
install_opensuse_stable_post || return 1 return 0 @@ -7750,6 +8482,11 @@ install_suse_11_git_post() { return 0 } +install_suse_11_onedir_post() { + install_opensuse_stable_post || return 1 + return 0 +} + install_suse_11_restart_daemons() { install_opensuse_restart_daemons || return 1 return 0 @@ -7849,11 +8586,6 @@ __gentoo_pre_dep() { mkdir /etc/portage fi - # Enable Python 3.6 target for pre Neon Salt release - if echo "${STABLE_REV}" | grep -q "2019" || [ "${ITYPE}" = "git" ] && [ "${_POST_NEON_INSTALL}" -eq $BS_FALSE ]; then - EXTRA_PYTHON_TARGET=python3_6 - fi - # Enable Python 3.7 target for Salt Neon using GIT if [ "${ITYPE}" = "git" ] && [ "${GIT_REV}" = "v3000" ]; then EXTRA_PYTHON_TARGET=python3_7 @@ -7949,6 +8681,9 @@ install_gentoo_git_deps() { __emerge ${GENTOO_GIT_PACKAGES} || return 1 fi + echoinfo "Running emerge -v1 setuptools" + __emerge -v1 setuptools || return 1 + __git_clone_and_checkout || return 1 __gentoo_post_dep || return 1 } @@ -7996,6 +8731,11 @@ install_gentoo_git() { return 0 } +install_gentoo_onedir() { + STABLE_REV=${ONEDIR_REV} + install_gentoo_stable || return 1 +} + install_gentoo_post() { for fname in api master minion syndic; do # Skip salt-api since the service should be opt-in and not necessarily started on boot @@ -8031,8 +8771,15 @@ install_gentoo_git_post() { [ $fname = "minion" ] && [ "$_INSTALL_MINION" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue + # Account for new path for services files in later releases + if [ -f "${_SALT_GIT_CHECKOUT_DIR}/pkg/common/salt-${fname}.service" ]; then + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg/common" + else + _SERVICE_DIR="${_SALT_GIT_CHECKOUT_DIR}/pkg" + fi + if __check_command_exists systemctl ; then - __copyfile "${_SALT_GIT_CHECKOUT_DIR}/pkg/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" + __copyfile "${_SERVICE_DIR}/salt-${fname}.service" "/lib/systemd/system/salt-${fname}.service" # Skip salt-api since 
the service should be opt-in and not necessarily started on boot [ $fname = "api" ] && continue @@ -8078,6 +8825,10 @@ _eof return 0 } +install_gentoo_onedir_post() { + install_gentoo_post || return 1 +} + install_gentoo_restart_daemons() { [ $_START_DAEMONS -eq $BS_FALSE ] && return @@ -8229,7 +8980,46 @@ __macosx_get_packagesite() { fi PKG="salt-${STABLE_REV}-${__PY_VERSION_REPO}-${DARWIN_ARCH}.pkg" - SALTPKGCONFURL="https://repo.saltproject.io/osx/${PKG}" + SALTPKGCONFURL="https://${_REPO_URL}/osx/${PKG}" +} + +__parse_repo_json_python() { + + # Using latest, grab the right + # version from the repo.json + _JSON_VERSION=$(python - <<-EOF +import json, urllib.request +url = "https://repo.saltproject.io/salt/py3/macos/repo.json" +response = urllib.request.urlopen(url) +data = json.loads(response.read()) +version = data["${_ONEDIR_REV}"][list(data["${_ONEDIR_REV}"])[0]]['version'] +print(version) +EOF +) +echo "${_JSON_VERSION}" +} + +__macosx_get_packagesite_onedir() { + DARWIN_ARCH="x86_64" + + __PY_VERSION_REPO="py2" + if [ -n "$_PY_EXE" ] && [ "$_PY_MAJOR_VERSION" -eq 3 ]; then + __PY_VERSION_REPO="py3" + fi + + if [ "$(echo "$_ONEDIR_REV" | grep -E '^(latest)$')" != "" ]; then + _PKG_VERSION=$(__parse_repo_json_python) + elif [ "$(echo "$_ONEDIR_REV" | grep -E '^([3-9][0-9]{3}(\.[0-9]*))')" != "" ]; then + _PKG_VERSION=$_ONEDIR_REV + else + _PKG_VERSION=$(__parse_repo_json_python) + fi + if [ "$(echo "$_ONEDIR_REV" | grep -E '^(3005)')" != "" ]; then + PKG="salt-${_PKG_VERSION}-macos-${DARWIN_ARCH}.pkg" + else + PKG="salt-${_PKG_VERSION}-${__PY_VERSION_REPO}-${DARWIN_ARCH}.pkg" + fi + SALTPKGCONFURL="https://${_REPO_URL}/${_ONEDIR_DIR}/${__PY_VERSION_REPO}/macos/${ONEDIR_REV}/${PKG}" } # Using a separate conf step to head for idempotent install... 
@@ -8238,11 +9028,21 @@ __configure_macosx_pkg_details() { return 0 } +__configure_macosx_pkg_details_onedir() { + __macosx_get_packagesite_onedir || return 1 + return 0 +} + install_macosx_stable_deps() { __configure_macosx_pkg_details || return 1 return 0 } +install_macosx_onedir_deps() { + __configure_macosx_pkg_details_onedir || return 1 + return 0 +} + install_macosx_git_deps() { install_macosx_stable_deps || return 1 @@ -8289,6 +9089,16 @@ install_macosx_stable() { return 0 } +install_macosx_onedir() { + install_macosx_onedir_deps || return 1 + + __fetch_url "/tmp/${PKG}" "${SALTPKGCONFURL}" || return 1 + + /usr/sbin/installer -pkg "/tmp/${PKG}" -target / || return 1 + + return 0 +} + install_macosx_git() { if [ -n "$_PY_EXE" ]; then @@ -8326,6 +9136,11 @@ install_macosx_stable_post() { return 0 } +install_macosx_onedir_post() { + install_macosx_stable_post || return 1 + return 0 +} + install_macosx_git_post() { install_macosx_stable_post || return 1 return 0 @@ -8334,8 +9149,15 @@ install_macosx_git_post() { install_macosx_restart_daemons() { [ $_START_DAEMONS -eq $BS_FALSE ] && return - /bin/launchctl unload -w /Library/LaunchDaemons/com.saltstack.salt.minion.plist || return 1 - /bin/launchctl load -w /Library/LaunchDaemons/com.saltstack.salt.minion.plist || return 1 + if [ "$_INSTALL_MINION" -eq $BS_TRUE ]; then + /bin/launchctl unload -w /Library/LaunchDaemons/com.saltstack.salt.minion.plist || return 1 + /bin/launchctl load -w /Library/LaunchDaemons/com.saltstack.salt.minion.plist || return 1 + fi + + if [ "$_INSTALL_MASTER" -eq $BS_TRUE ]; then + /bin/launchctl unload -w /Library/LaunchDaemons/com.saltstack.salt.master.plist || return 1 + /bin/launchctl load -w /Library/LaunchDaemons/com.saltstack.salt.master.plist || return 1 + fi return 0 } @@ -8554,7 +9376,11 @@ daemons_running_onedir() { [ $fname = "master" ] && [ "$_INSTALL_MASTER" -eq $BS_FALSE ] && continue [ $fname = "syndic" ] && [ "$_INSTALL_SYNDIC" -eq $BS_FALSE ] && continue - 
salt_path="/opt/saltstack/salt/run/run ${fname}" + if [ -f "/opt/saltstack/salt/run/run" ]; then + salt_path="/opt/saltstack/salt/run/run ${fname}" + else + salt_path="salt-${fname}" + fi process_running=$(pgrep -f "${salt_path}") if [ "${process_running}" = "" ]; then echoerror "${salt_path} was not found running" @@ -8912,6 +9738,11 @@ if [ "$DAEMONS_RUNNING_FUNC" != "null" ] && [ ${_START_DAEMONS} -eq $BS_TRUE ]; fi fi +if [ "$_AUTO_ACCEPT_MINION_KEYS" -eq "$BS_TRUE" ]; then + echoinfo "Accepting the Salt Minion Keys" + salt-key -yA +fi + # Done! if [ "$_CONFIG_ONLY" -eq $BS_FALSE ]; then echoinfo "Salt installed!" @@ -8919,6 +9750,13 @@ else echoinfo "Salt configured!" fi +if [ "$_QUICK_START" -eq "$BS_TRUE" ]; then + echoinfo "Congratulations!" + echoinfo "A couple of commands to try:" + echoinfo " salt \* test.ping" + echoinfo " salt \* test.version" +fi + exit 0 # vim: set sts=4 ts=4 et diff --git a/salt/config/__init__.py b/salt/config/__init__.py index 58a3ff8584ae..f946bc7f010d 100644 --- a/salt/config/__init__.py +++ b/salt/config/__init__.py @@ -49,6 +49,8 @@ _DFLT_REFSPECS = ["+refs/heads/*:refs/remotes/origin/*", "+refs/tags/*:refs/tags/*"] DEFAULT_INTERVAL = 60 +DEFAULT_HASH_TYPE = "sha256" + if salt.utils.platform.is_windows(): # Since an 'ipc_mode' of 'ipc' will never work on Windows due to lack of @@ -373,7 +375,7 @@ def _gather_buffer_space(): # applications that depend on the original format. "unique_jid": bool, # Governs whether state runs will queue or fail to run when a state is already running - "state_queue": bool, + "state_queue": (bool, int), # Tells the highstate outputter to show successful states. False will omit successes. "state_verbose": bool, # Specify the format for state outputs. See highstate outputter for additional details. 
@@ -987,6 +989,12 @@ def _gather_buffer_space(): "pass_gnupghome": str, # pass renderer: Set PASSWORD_STORE_DIR env for Pass "pass_dir": str, + # Maintenence process restart interval + "maintenance_interval": int, + # Fileserver process restart interval + "fileserver_interval": int, + "request_channel_timeout": int, + "request_channel_tries": int, } ) @@ -1048,6 +1056,8 @@ def _gather_buffer_space(): "pillar_cache": False, "pillar_cache_ttl": 3600, "pillar_cache_backend": "disk", + "request_channel_timeout": 30, + "request_channel_tries": 3, "gpg_cache": False, "gpg_cache_ttl": 86400, "gpg_cache_backend": "disk", @@ -1131,7 +1141,7 @@ def _gather_buffer_space(): "gitfs_refspecs": _DFLT_REFSPECS, "gitfs_disable_saltenv_mapping": False, "unique_jid": False, - "hash_type": "sha256", + "hash_type": DEFAULT_HASH_TYPE, "optimization_order": [0, 1, 2], "disable_modules": [], "disable_returners": [], @@ -1456,7 +1466,7 @@ def _gather_buffer_space(): "fileserver_ignoresymlinks": False, "fileserver_verify_config": True, "max_open_files": 100000, - "hash_type": "sha256", + "hash_type": DEFAULT_HASH_TYPE, "optimization_order": [0, 1, 2], "conf_file": os.path.join(salt.syspaths.CONFIG_DIR, "master"), "open_mode": False, @@ -1635,6 +1645,8 @@ def _gather_buffer_space(): "pass_gnupghome": "", "pass_dir": "", "netapi_enable_clients": [], + "maintenance_interval": 3600, + "fileserver_interval": 3600, } ) @@ -2273,6 +2285,8 @@ def minion_config( """ if defaults is None: defaults = DEFAULT_MINION_OPTS.copy() + if role == "master": + defaults["default_include"] = DEFAULT_MASTER_OPTS["default_include"] if not os.environ.get(env_var, None): # No valid setting was given using the configuration variable. diff --git a/salt/engines/slack_bolt_engine.py b/salt/engines/slack_bolt_engine.py index 0a0417d160af..75eb0909e48e 100644 --- a/salt/engines/slack_bolt_engine.py +++ b/salt/engines/slack_bolt_engine.py @@ -111,7 +111,7 @@ .. 
code-block:: text engines: - - slack: + - slack_bolt: app_token: "xapp-x-xxxxxxxxxxx-xxxxxxxxxxxxx-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" bot_token: 'xoxb-xxxxxxxxxx-xxxxxxxxxxxxxxxxxxxxxxxx' control: True @@ -149,7 +149,7 @@ .. code-block:: text engines: - - slack: + - slack_bolt: groups_pillar: slack_engine_pillar app_token: "xapp-x-xxxxxxxxxxx-xxxxxxxxxxxxx-xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx" bot_token: 'xoxb-xxxxxxxxxx-xxxxxxxxxxxxxxxxxxxxxxxx' @@ -234,7 +234,7 @@ def __init__(self, app_token, bot_token, trigger_string): self.msg_queue = collections.deque() - trigger_pattern = "(^{}.*)".format(trigger_string) + trigger_pattern = f"(^{trigger_string}.*)" # Register message_trigger when we see messages that start # with the trigger string @@ -843,7 +843,7 @@ def get_jobs_from_runner(self, outstanding_jids): results = {} for jid in outstanding_jids: # results[jid] = runner.cmd('jobs.lookup_jid', [jid]) - if self.master_minion.returners["{}.get_jid".format(source)](jid): + if self.master_minion.returners[f"{source}.get_jid"](jid): job_result = runner.cmd("jobs.list_job", [jid]) jid_result = job_result.get("Result", {}) jid_function = job_result.get("Function", {}) @@ -954,7 +954,7 @@ def run_commands_from_slack_async( ) ts = time.time() st = datetime.datetime.fromtimestamp(ts).strftime("%Y%m%d%H%M%S%f") - filename = "salt-results-{}.yaml".format(st) + filename = f"salt-results-{st}.yaml" resp = self.app.client.files_upload( channels=channel, filename=filename, @@ -1075,4 +1075,4 @@ def start( ) client.run_commands_from_slack_async(message_generator, fire_all, tag, control) except Exception: # pylint: disable=broad-except - raise Exception("{}".format(traceback.format_exc())) + raise Exception(f"{traceback.format_exc()}") diff --git a/salt/fileclient.py b/salt/fileclient.py index 39e8d5dfbfb9..42e7120aab18 100644 --- a/salt/fileclient.py +++ b/salt/fileclient.py @@ -9,6 +9,7 @@ import os import shutil import 
string +import time import urllib.error import urllib.parse @@ -31,7 +32,8 @@ import salt.utils.url import salt.utils.verify import salt.utils.versions -from salt.exceptions import CommandExecutionError, MinionError +from salt.config import DEFAULT_HASH_TYPE +from salt.exceptions import CommandExecutionError, MinionError, SaltClientError from salt.ext.tornado.httputil import ( HTTPHeaders, HTTPInputError, @@ -43,12 +45,16 @@ MAX_FILENAME_LENGTH = 255 -def get_file_client(opts, pillar=False): +def get_file_client(opts, pillar=False, force_local=False): """ Read in the ``file_client`` option and return the correct type of file server """ - client = opts.get("file_client", "remote") + if force_local: + client = "local" + else: + client = opts.get("file_client", "remote") + if pillar and client == "local": client = "pillar" return {"remote": RemoteClient, "local": FSClient, "pillar": PillarClient}.get( @@ -793,7 +799,7 @@ def on_chunk(chunk): opts=self.opts, verify_ssl=verify_ssl, header_dict=header_dict, - **get_kwargs + **get_kwargs, ) # 304 Not Modified is returned when If-None-Match header @@ -822,7 +828,7 @@ def on_chunk(chunk): "HTTP error {0} reading {1}: {3}".format( exc.code, url, - *http.server.BaseHTTPRequestHandler.responses[exc.code] + *http.server.BaseHTTPRequestHandler.responses[exc.code], ) ) except urllib.error.URLError as exc: @@ -839,7 +845,7 @@ def get_template( makedirs=False, saltenv="base", cachedir=None, - **kwargs + **kwargs, ): """ Cache a file then process it as a template @@ -849,7 +855,6 @@ def get_template( kwargs.pop("env") kwargs["saltenv"] = saltenv - url_data = urllib.parse.urlparse(url) sfn = self.cache_file(url, saltenv, cachedir=cachedir) if not sfn or not os.path.exists(sfn): return "" @@ -892,6 +897,15 @@ def _extrn_path(self, url, saltenv, cachedir=None): # Strip user:pass from URLs netloc = netloc.split("@")[-1] + try: + if url_data.port: + # Remove : from path + netloc = netloc.replace(":", "") + except ValueError: + # On 
Windows urllib raises a ValueError + # when using a file:// source and trying + # to access the port attribute. + pass if cachedir is None: cachedir = self.opts["cachedir"] @@ -1040,7 +1054,7 @@ def hash_file(self, path, saltenv="base"): # Local file path fnd_path = fnd - hash_type = self.opts.get("hash_type", "md5") + hash_type = self.opts.get("hash_type", DEFAULT_HASH_TYPE) ret["hsum"] = salt.utils.hashutils.get_hash(fnd_path, form=hash_type) ret["hash_type"] = hash_type return ret @@ -1071,7 +1085,7 @@ def hash_and_stat_file(self, path, saltenv="base"): except Exception: # pylint: disable=broad-except fnd_stat = None - hash_type = self.opts.get("hash_type", "md5") + hash_type = self.opts.get("hash_type", DEFAULT_HASH_TYPE) ret["hsum"] = salt.utils.hashutils.get_hash(fnd_path, form=hash_type) ret["hash_type"] = hash_type return ret, fnd_stat @@ -1137,11 +1151,17 @@ def _refresh_channel(self): self.channel = salt.channel.client.ReqChannel.factory(self.opts) return self.channel - # pylint: disable=no-dunder-del - def __del__(self): - self.destroy() - - # pylint: enable=no-dunder-del + def _channel_send(self, load, raw=False): + start = time.monotonic() + try: + return self.channel.send( + load, + raw=raw, + ) + except salt.exceptions.SaltReqTimeoutError: + raise SaltClientError( + f"File client timed out after {int(time.time() - start)}" + ) def destroy(self): if self._closing: @@ -1171,13 +1191,8 @@ def get_file( if not salt.utils.platform.is_windows(): hash_server, stat_server = self.hash_and_stat_file(path, saltenv) - try: - mode_server = stat_server[0] - except (IndexError, TypeError): - mode_server = None else: hash_server = self.hash_file(path, saltenv) - mode_server = None # Check if file exists on server, before creating files and # directories @@ -1220,13 +1235,8 @@ def get_file( if dest2check and os.path.isfile(dest2check): if not salt.utils.platform.is_windows(): hash_local, stat_local = self.hash_and_stat_file(dest2check, saltenv) - try: - mode_local = 
stat_local[0] - except (IndexError, TypeError): - mode_local = None else: hash_local = self.hash_file(dest2check, saltenv) - mode_local = None if hash_local == hash_server: return dest2check @@ -1267,7 +1277,10 @@ def get_file( load["loc"] = 0 else: load["loc"] = fn_.tell() - data = self.channel.send(load, raw=True) + data = self._channel_send( + load, + raw=True, + ) # Sometimes the source is local (eg when using # 'salt.fileserver.FSChan'), in which case the keys are # already strings. Sometimes the source is remote, in which @@ -1291,7 +1304,7 @@ def get_file( hsum = salt.utils.hashutils.get_hash( dest, salt.utils.stringutils.to_str( - data.get("hash_type", b"md5") + data.get("hash_type", DEFAULT_HASH_TYPE) ), ) if hsum != data["hsum"]: @@ -1360,28 +1373,36 @@ def file_list(self, saltenv="base", prefix=""): List the files on the master """ load = {"saltenv": saltenv, "prefix": prefix, "cmd": "_file_list"} - return self.channel.send(load) + return self._channel_send( + load, + ) def file_list_emptydirs(self, saltenv="base", prefix=""): """ List the empty dirs on the master """ load = {"saltenv": saltenv, "prefix": prefix, "cmd": "_file_list_emptydirs"} - return self.channel.send(load) + return self._channel_send( + load, + ) def dir_list(self, saltenv="base", prefix=""): """ List the dirs on the master """ load = {"saltenv": saltenv, "prefix": prefix, "cmd": "_dir_list"} - return self.channel.send(load) + return self._channel_send( + load, + ) def symlink_list(self, saltenv="base", prefix=""): """ List symlinked files and dirs on the master """ load = {"saltenv": saltenv, "prefix": prefix, "cmd": "_symlink_list"} - return self.channel.send(load) + return self._channel_send( + load, + ) def __hash_and_stat_file(self, path, saltenv="base"): """ @@ -1397,12 +1418,14 @@ def __hash_and_stat_file(self, path, saltenv="base"): return {}, None else: ret = {} - hash_type = self.opts.get("hash_type", "md5") + hash_type = self.opts.get("hash_type", DEFAULT_HASH_TYPE) 
ret["hsum"] = salt.utils.hashutils.get_hash(path, form=hash_type) ret["hash_type"] = hash_type return ret load = {"path": path, "saltenv": saltenv, "cmd": "_file_hash"} - return self.channel.send(load) + return self._channel_send( + load, + ) def hash_file(self, path, saltenv="base"): """ @@ -1429,7 +1452,9 @@ def hash_and_stat_file(self, path, saltenv="base"): except Exception: # pylint: disable=broad-except return hash_result, None load = {"path": path, "saltenv": saltenv, "cmd": "_file_find"} - fnd = self.channel.send(load) + fnd = self._channel_send( + load, + ) try: stat_result = fnd.get("stat") except AttributeError: @@ -1441,21 +1466,27 @@ def list_env(self, saltenv="base"): Return a list of the files in the file server's specified environment """ load = {"saltenv": saltenv, "cmd": "_file_list"} - return self.channel.send(load) + return self._channel_send( + load, + ) def envs(self): """ Return a list of available environments """ load = {"cmd": "_file_envs"} - return self.channel.send(load) + return self._channel_send( + load, + ) def master_opts(self): """ Return the master opts data """ load = {"cmd": "_master_opts"} - return self.channel.send(load) + return self._channel_send( + load, + ) def master_tops(self): """ @@ -1464,7 +1495,15 @@ def master_tops(self): load = {"cmd": "_master_tops", "id": self.opts["id"], "opts": self.opts} if self.auth: load["tok"] = self.auth.gen_token(b"salt") - return self.channel.send(load) + return self._channel_send( + load, + ) + + def __enter__(self): + return self + + def __exit__(self, *args): + self.destroy() class FSClient(RemoteClient): @@ -1493,3 +1532,17 @@ class DumbAuth: def gen_token(self, clear_tok): return clear_tok + + +class ContextlessFileClient: + def __init__(self, file_client): + self.file_client = file_client + + def __getattr__(self, key): + return getattr(self.file_client, key) + + def __exit__(self, *_): + pass + + def __enter__(self): + return self diff --git a/salt/fileserver/hgfs.py 
b/salt/fileserver/hgfs.py index baafa46bd8c6..a7f548ac6a9e 100644 --- a/salt/fileserver/hgfs.py +++ b/salt/fileserver/hgfs.py @@ -35,7 +35,6 @@ - python bindings for mercurial (``python-hglib``) """ - import copy import errno import fnmatch @@ -54,6 +53,7 @@ import salt.utils.stringutils import salt.utils.url import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import FileserverConfigError from salt.utils.event import tagify @@ -308,7 +308,7 @@ def init(): # mountpoint not specified pass - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) repo_hash = hash_type(repo_url.encode("utf-8")).hexdigest() rp_ = os.path.join(bp_, repo_hash) if not os.path.isdir(rp_): diff --git a/salt/fileserver/svnfs.py b/salt/fileserver/svnfs.py index c45365fafb6f..48843f22e67c 100644 --- a/salt/fileserver/svnfs.py +++ b/salt/fileserver/svnfs.py @@ -49,6 +49,7 @@ import salt.utils.stringutils import salt.utils.url import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import FileserverConfigError from salt.utils.event import tagify @@ -192,7 +193,7 @@ def init(): # mountpoint not specified pass - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) repo_hash = hash_type(repo_url).hexdigest() rp_ = os.path.join(bp_, repo_hash) if not os.path.isdir(rp_): diff --git a/salt/grains/core.py b/salt/grains/core.py index f4361a693612..5ac64007564a 100644 --- a/salt/grains/core.py +++ b/salt/grains/core.py @@ -23,8 +23,6 @@ import uuid from errno import EACCES, EPERM -import distro - import salt.exceptions # Solve the Chicken and egg problem where grains need to run before any @@ -41,6 +39,7 @@ import salt.utils.platform import salt.utils.stringutils from salt.utils.network import _clear_interfaces, _get_interfaces +from salt.utils.platform import 
linux_distribution as _linux_distribution try: # pylint: disable=no-name-in-module @@ -90,15 +89,6 @@ def _freedesktop_os_release(): return _parse_os_release("/etc/os-release", "/usr/lib/os-release") -# rewrite distro.linux_distribution to allow best=True kwarg in version(), needed to get the minor version numbers in CentOS -def _linux_distribution(): - return ( - distro.id(), - distro.version(best=True), - distro.codename(), - ) - - def __init__(opts): _clear_interfaces() @@ -487,7 +477,7 @@ def _bsd_cpudata(osdata): return grains -def _sunos_cpudata(): +def _sunos_cpudata(): # pragma: no cover """ Return the CPU information for Solaris-like systems """ @@ -519,7 +509,7 @@ def _sunos_cpudata(): return grains -def _aix_cpudata(): +def _aix_cpudata(): # pragma: no cover """ Return CPU information for AIX systems """ @@ -622,7 +612,7 @@ def _bsd_memdata(osdata): return grains -def _sunos_memdata(): +def _sunos_memdata(): # pragma: no cover """ Return the memory information for SunOS-like systems """ @@ -646,7 +636,7 @@ def _sunos_memdata(): return grains -def _aix_memdata(): +def _aix_memdata(): # pragma: no cover """ Return the memory information for AIX systems """ @@ -700,16 +690,16 @@ def _memdata(osdata): grains.update(_bsd_memdata(osdata)) elif osdata["kernel"] == "Darwin": grains.update(_osx_memdata()) - elif osdata["kernel"] == "SunOS": - grains.update(_sunos_memdata()) - elif osdata["kernel"] == "AIX": - grains.update(_aix_memdata()) + elif osdata["kernel"] == "SunOS": # pragma: no cover + grains.update(_sunos_memdata()) # pragma: no cover + elif osdata["kernel"] == "AIX": # pragma: no cover + grains.update(_aix_memdata()) # pragma: no cover elif osdata["kernel"] == "Windows" and HAS_WMI: grains.update(_windows_memdata()) return grains -def _aix_get_machine_id(): +def _aix_get_machine_id(): # pragma: no cover """ Parse the output of lsattr -El sys0 for os_uuid """ @@ -2116,9 +2106,11 @@ def _os_release_quirks_for_osrelease(os_release): if os_release["ID"] in 
("mendel",): # Mendel sets VERSION_CODENAME but not VERSION_ID. # Only PRETTY_NAME mentions the version number. - match = _PRETTY_NAME_RE.match(os_release["PRETTY_NAME"]) - if match: - return match.group("version") + # for example: Mendel GNU/Linux 5 (Eagle) + test_strg = os_release["PRETTY_NAME"].split() + if len(test_strg) >= 3: + return test_strg[2] + return None @@ -2185,8 +2177,9 @@ def _linux_distribution_data(): "rocky", "ubuntu", ): - # Solely use os-release data. See description of the function. - return grains + if lsb_has_error is False: + # Solely use os-release data. See description of the function. + return grains except OSError: os_release = {} @@ -2240,6 +2233,10 @@ def _legacy_linux_distribution_data(grains, os_release, lsb_has_error): cpe.get("version") and cpe.get("vendor") == "opensuse" ): # Keep VERSION_ID for SLES grains["lsb_distrib_release"] = cpe["version"] + if "ID" in os_release and os_release["ID"].strip() == "mendel": + test_strg = os_release["PRETTY_NAME"].split() + if len(test_strg) >= 3: + grains["lsb_distrib_release"] = test_strg[2] elif os.path.isfile("/etc/SuSE-release"): log.trace("Parsing distrib info from /etc/SuSE-release") @@ -2357,6 +2354,20 @@ def _legacy_linux_distribution_data(grains, os_release, lsb_has_error): ): grains.pop("lsb_distrib_release", None) grains["osrelease"] = grains.get("lsb_distrib_release", osrelease).strip() + + # allow for codename being within brackets on certain OS + if grains.get("lsb_distrib_codename", "") and ( + any(os in grains.get("os", "") for os in ["Rocky", "AlmaLinux", "AstraLinuxSE"]) + ): + test_strg = grains["lsb_distrib_codename"].split("(", maxsplit=1) + if len(test_strg) >= 2: + test_strg_2 = test_strg[1].split(")", maxsplit=1) + if grains["os"] == "AstraLinuxSE": + # AstraLinuxSE has version aka 'Smolensk 1.6' + grains["lsb_distrib_codename"] = test_strg_2[0].split()[0].lower() + else: + grains["lsb_distrib_codename"] = test_strg_2[0] + grains["oscodename"] = 
grains.get("lsb_distrib_codename", "").strip() or oscodename if "Red Hat" in grains["oscodename"]: grains["oscodename"] = oscodename @@ -2471,7 +2482,7 @@ def _smartos_os_data(): return grains -def _sunos_release(): +def _sunos_release(): # pragma: no cover grains = {} with salt.utils.files.fopen("/etc/release", "r") as fp_: rel_data = fp_.read() @@ -2708,10 +2719,8 @@ def locale_info(): ( grains["locale_info"]["defaultlanguage"], grains["locale_info"]["defaultencoding"], - ) = locale.getdefaultlocale() + ) = locale.getlocale() except Exception: # pylint: disable=broad-except - # locale.getdefaultlocale can ValueError!! Catch anything else it - # might do, per #2205 grains["locale_info"]["defaultlanguage"] = "unknown" grains["locale_info"]["defaultencoding"] = "unknown" grains["locale_info"]["detectedencoding"] = __salt_system_encoding__ diff --git a/salt/grains/mdata.py b/salt/grains/mdata.py index 3077f321cc91..fe88f6ce2a68 100644 --- a/salt/grains/mdata.py +++ b/salt/grains/mdata.py @@ -152,6 +152,3 @@ def mdata(): ) return grains - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/grains/metadata_gce.py b/salt/grains/metadata_gce.py index c641843a30d0..0c98a03b6ae2 100644 --- a/salt/grains/metadata_gce.py +++ b/salt/grains/metadata_gce.py @@ -2,7 +2,7 @@ Grains from cloud metadata servers at 169.254.169.254 in google compute engine -.. versionadded:: 3005.0 +.. 
versionadded:: 3005 :depends: requests diff --git a/salt/grains/smartos.py b/salt/grains/smartos.py index 6195908ceca8..62e24b3798b0 100644 --- a/salt/grains/smartos.py +++ b/salt/grains/smartos.py @@ -213,6 +213,3 @@ def smartos(): ) return grains - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/grains/zfs.py b/salt/grains/zfs.py index e903d010832a..62f8f3def79a 100644 --- a/salt/grains/zfs.py +++ b/salt/grains/zfs.py @@ -81,6 +81,3 @@ def zfs(): ) return grains - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/loader/__init__.py b/salt/loader/__init__.py index 72a5e5440128..8f2a69dc6b6c 100644 --- a/salt/loader/__init__.py +++ b/salt/loader/__init__.py @@ -263,6 +263,7 @@ def minion_mods( notify=False, static_modules=None, proxy=None, + file_client=None, ): """ Load execution modules @@ -314,6 +315,7 @@ def minion_mods( "__utils__": utils, "__proxy__": proxy, "__opts__": opts, + "__file_client__": file_client, }, whitelist=whitelist, loaded_base_name=loaded_base_name, @@ -778,6 +780,7 @@ def states( proxy=None, context=None, loaded_base_name=None, + file_client=None, ): """ Returns the state modules @@ -815,6 +818,7 @@ def states( "__utils__": utils, "__serializers__": serializers, "__context__": context, + "__file_client__": file_client, }, whitelist=whitelist, extra_module_dirs=utils.module_dirs if utils else None, diff --git a/salt/loader/context.py b/salt/loader/context.py index cc45441c96b9..6bbfe4dbd815 100644 --- a/salt/loader/context.py +++ b/salt/loader/context.py @@ -32,7 +32,7 @@ def loader_context(loader): class NamedLoaderContext(collections.abc.MutableMapping): """ A NamedLoaderContext object is injected by the loader providing access to - Salt's 'magic dunders' (__salt__, __utils__, ect). + Salt's 'magic dunders' (__salt__, __utils__, etc). 
""" def __init__(self, name, loader_context, default=None): @@ -84,11 +84,7 @@ def __setitem__(self, item, value): self.value()[item] = value def __bool__(self): - try: - self.loader - except LookupError: - return False - return True + return bool(self.value()) def __len__(self): return self.value().__len__() diff --git a/salt/loader/dunder.py b/salt/loader/dunder.py new file mode 100644 index 000000000000..5fae90147972 --- /dev/null +++ b/salt/loader/dunder.py @@ -0,0 +1,9 @@ +""" +Salt dunders. +""" +import salt.loader.context + +loader_context = salt.loader.context.LoaderContext() + + +__file_client__ = loader_context.named_context("__file_client__") diff --git a/salt/loader/lazy.py b/salt/loader/lazy.py index d319fe54b429..8198b4f62e9f 100644 --- a/salt/loader/lazy.py +++ b/salt/loader/lazy.py @@ -144,9 +144,22 @@ def __getattr__(self, name): def __call__(self, *args, **kwargs): run_func = self.func + mod = sys.modules[run_func.__module__] + # All modules we've imported should have __opts__ defined. There are + # cases in the test suite where mod ends up being something other than + # a module we've loaded. + set_test = False + if hasattr(mod, "__opts__"): + if not isinstance(mod.__opts__, salt.loader.context.NamedLoaderContext): + if "test" in self.loader.opts: + mod.__opts__["test"] = self.loader.opts["test"] + set_test = True if self.loader.inject_globals: run_func = global_injector_decorator(self.loader.inject_globals)(run_func) - return self.loader.run(run_func, *args, **kwargs) + ret = self.loader.run(run_func, *args, **kwargs) + if set_test: + self.loader.opts["test"] = mod.__opts__["test"] + return ret def __repr__(self): return "<{} name={!r}>".format(self.__class__.__name__, self.name) diff --git a/salt/log/__init__.py b/salt/log/__init__.py index 3458474f2ca6..45202cffb0f0 100644 --- a/salt/log/__init__.py +++ b/salt/log/__init__.py @@ -9,6 +9,8 @@ are made to assure backwards compatibility. 
""" +# pylint: disable = no-name-in-module + # Import several classes/functions from salt.log.setup for backwards compatibility from salt._logging import LOG_LEVELS, SORTED_LEVEL_NAMES from salt.log.setup import ( diff --git a/salt/master.py b/salt/master.py index e6eab0103d9f..5a317bf93b72 100644 --- a/salt/master.py +++ b/salt/master.py @@ -186,6 +186,7 @@ def __init__(self, opts, **kwargs): # Track key rotation intervals self.rotate = int(time.time()) # A serializer for general maint operations + self.restart_interval = int(self.opts["maintenance_interval"]) def _post_fork_init(self): """ @@ -243,21 +244,28 @@ def run(self): # init things that need to be done after the process is forked self._post_fork_init() - # Make Start Times - last = int(time.time()) + # Start of process for maintenance process restart interval + start = time.time() + + # Unset last value will cause the interval items to run on the first + # loop iteration. This ensures we always run them even if + # maintenance_interval happens to be less than loop_interval or + # git_update_interval + last = None + # update git_pillar on first loop last_git_pillar_update = 0 + now = int(time.time()) git_pillar_update_interval = self.opts.get("git_pillar_update_interval", 0) old_present = set() - while True: - now = int(time.time()) + while time.time() - start < self.restart_interval: log.trace("Running maintenance routines") - if (now - last) >= self.loop_interval: + if not last or (now - last) >= self.loop_interval: salt.daemons.masterapi.clean_old_jobs(self.opts) salt.daemons.masterapi.clean_expired_tokens(self.opts) salt.daemons.masterapi.clean_pub_auth(self.opts) - if (now - last_git_pillar_update) >= git_pillar_update_interval: + if not last or (now - last_git_pillar_update) >= git_pillar_update_interval: last_git_pillar_update = now self.handle_git_pillar() self.handle_schedule() @@ -266,6 +274,7 @@ def run(self): self.handle_key_rotate(now) salt.utils.verify.check_max_open_files(self.opts) last = 
now + now = int(time.time()) time.sleep(self.loop_interval) def handle_key_cache(self): @@ -462,7 +471,7 @@ def _do_update(backends): ) @classmethod - def update(cls, interval, backends, timeout=300): + def update(cls, interval, backends, timeout): """ Threading target which handles all updates for a given wait interval """ @@ -503,7 +512,11 @@ def run(self): for interval in self.buckets: self.update_threads[interval] = threading.Thread( target=self.update, - args=(interval, self.buckets[interval]), + args=( + interval, + self.buckets[interval], + self.opts["fileserver_interval"], + ), ) self.update_threads[interval].start() @@ -1769,7 +1782,7 @@ def minion_runner(self, clear_load): def pub_ret(self, load): """ Request the return data from a specific jid, only allowed - if the requesting minion also initialted the execution. + if the requesting minion also initiated the execution. :param dict load: The minion payload diff --git a/salt/matchers/compound_match.py b/salt/matchers/compound_match.py index 538d2f92a375..2bce58f117a2 100644 --- a/salt/matchers/compound_match.py +++ b/salt/matchers/compound_match.py @@ -22,7 +22,6 @@ def _load_matchers(opts): """ Store matchers in __context__ so they're only loaded once """ - __context__["matchers"] = {} __context__["matchers"] = salt.loader.matchers(opts) diff --git a/salt/matchers/nodegroup_match.py b/salt/matchers/nodegroup_match.py index 1ce621510fb3..c2b57dc612f3 100644 --- a/salt/matchers/nodegroup_match.py +++ b/salt/matchers/nodegroup_match.py @@ -14,7 +14,6 @@ def _load_matchers(opts): """ Store matchers in __context__ so they're only loaded once """ - __context__["matchers"] = {} __context__["matchers"] = salt.loader.matchers(opts) diff --git a/salt/metaproxy/deltaproxy.py b/salt/metaproxy/deltaproxy.py index c3003b368f7b..aacc4a336a23 100644 --- a/salt/metaproxy/deltaproxy.py +++ b/salt/metaproxy/deltaproxy.py @@ -1,8 +1,8 @@ # # Proxy minion metaproxy modules # - import concurrent.futures +import copy import 
logging import os import signal @@ -96,9 +96,11 @@ def post_master_init(self, master): if "proxy" not in self.opts: self.opts["proxy"] = self.opts["pillar"]["proxy"] + pillar = copy.deepcopy(self.opts["pillar"]) + pillar.pop("master", None) self.opts = salt.utils.dictupdate.merge( self.opts, - self.opts["pillar"], + pillar, strategy=self.opts.get("proxy_merge_pillar_in_opts_strategy"), merge_lists=self.opts.get("proxy_deep_merge_pillar_in_opts", False), ) @@ -231,10 +233,11 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) log.info("Added mine.update to scheduler") else: - self.schedule.delete_job("__mine_interval", persist=True) + self.schedule.delete_job("__mine_interval", persist=True, fire_event=False) # add master_alive job if enabled if self.opts["transport"] != "tcp" and self.opts["master_alive_interval"] > 0: @@ -250,6 +253,7 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) if ( self.opts["master_failback"] @@ -268,18 +272,24 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) else: self.schedule.delete_job( - salt.minion.master_event(type="failback"), persist=True + salt.minion.master_event(type="failback"), + persist=True, + fire_event=False, ) else: self.schedule.delete_job( salt.minion.master_event(type="alive", master=self.opts["master"]), persist=True, + fire_event=False, ) self.schedule.delete_job( - salt.minion.master_event(type="failback"), persist=True + salt.minion.master_event(type="failback"), + persist=True, + fire_event=False, ) # proxy keepalive @@ -304,10 +314,15 @@ def post_master_init(self, master): } }, persist=True, + fire_event=False, ) - self.schedule.enable_schedule() + self.schedule.enable_schedule(fire_event=False) else: - self.schedule.delete_job("__proxy_keepalive", persist=True) + self.schedule.delete_job( + "__proxy_keepalive", + persist=True, + fire_event=False, + ) # Sync the grains here so the proxy can communicate them to the master 
self.functions["saltutil.sync_grains"](saltenv="base") @@ -321,10 +336,11 @@ def post_master_init(self, master): self.proxy_context = {} self.add_periodic_callback("cleanup", self.cleanup_subprocesses) + _failed = list() if self.opts["proxy"].get("parallel_startup"): log.debug("Initiating parallel startup for proxies") with concurrent.futures.ThreadPoolExecutor() as executor: - futures = [ + futures = { executor.submit( subproxy_post_master_init, _id, @@ -332,12 +348,22 @@ def post_master_init(self, master): self.opts, self.proxy, self.utils, - ) + ): _id for _id in self.opts["proxy"].get("ids", []) - ] + } - for f in concurrent.futures.as_completed(futures): - sub_proxy_data = f.result() + for future in concurrent.futures.as_completed(futures): + try: + sub_proxy_data = future.result() + except Exception as exc: # pylint: disable=broad-except + _id = futures[future] + log.info( + "An exception occurred during initialization for %s, skipping: %s", + _id, + exc, + ) + _failed.append(_id) + continue minion_id = sub_proxy_data["proxy_opts"].get("id") if sub_proxy_data["proxy_minion"]: @@ -347,16 +373,24 @@ def post_master_init(self, master): if self.deltaproxy_opts[minion_id] and self.deltaproxy_objs[minion_id]: self.deltaproxy_objs[ minion_id - ].req_channel = salt.transport.client.AsyncReqChannel.factory( + ].req_channel = salt.channel.client.AsyncReqChannel.factory( sub_proxy_data["proxy_opts"], io_loop=self.io_loop ) else: log.debug("Initiating non-parallel startup for proxies") for _id in self.opts["proxy"].get("ids", []): - sub_proxy_data = subproxy_post_master_init( - _id, uid, self.opts, self.proxy, self.utils - ) - + try: + sub_proxy_data = subproxy_post_master_init( + _id, uid, self.opts, self.proxy, self.utils + ) + except Exception as exc: # pylint: disable=broad-except + log.info( + "An exception occurred during initialization for %s, skipping: %s", + _id, + exc, + ) + _failed.append(_id) + continue minion_id = sub_proxy_data["proxy_opts"].get("id") if 
sub_proxy_data["proxy_minion"]: @@ -366,10 +400,12 @@ def post_master_init(self, master): if self.deltaproxy_opts[minion_id] and self.deltaproxy_objs[minion_id]: self.deltaproxy_objs[ minion_id - ].req_channel = salt.transport.client.AsyncReqChannel.factory( + ].req_channel = salt.channel.client.AsyncReqChannel.factory( sub_proxy_data["proxy_opts"], io_loop=self.io_loop ) + if _failed: + log.info("Following sub proxies failed %s", _failed) self.ready = True @@ -535,15 +571,18 @@ def subproxy_post_master_init(minion_id, uid, opts, main_proxy, main_utils): } }, persist=True, + fire_event=False, ) - _proxy_minion.schedule.enable_schedule() + _proxy_minion.schedule.enable_schedule(fire_event=False) else: - _proxy_minion.schedule.delete_job("__proxy_keepalive", persist=True) + _proxy_minion.schedule.delete_job( + "__proxy_keepalive", persist=True, fire_event=False + ) return {"proxy_minion": _proxy_minion, "proxy_opts": proxyopts} -def target(cls, minion_instance, opts, data, connected): +def target(cls, minion_instance, opts, data, connected, creds_map): """ Handle targeting of the minion. 
@@ -556,6 +595,8 @@ def target(cls, minion_instance, opts, data, connected): minion_instance.opts["id"], opts["id"], ) + if creds_map: + salt.crypt.AsyncAuth.creds_map = creds_map if not hasattr(minion_instance, "proc_dir"): uid = salt.utils.user.get_uid(user=opts.get("user", None)) @@ -1024,21 +1065,23 @@ def handle_decoded_payload(self, data): instance = self multiprocessing_enabled = self.opts.get("multiprocessing", True) name = "ProcessPayload(jid={})".format(data["jid"]) + creds_map = None if multiprocessing_enabled: if salt.utils.platform.spawning_platform(): # let python reconstruct the minion on the other side if we"re # running on spawning platforms instance = None + creds_map = salt.crypt.AsyncAuth.creds_map with default_signals(signal.SIGINT, signal.SIGTERM): process = SignalHandlingProcess( target=target, - args=(self, instance, self.opts, data, self.connected), + args=(self, instance, self.opts, data, self.connected, creds_map), name=name, ) else: process = threading.Thread( target=target, - args=(self, instance, self.opts, data, self.connected), + args=(self, instance, self.opts, data, self.connected, creds_map), name=name, ) diff --git a/salt/metaproxy/proxy.py b/salt/metaproxy/proxy.py index a399c15ef16e..04d4c02c75a6 100644 --- a/salt/metaproxy/proxy.py +++ b/salt/metaproxy/proxy.py @@ -2,6 +2,7 @@ # Proxy minion metaproxy modules # +import copy import logging import os import signal @@ -91,10 +92,13 @@ def post_master_init(self, master): self.opts["proxy"] = self.opts["pillar"]["proxy"] if self.opts.get("proxy_merge_pillar_in_opts"): - # Override proxy opts with pillar data when the user required. + # Override proxy opts with pillar data when the user required. But do + # not override master in opts. 
+ pillar = copy.deepcopy(self.opts["pillar"]) + pillar.pop("master", None) self.opts = salt.utils.dictupdate.merge( self.opts, - self.opts["pillar"], + pillar, strategy=self.opts.get("proxy_merge_pillar_in_opts_strategy"), merge_lists=self.opts.get("proxy_deep_merge_pillar_in_opts", False), ) @@ -309,13 +313,15 @@ def post_master_init(self, master): self.ready = True -def target(cls, minion_instance, opts, data, connected): +def target(cls, minion_instance, opts, data, connected, creds_map): """ Handle targeting of the minion. Calling _thread_multi_return or _thread_return depending on a single or multiple commands. """ + if creds_map: + salt.crypt.AsyncAuth.creds_map = creds_map if not minion_instance: minion_instance = cls(opts) minion_instance.connected = connected @@ -814,21 +820,23 @@ def handle_decoded_payload(self, data): instance = self multiprocessing_enabled = self.opts.get("multiprocessing", True) name = "ProcessPayload(jid={})".format(data["jid"]) + creds_map = None if multiprocessing_enabled: if salt.utils.platform.spawning_platform(): # let python reconstruct the minion on the other side if we're # running on windows instance = None + creds_map = salt.crypt.AsyncAuth.creds_map with default_signals(signal.SIGINT, signal.SIGTERM): process = SignalHandlingProcess( target=self._target, name=name, - args=(instance, self.opts, data, self.connected), + args=(instance, self.opts, data, self.connected, creds_map), ) else: process = threading.Thread( target=self._target, - args=(instance, self.opts, data, self.connected), + args=(instance, self.opts, data, self.connected, creds_map), name=name, ) diff --git a/salt/minion.py b/salt/minion.py index 2515c36d5da6..29afda235042 100644 --- a/salt/minion.py +++ b/salt/minion.py @@ -44,6 +44,7 @@ import salt.utils.dictupdate import salt.utils.error import salt.utils.event +import salt.utils.extmods import salt.utils.files import salt.utils.jid import salt.utils.minion @@ -113,6 +114,29 @@ # 6. 
Handle publications +def _sync_grains(opts): + # need sync of custom grains as may be used in pillar compilation + # if coming up initially and remote client, the first sync _grains + # doesn't have opts["master_uri"] set yet during the sync, so need + # to force local, otherwise will throw an exception when attempting + # to retrieve opts["master_uri"] when retrieving key for remote communication + # in addition opts sometimes does not contain extmod_whitelist and extmod_blacklist + # hence set those to defaults, empty dict, if not part of opts, as ref'd in + # salt.utils.extmod sync function + if opts.get("extmod_whitelist", None) is None: + opts["extmod_whitelist"] = {} + + if opts.get("extmod_blacklist", None) is None: + opts["extmod_blacklist"] = {} + + if opts.get("file_client", "remote") == "remote" and not opts.get( + "master_uri", None + ): + salt.utils.extmods.sync(opts, "grains", force_local=True) + else: + salt.utils.extmods.sync(opts, "grains") + + def resolve_dns(opts, fallback=True): """ Resolves the master_ip and master_uri options @@ -327,9 +351,12 @@ def load_args_and_kwargs(func, args, data=None, ignore_invalid=False): invalid_kwargs = [] for arg in args: - if isinstance(arg, dict) and arg.pop("__kwarg__", False) is True: + if isinstance(arg, dict) and arg.get("__kwarg__", False) is True: # if the arg is a dict with __kwarg__ == True, then its a kwarg for key, val in arg.items(): + # Skip __kwarg__ when checking kwargs + if key == "__kwarg__": + continue if argspec.keywords or key in argspec.args: # Function supports **kwargs or is a positional argument to # the function. 
@@ -917,6 +944,7 @@ def __init__(self, opts, context=None): # Late setup of the opts grains, so we can log from the grains module import salt.loader + _sync_grains(opts) opts["grains"] = salt.loader.grains(opts) super().__init__(opts) @@ -925,7 +953,18 @@ def __init__(self, opts, context=None): "use_master_when_local", False ): io_loop = salt.ext.tornado.ioloop.IOLoop.current() - io_loop.run_sync(lambda: self.eval_master(self.opts, failed=True)) + + @salt.ext.tornado.gen.coroutine + def eval_master(): + """ + Wrap eval master in order to close the returned publish channel. + """ + master, pub_channel = yield self.eval_master(self.opts, failed=True) + pub_channel.close() + + io_loop.run_sync( + lambda: eval_master() # pylint: disable=unnecessary-lambda + ) self.gen_modules(initial_load=True, context=context) # If configured, cache pillar data on the minion @@ -1363,7 +1402,7 @@ def connect_master(self, failed=False): ) # a long-running req channel - self.req_channel = salt.transport.client.AsyncReqChannel.factory( + self.req_channel = salt.channel.client.AsyncReqChannel.factory( self.opts, io_loop=self.io_loop ) @@ -1607,7 +1646,9 @@ def _send_req_sync(self, load, timeout): "minion", opts=self.opts, listen=False ) as event: return event.fire_event( - load, "__master_req_channel_payload", timeout=timeout + load, + f"__master_req_channel_payload/{self.opts['master']}", + timeout=timeout, ) @salt.ext.tornado.gen.coroutine @@ -1624,7 +1665,9 @@ def _send_req_async(self, load, timeout): "minion", opts=self.opts, listen=False ) as event: ret = yield event.fire_event_async( - load, "__master_req_channel_payload", timeout=timeout + load, + f"__master_req_channel_payload/{self.opts['master']}", + timeout=timeout, ) raise salt.ext.tornado.gen.Return(ret) @@ -1763,6 +1806,7 @@ def _handle_decoded_payload(self, data): # python needs to be able to reconstruct the reference on the other # side. 
instance = self + creds_map = None multiprocessing_enabled = self.opts.get("multiprocessing", True) name = "ProcessPayload(jid={})".format(data["jid"]) if multiprocessing_enabled: @@ -1770,17 +1814,18 @@ def _handle_decoded_payload(self, data): # let python reconstruct the minion on the other side if we're # running on windows instance = None + creds_map = salt.crypt.AsyncAuth.creds_map with default_signals(signal.SIGINT, signal.SIGTERM): process = SignalHandlingProcess( target=self._target, name=name, - args=(instance, self.opts, data, self.connected), + args=(instance, self.opts, data, self.connected, creds_map), ) process.register_after_fork_method(salt.utils.crypt.reinit_crypto) else: process = threading.Thread( target=self._target, - args=(instance, self.opts, data, self.connected), + args=(instance, self.opts, data, self.connected, creds_map), name=name, ) @@ -1804,7 +1849,9 @@ def ctx(self): return exitstack @classmethod - def _target(cls, minion_instance, opts, data, connected): + def _target(cls, minion_instance, opts, data, connected, creds_map): + if creds_map: + salt.crypt.AsyncAuth.creds_map = creds_map if not minion_instance: minion_instance = cls(opts, load_grains=False) minion_instance.connected = connected @@ -1998,7 +2045,20 @@ def _thread_return(cls, minion_instance, opts, data): ret["return"] = "ERROR executing '{}': {}".format(function_name, exc) ret["out"] = "nested" ret["retcode"] = salt.defaults.exitcodes.EX_GENERIC + except SaltClientError as exc: + log.error( + "Problem executing '%s': %s", + function_name, + exc, + ) + ret["return"] = "ERROR executing '{}': {}".format(function_name, exc) + ret["out"] = "nested" + ret["retcode"] = salt.defaults.exitcodes.EX_GENERIC except TypeError as exc: + # XXX: This can be extremely misleading when something outside of an + # execution module call raises a TypeError. Make this its own + # type of exception when we start validating state and + # execution argument module inputs. 
msg = "Passed invalid arguments to {}: {}\n{}".format( function_name, exc, @@ -2040,6 +2100,8 @@ def _thread_return(cls, minion_instance, opts, data): ret["jid"] = data["jid"] ret["fun"] = data["fun"] ret["fun_args"] = data["arg"] + if "user" in data: + ret["user"] = data["user"] if "master_id" in data: ret["master_id"] = data["master_id"] if "metadata" in data: @@ -2159,6 +2221,8 @@ def _thread_multi_return(cls, minion_instance, opts, data): ret["jid"] = data["jid"] ret["fun"] = data["fun"] ret["fun_args"] = data["arg"] + if "user" in data: + ret["user"] = data["user"] if "metadata" in data: ret["metadata"] = data["metadata"] if minion_instance.connected: @@ -2683,6 +2747,8 @@ def handle_event(self, package): if "proxy_target" in data and self.opts.get("metaproxy") == "deltaproxy": proxy_target = data["proxy_target"] + if proxy_target not in self.deltaproxy_objs: + raise salt.ext.tornado.gen.Return() _minion = self.deltaproxy_objs[proxy_target] else: _minion = self @@ -2694,11 +2760,22 @@ def handle_event(self, package): notify=data.get("notify", False), ) elif tag.startswith("__master_req_channel_payload"): - yield _minion.req_channel.send( - data, - timeout=_minion._return_retry_timer(), - tries=_minion.opts["return_retry_tries"], - ) + job_master = tag.rsplit("/", 1)[1] + if job_master == self.opts["master"]: + try: + yield _minion.req_channel.send( + data, + timeout=_minion._return_retry_timer(), + tries=_minion.opts["return_retry_tries"], + ) + except salt.exceptions.SaltReqTimeoutError: + log.error("Timeout encountered while sending %r request", data) + else: + log.debug( + "Skipping req for other master: cmd=%s master=%s", + data["cmd"], + job_master, + ) elif tag.startswith("pillar_refresh"): yield _minion.pillar_refresh( force_refresh=data.get("force_refresh", False), @@ -2815,10 +2892,8 @@ def handle_event(self, package): self.opts["master"], ) - self.req_channel = ( - salt.transport.client.AsyncReqChannel.factory( - self.opts, io_loop=self.io_loop - ) + 
self.req_channel = salt.channel.client.AsyncReqChannel.factory( + self.opts, io_loop=self.io_loop ) # put the current schedule into the new loaders @@ -3304,7 +3379,7 @@ def timeout_handler(*args): data["to"], io_loop=self.io_loop, callback=lambda _: None, - **kwargs + **kwargs, ) def _send_req_sync(self, load, timeout): @@ -3363,7 +3438,7 @@ def tune_in_no_block(self): # add handler to subscriber self.pub_channel.on_recv(self._process_cmd_socket) self.req_channel = salt.channel.client.ReqChannel.factory(self.opts) - self.async_req_channel = salt.channel.client.ReqChannel.factory(self.opts) + self.async_req_channel = salt.channel.client.AsyncReqChannel.factory(self.opts) def _process_cmd_socket(self, payload): if payload is not None and payload["enc"] == "aes": @@ -3863,10 +3938,10 @@ def _handle_decoded_payload(self, data): return mp_call(self, data) @classmethod - def _target(cls, minion_instance, opts, data, connected): + def _target(cls, minion_instance, opts, data, connected, creds_map): mp_call = _metaproxy_call(opts, "target") - return mp_call(cls, minion_instance, opts, data, connected) + return mp_call(cls, minion_instance, opts, data, connected, creds_map) @classmethod def _thread_return(cls, minion_instance, opts, data): @@ -3897,6 +3972,8 @@ def gen_modules(self, initial_load=False, context=None): salt '*' sys.reload_modules """ + # need sync of custom grains as may be used in pillar compilation + salt.utils.extmods.sync(self.opts, "grains") self.opts["grains"] = salt.loader.grains(self.opts) self.opts["pillar"] = salt.pillar.get_pillar( self.opts, diff --git a/salt/modules/acme.py b/salt/modules/acme.py index 3a15662eddc1..7952acc1aad9 100644 --- a/salt/modules/acme.py +++ b/salt/modules/acme.py @@ -2,7 +2,7 @@ ACME / Let's Encrypt module =========================== -.. versionadded:: 2016.3 +.. 
versionadded:: 2016.3.0 This module currently looks for certbot script in the $PATH as - certbot, diff --git a/salt/modules/apcups.py b/salt/modules/apcups.py index 084720f890b1..2b653061db5e 100644 --- a/salt/modules/apcups.py +++ b/salt/modules/apcups.py @@ -113,6 +113,3 @@ def status_battery(): return not data["TONBATT"] == "0 Seconds" return {"Error": "Battery status not available."} - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/aptpkg.py b/salt/modules/aptpkg.py index 8d609c831696..6014129d58dc 100644 --- a/salt/modules/aptpkg.py +++ b/salt/modules/aptpkg.py @@ -203,16 +203,27 @@ def repo_line(self): repo_line.append("#") repo_line.append(self.type) - opts = [] + opts = _get_opts(self.line) if self.architectures: - opts.append("arch={}".format(",".join(self.architectures))) + archs = ",".join(self.architectures) + opts["arch"]["full"] = "arch={}".format(archs) + opts["arch"]["value"] = self.architectures if self.signedby: - opts.append("signed-by={}".format(self.signedby)) + opts["signedby"]["full"] = "signed-by={}".format(self.signedby) + opts["signedby"]["value"] = self.signedby - if opts: - repo_line.append("[{}]".format(" ".join(opts))) + ordered_opts = [ + opt_type for opt_type, opt in opts.items() if opt["full"] != "" + ] + + for opt in opts.values(): + if opt["full"] != "": + ordered_opts[opt["index"]] = opt["full"] + + if ordered_opts: + repo_line.append("[{}]".format(" ".join(ordered_opts))) - repo_line = repo_line + [self.uri, self.dist, " ".join(self.comps)] + repo_line += [self.uri, self.dist, " ".join(self.comps)] if self.comment: repo_line.append("#{}".format(self.comment)) return " ".join(repo_line) + "\n" @@ -632,7 +643,7 @@ def install( reinstall=False, downloadonly=False, ignore_epoch=False, - **kwargs + **kwargs, ): """ .. 
versionchanged:: 2015.8.12,2016.3.3,2016.11.0 @@ -1003,7 +1014,7 @@ def install( unhold(pkgs=to_unhold) for cmd in cmds: - out = _call_apt(cmd) + out = _call_apt(cmd, **kwargs) if out["retcode"] != 0 and out["stderr"]: errors.append(out["stderr"]) @@ -1747,21 +1758,29 @@ def _get_opts(line): def _split_repo_str(repo): """ - Return APT source entry as a tuple. + Return APT source entry as a dictionary """ - split = SourceEntry(repo) + entry = SourceEntry(repo) + invalid = entry.invalid if not HAS_APT: - signedby = split.signedby + signedby = entry.signedby else: signedby = _get_opts(line=repo)["signedby"].get("value", "") - return ( - split.type, - split.architectures, - split.uri, - split.dist, - split.comps, - signedby, - ) + if signedby: + # python3-apt does not support signedby. So if signedby + # is in the repo we have to check our code to see if the + # repo is invalid ourselves. + _, invalid, _, _ = _invalid(repo) + + return { + "invalid": invalid, + "type": entry.type, + "architectures": entry.architectures, + "uri": entry.uri, + "dist": entry.dist, + "comps": entry.comps, + "signedby": signedby, + } def _consolidate_repo_sources(sources): @@ -1972,19 +1991,12 @@ def get_repo(repo, **kwargs): if repos: try: - ( - repo_type, - repo_architectures, - repo_uri, - repo_dist, - repo_comps, - repo_signedby, - ) = _split_repo_str(repo) + repo_entry = _split_repo_str(repo) if ppa_auth: - uri_match = re.search("(http[s]?://)(.+)", repo_uri) + uri_match = re.search("(http[s]?://)(.+)", repo_entry["uri"]) if uri_match: if not uri_match.group(2).startswith(ppa_auth): - repo_uri = "{}{}@{}".format( + repo_entry["uri"] = "{}{}@{}".format( uri_match.group(1), ppa_auth, uri_match.group(2) ) except SyntaxError: @@ -1995,13 +2007,13 @@ def get_repo(repo, **kwargs): for source in repos.values(): for sub in source: if ( - sub["type"] == repo_type - and sub["uri"] == repo_uri - and sub["dist"] == repo_dist + sub["type"] == repo_entry["type"] + and sub["uri"].rstrip("/") == 
repo_entry["uri"].rstrip("/") + and sub["dist"] == repo_entry["dist"] ): - if not repo_comps: + if not repo_entry["comps"]: return sub - for comp in repo_comps: + for comp in repo_entry["comps"]: if comp in sub.get("comps", []): return sub return {} @@ -2049,14 +2061,7 @@ def del_repo(repo, **kwargs): if repos: deleted_from = dict() try: - ( - repo_type, - repo_architectures, - repo_uri, - repo_dist, - repo_comps, - repo_signedby, - ) = _split_repo_str(repo) + repo_entry = _split_repo_str(repo) except SyntaxError: raise SaltInvocationError( "Error: repo '{}' not a well formatted definition".format(repo) @@ -2064,15 +2069,15 @@ def del_repo(repo, **kwargs): for source in repos: if ( - source.type == repo_type - and source.architectures == repo_architectures - and source.uri == repo_uri - and source.dist == repo_dist + source.type == repo_entry["type"] + and source.architectures == repo_entry["architectures"] + and source.uri.rstrip("/") == repo_entry["uri"].rstrip("/") + and source.dist == repo_entry["dist"] ): s_comps = set(source.comps) - r_comps = set(repo_comps) - if s_comps.intersection(r_comps): + r_comps = set(repo_entry["comps"]) + if s_comps.intersection(r_comps) or (not s_comps and not r_comps): deleted_from[source.file] = 0 source.comps = list(s_comps.difference(r_comps)) if not source.comps: @@ -2085,15 +2090,15 @@ def del_repo(repo, **kwargs): # measure if ( is_ppa - and repo_type == "deb" + and repo_entry["type"] == "deb" and source.type == "deb-src" - and source.uri == repo_uri - and source.dist == repo_dist + and source.uri == repo_entry["uri"] + and source.dist == repo_entry["dist"] ): s_comps = set(source.comps) - r_comps = set(repo_comps) - if s_comps.intersection(r_comps): + r_comps = set(repo_entry["comps"]) + if s_comps.intersection(r_comps) or (not s_comps and not r_comps): deleted_from[source.file] = 0 source.comps = list(s_comps.difference(r_comps)) if not source.comps: @@ -2749,23 +2754,22 @@ def mod_repo(repo, saltenv="base", aptkey=True, 
**kwargs): mod_source = None try: - ( - repo_type, - repo_architectures, - repo_uri, - repo_dist, - repo_comps, - repo_signedby, - ) = _split_repo_str(repo) + repo_entry = _split_repo_str(repo) + if repo_entry.get("invalid"): + raise SaltInvocationError( + f"Name {repo} is not valid. This must be the complete repo entry as seen in the sources file" + ) except SyntaxError: raise SyntaxError( "Error: repo '{}' not a well formatted definition".format(repo) ) - full_comp_list = {comp.strip() for comp in repo_comps} + full_comp_list = {comp.strip() for comp in repo_entry["comps"]} no_proxy = __salt__["config.option"]("no_proxy") - kwargs["signedby"] = pathlib.Path(repo_signedby) if repo_signedby else "" + kwargs["signedby"] = ( + pathlib.Path(repo_entry["signedby"]) if repo_entry["signedby"] else "" + ) if not aptkey and not kwargs["signedby"]: raise SaltInvocationError("missing 'signedby' option when apt-key is missing") @@ -2813,13 +2817,17 @@ def mod_repo(repo, saltenv="base", aptkey=True, **kwargs): else: if not aptkey: key_file = kwargs["signedby"] - add_repo_key( + if not add_repo_key( keyid=key, keyserver=keyserver, aptkey=False, keydir=key_file.parent, keyfile=key_file, - ) + ): + raise CommandExecutionError( + f"Error: Could not add key: {key}" + ) + else: cmd = [ "apt-key", @@ -2859,7 +2867,7 @@ def mod_repo(repo, saltenv="base", aptkey=True, **kwargs): func_kwargs["keydir"] = kwargs.get("signedby").parent if not add_repo_key(path=str(fn_), aptkey=False, **func_kwargs): - return False + raise CommandExecutionError(f"Error: Could not add key: {str(fn_)}") else: cmd = ["apt-key", "add", str(fn_)] out = __salt__["cmd.run_stdout"](cmd, python_shell=False, **kwargs) @@ -2888,7 +2896,7 @@ def mod_repo(repo, saltenv="base", aptkey=True, **kwargs): if "architectures" in kwargs: kwargs["architectures"] = kwargs["architectures"].split(",") else: - kwargs["architectures"] = repo_architectures + kwargs["architectures"] = repo_entry["architectures"] if "disabled" in 
kwargs: kwargs["disabled"] = salt.utils.data.is_true(kwargs["disabled"]) @@ -2904,9 +2912,9 @@ def mod_repo(repo, saltenv="base", aptkey=True, **kwargs): # we are not returning bogus data because the source line # has already been modified on a previous run. repo_matches = ( - apt_source.type == repo_type - and apt_source.uri.rstrip("/") == repo_uri.rstrip("/") - and apt_source.dist == repo_dist + apt_source.type == repo_entry["type"] + and apt_source.uri.rstrip("/") == repo_entry["uri"].rstrip("/") + and apt_source.dist == repo_entry["dist"] ) kw_matches = apt_source.dist == kw_dist and apt_source.type == kw_type @@ -2924,6 +2932,7 @@ def mod_repo(repo, saltenv="base", aptkey=True, **kwargs): if "comments" in kwargs: kwargs["comments"] = salt.utils.pkg.deb.combine_comments(kwargs["comments"]) + repo_source_entry = SourceEntry(repo) if not mod_source: mod_source = SourceEntry(repo) if "comments" in kwargs: @@ -2932,12 +2941,7 @@ def mod_repo(repo, saltenv="base", aptkey=True, **kwargs): elif "comments" in kwargs: mod_source.comment = kwargs["comments"] - if HAS_APT: - # workaround until python3-apt supports signedby - if str(mod_source) != str(SourceEntry(repo)) and "signed-by" in str(mod_source): - rline = SourceEntry(repo) - mod_source.line = rline.line - + mod_source.line = repo_source_entry.line if not mod_source.line.endswith("\n"): mod_source.line = mod_source.line + "\n" @@ -2945,8 +2949,8 @@ def mod_repo(repo, saltenv="base", aptkey=True, **kwargs): if key in _MODIFY_OK and hasattr(mod_source, key): setattr(mod_source, key, kwargs[key]) - if mod_source.uri != repo_uri: - mod_source.uri = repo_uri + if mod_source.uri != repo_entry["uri"]: + mod_source.uri = repo_entry["uri"] mod_source.line = mod_source.str() sources.save() @@ -3502,7 +3506,7 @@ def _get_http_proxy_url(): def list_downloaded(root=None, **kwargs): """ - .. versionadded:: 3000? + .. versionadded:: 3000 List prefetched packages downloaded by apt in the local disk. 
diff --git a/salt/modules/at.py b/salt/modules/at.py index bf77cf0b7bdc..9c646dfb2e8c 100644 --- a/salt/modules/at.py +++ b/salt/modules/at.py @@ -245,6 +245,7 @@ def at(*args, **kwargs): # pylint: disable=C0103 salt '*' at.at [tag=] [runas=] salt '*' at.at 12:05am '/sbin/reboot' tag=reboot salt '*' at.at '3:05am +3 days' 'bin/myscript' tag=nightly runas=jim + salt '*' at.at '"22:02"' 'bin/myscript' tag=nightly runas=jim """ if len(args) < 2: diff --git a/salt/modules/at_solaris.py b/salt/modules/at_solaris.py index 462cf9b8c94a..ee8e2e67b358 100644 --- a/salt/modules/at_solaris.py +++ b/salt/modules/at_solaris.py @@ -342,6 +342,3 @@ def jobcheck(**kwargs): return {"error": "You have given a condition"} return _atq(**kwargs) - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/bridge.py b/salt/modules/bridge.py index 1bac60f2b09e..edc9c9dc3773 100644 --- a/salt/modules/bridge.py +++ b/salt/modules/bridge.py @@ -489,6 +489,3 @@ def stp(br=None, state="disable", iface=None): return _os_dispatch("stp", br, states[state], iface) else: return False - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/chocolatey.py b/salt/modules/chocolatey.py index caa7b0bae8f8..ade057da95c6 100644 --- a/salt/modules/chocolatey.py +++ b/salt/modules/chocolatey.py @@ -1,5 +1,5 @@ """ -A dead simple module wrapping calls to the Chocolatey package manager +A module that wraps calls to the Chocolatey package manager (http://chocolatey.org) .. versionadded:: 2014.1.0 @@ -40,13 +40,10 @@ def __virtual__(): salt-minion running as SYSTEM. 
""" if not salt.utils.platform.is_windows(): - return (False, "Cannot load module chocolatey: Chocolatey requires Windows") + return False, "Chocolatey: Requires Windows" if __grains__["osrelease"] in ("XP", "2003Server"): - return ( - False, - "Cannot load module chocolatey: Chocolatey requires Windows Vista or later", - ) + return False, "Chocolatey: Requires Windows Vista or later" return __virtualname__ @@ -111,6 +108,7 @@ def _find_chocolatey(): os.path.join( os.environ.get("ProgramData"), "Chocolatey", "bin", "chocolatey.exe" ), + os.path.join(os.environ.get("ProgramData"), "Chocolatey", "bin", "choco.exe"), os.path.join( os.environ.get("SystemDrive"), "Chocolatey", "bin", "chocolatey.bat" ), @@ -207,7 +205,7 @@ def bootstrap(force=False, source=None): except CommandExecutionError: choc_path = None if choc_path and not force: - return "Chocolatey found at {}".format(choc_path) + return f"Chocolatey found at {choc_path}" temp_dir = tempfile.gettempdir() @@ -335,7 +333,7 @@ def bootstrap(force=False, source=None): if not os.path.exists(script): raise CommandExecutionError( - "Failed to find Chocolatey installation script: {}".format(script) + f"Failed to find Chocolatey installation script: {script}" ) # Run the Chocolatey bootstrap @@ -354,12 +352,12 @@ def unbootstrap(): """ Uninstall chocolatey from the system by doing the following: - .. versionadded:: 3001 - - Delete the Chocolatey Directory - Remove Chocolatey from the path - Remove Chocolatey environment variables + .. 
versionadded:: 3001 + Returns: list: A list of items that were removed, otherwise an empty list @@ -377,7 +375,7 @@ def unbootstrap(): if os.path.exists(choco_dir): log.debug("Removing Chocolatey directory: %s", choco_dir) __salt__["file.remove"](path=choco_dir, force=True) - removed.append("Removed Directory: {}".format(choco_dir)) + removed.append(f"Removed Directory: {choco_dir}") else: known_paths = [ os.path.join(os.environ.get("ProgramData"), "Chocolatey"), @@ -387,7 +385,7 @@ def unbootstrap(): if os.path.exists(path): log.debug("Removing Chocolatey directory: %s", path) __salt__["file.remove"](path=path, force=True) - removed.append("Removed Directory: {}".format(path)) + removed.append(f"Removed Directory: {path}") # Delete all Chocolatey environment variables for env_var in __salt__["environ.items"](): @@ -399,14 +397,14 @@ def unbootstrap(): __salt__["environ.setval"]( key=env_var, val=False, false_unsets=True, permanent="HKCU" ) - removed.append("Removed Environment Var: {}".format(env_var)) + removed.append(f"Removed Environment Var: {env_var}") # Remove Chocolatey from the path: for path in __salt__["win_path.get_path"](): if "chocolatey" in path.lower(): log.debug("Removing Chocolatey path item: %s", path) __salt__["win_path.remove"](path=path, rehash=True) - removed.append("Removed Path Item: {}".format(path)) + removed.append(f"Removed Path Item: {path}") return removed @@ -440,10 +438,10 @@ def list_( None is passed. Default is None. local_only (bool): - Display packages only installed locally. Default is False. + Only display packages that are installed locally. Default is False. exact (bool): - Display only packages that match ``narrow`` exactly. Default is + Only display packages that match ``narrow`` exactly. Default is False. .. 
versionadded:: 2017.7.0 @@ -459,7 +457,17 @@ def list_( salt '*' chocolatey.list all_versions=True """ choc_path = _find_chocolatey() - cmd = [choc_path, "list"] + # https://docs.chocolatey.org/en-us/guides/upgrading-to-chocolatey-v2-v6 + if Version(chocolatey_version()) < Version("2.0.0"): + cmd = [choc_path, "list"] + if local_only: + cmd.append("--local-only") + else: + if local_only: + # Starting with 2.0.0, list only returns local packages + cmd = [choc_path, "list"] + else: + cmd = [choc_path, "search"] if narrow: cmd.append(narrow) if salt.utils.data.is_true(all_versions): @@ -468,8 +476,6 @@ def list_( cmd.append("--prerelease") if source: cmd.extend(["--source", source]) - if local_only: - cmd.append("--local-only") if exact: cmd.append("--exact") @@ -517,7 +523,11 @@ def list_webpi(): salt '*' chocolatey.list_webpi """ choc_path = _find_chocolatey() - cmd = [choc_path, "list", "--source", "webpi"] + # https://docs.chocolatey.org/en-us/guides/upgrading-to-chocolatey-v2-v6 + if Version(chocolatey_version()) < Version("2.0.0"): + cmd = [choc_path, "list", "--source", "webpi"] + else: + cmd = [choc_path, "search", "--source", "webpi"] result = __salt__["cmd.run_all"](cmd, python_shell=False) if result["retcode"] != 0: @@ -542,7 +552,11 @@ def list_windowsfeatures(): salt '*' chocolatey.list_windowsfeatures """ choc_path = _find_chocolatey() - cmd = [choc_path, "list", "--source", "windowsfeatures"] + # https://docs.chocolatey.org/en-us/guides/upgrading-to-chocolatey-v2-v6 + if Version(chocolatey_version()) < Version("2.0.0"): + cmd = [choc_path, "list", "--source", "windowsfeatures"] + else: + cmd = [choc_path, "search", "--source", "windowsfeatures"] result = __salt__["cmd.run_all"](cmd, python_shell=False) if result["retcode"] != 0: @@ -576,12 +590,12 @@ def install( version (str): Install a specific version of the package. Defaults to latest - version. Default is None. + version. Default is ``None``. 
source (str): Chocolatey repository (directory, share or remote URL feed) the package comes from. Defaults to the official Chocolatey feed. - Default is None. + Default is ``None``. Alternate Sources: @@ -593,36 +607,38 @@ def install( force (bool): Reinstall the current version of an existing package. Do not use - with ``allow_multiple``. Default is False. + with ``allow_multiple``. Default is ``False``. pre_versions (bool): - Include pre-release packages. Default is False. + Include pre-release packages. Default is ``False``. install_args (str): A list of install arguments you want to pass to the installation - process i.e product key or feature list. Default is None. + process, i.e. product key or feature list. Default is ``None``. override_args (bool): Set to true if you want to override the original install arguments (for the native installer) in the package and use your own. When - this is set to False install_args will be appended to the end of the - default arguments. Default is None. + this is set to ``False`` install_args will be appended to the end of + the default arguments. Default is ``None``. force_x86 (bool): - Force x86 (32bit) installation on 64 bit systems. Default is False. + Force x86 (32bit) installation on 64bit systems. Default is + ``False``. package_args (str): - Arguments you want to pass to the package. Default is None. + Arguments you want to pass to the package. Default is ``None``. allow_multiple (bool): Allow multiple versions of the package to be installed. Do not use - with ``force``. Does not work with all packages. Default is False. + with ``force``. Does not work with all packages. Default is + ``False``. .. versionadded:: 2017.7.0 execution_timeout (str): Chocolatey execution timeout value you want to pass to the - installation process. Default is None. + installation process. Default is ``None``. .. 
versionadded:: 2018.3.0 @@ -687,18 +703,21 @@ def install_cygwin(name, install_args=None, override_args=False): """ Instructs Chocolatey to install a package via Cygwin. - name - The name of the package to be installed. Only accepts a single argument. + Args: - install_args - A list of install arguments you want to pass to the installation process - i.e product key or feature list + name (str): + The name of the package to be installed. Only accepts a single + argument. - override_args - Set to true if you want to override the original install arguments (for - the native installer) in the package and use your own. When this is set - to False install_args will be appended to the end of the default - arguments + install_args (str): + A list of install arguments you want to pass to the installation + process, i.e. product key or feature list + + override_args (bool): + Set to ``True`` if you want to override the original install + arguments (for the native installer) in the package and use your + own. When this is set to ``False`` install_args will be appended to + the end of the default arguments CLI Example: @@ -716,22 +735,25 @@ def install_gem(name, version=None, install_args=None, override_args=False): """ Instructs Chocolatey to install a package via Ruby's Gems. - name - The name of the package to be installed. Only accepts a single argument. + Args: - version - Install a specific version of the package. Defaults to latest version - available. + name (str): + The name of the package to be installed. Only accepts a single + argument. - install_args - A list of install arguments you want to pass to the installation process - i.e product key or feature list + version (str): + Install a specific version of the package. Defaults to the latest + version available. - override_args - Set to true if you want to override the original install arguments (for - the native installer) in the package and use your own. 
When this is set - to False install_args will be appended to the end of the default - arguments + install_args (str): + A list of install arguments you want to pass to the installation + process, i.e. product key or feature list + + override_args (bool): + Set to ``True`` if you want to override the original install + arguments (for the native installer) in the package and use your + own. When this is set to ``False`` install_args will be appended to + the end of the default arguments CLI Example: @@ -760,16 +782,19 @@ def install_missing(name, version=None, source=None): ``installmissing`` is deprecated as of that version and will be removed in Chocolatey 1.0. - name - The name of the package to be installed. Only accepts a single argument. + Args: - version - Install a specific version of the package. Defaults to latest version - available. + name (str): + The name of the package to be installed. Only accepts a single + argument. - source - Chocolatey repository (directory, share or remote URL feed) the package - comes from. Defaults to the official Chocolatey feed. + version (str): + Install a specific version of the package. Defaults to the latest + version available. + + source (str): + Chocolatey repository (directory, share or remote URL feed) the + package comes from. Defaults to the official Chocolatey feed. CLI Example: @@ -803,22 +828,25 @@ def install_python(name, version=None, install_args=None, override_args=False): """ Instructs Chocolatey to install a package via Python's easy_install. - name - The name of the package to be installed. Only accepts a single argument. + Args: + + name (str): + The name of the package to be installed. Only accepts a single + argument. - version - Install a specific version of the package. Defaults to latest version - available. + version (str): + Install a specific version of the package. Defaults to the latest + version available. 
- install_args - A list of install arguments you want to pass to the installation process - i.e product key or feature list + install_args (str): + A list of install arguments you want to pass to the installation + process, i.e. product key or feature list. - override_args - Set to true if you want to override the original install arguments (for - the native installer) in the package and use your own. When this is set - to False install_args will be appended to the end of the default - arguments + override_args (bool): + Set to ``True`` if you want to override the original install + arguments (for the native installer) in the package and use your + own. When this is set to ``False`` install_args will be appended to + the end of the default arguments. CLI Example: @@ -842,8 +870,11 @@ def install_windowsfeatures(name): Instructs Chocolatey to install a Windows Feature via the Deployment Image Servicing and Management tool. - name - The name of the feature to be installed. Only accepts a single argument. + Args: + + name (str): + The name of the feature to be installed. Only accepts a single + argument. CLI Example: @@ -858,18 +889,21 @@ def install_webpi(name, install_args=None, override_args=False): """ Instructs Chocolatey to install a package via the Microsoft Web PI service. - name - The name of the package to be installed. Only accepts a single argument. + Args: + + name (str): + The name of the package to be installed. Only accepts a single + argument. - install_args - A list of install arguments you want to pass to the installation process - i.e product key or feature list + install_args (str): + A list of install arguments you want to pass to the installation + process, i.e. product key or feature list. - override_args - Set to true if you want to override the original install arguments (for - the native installer) in the package and use your own. 
When this is set - to False install_args will be appended to the end of the default - arguments + override_args (bool): + Set to ``True`` if you want to override the original install + arguments (for the native installer) in the package and use your + own. When this is set to ``False`` install_args will be appended to + the end of the default arguments. CLI Example: @@ -883,27 +917,35 @@ def install_webpi(name, install_args=None, override_args=False): ) -def uninstall(name, version=None, uninstall_args=None, override_args=False): +def uninstall( + name, + version=None, + uninstall_args=None, + override_args=False, + force=False, +): """ Instructs Chocolatey to uninstall a package. - name - The name of the package to be uninstalled. Only accepts a single - argument. + Args: - version - Uninstalls a specific version of the package. Defaults to latest version - installed. + name (str): + The name of the package to be uninstalled. Only accepts a single + argument. - uninstall_args - A list of uninstall arguments you want to pass to the uninstallation - process i.e product key or feature list + version (str): + Uninstalls a specific version of the package. Defaults to the latest + version installed. + + uninstall_args (str): + A list of uninstall arguments you want to pass to the uninstallation + process, i.e. product key or feature list. - override_args - Set to true if you want to override the original uninstall arguments - (for the native uninstaller) in the package and use your own. When this - is set to False uninstall_args will be appended to the end of the - default arguments + override_args + Set to ``True`` if you want to override the original uninstall + arguments (for the native uninstaller) in the package and use your + own. When this is set to ``False`` uninstall_args will be appended + to the end of the default arguments. 
CLI Example: @@ -920,11 +962,13 @@ def uninstall(name, version=None, uninstall_args=None, override_args=False): if uninstall_args: cmd.extend(["--uninstallarguments", uninstall_args]) if override_args: - cmd.extend(["--overridearguments"]) + cmd.append("--overridearguments") + if force: + cmd.append("--force") cmd.extend(_yes()) result = __salt__["cmd.run_all"](cmd, python_shell=False) - if result["retcode"] not in [0, 1605, 1614, 1641]: + if result["retcode"] not in [0, 1, 1605, 1614, 1641]: err = "Running chocolatey failed: {}".format(result["stdout"]) raise CommandExecutionError(err) @@ -963,26 +1007,27 @@ def upgrade( package comes from. Defaults to the official Chocolatey feed. force (bool): - Reinstall the **same** version already installed + Reinstall the **same** version already installed. pre_versions (bool): - Include pre-release packages in comparison. Defaults to False. + Include pre-release packages in comparison. Defaults to ``False``. install_args (str): A list of install arguments you want to pass to the installation - process i.e product key or feature list + process, i.e. product key or feature list. - override_args (str): - Set to true if you want to override the original install arguments - (for the native installer) in the package and use your own. When - this is set to False install_args will be appended to the end of the - default arguments + override_args (bool): + Set to ``True`` if you want to override the original install + arguments (for the native installer) in the package and use your + own. When this is set to ``False`` install_args will be appended to + the end of the default arguments. - force_x86 - Force x86 (32bit) installation on 64 bit systems. Defaults to false. + force_x86 (bool): + Force x86 (32bit) installation on 64bit systems. Defaults to + ``False``. - package_args - A list of arguments you want to pass to the package + package_args (str): + A list of arguments you want to pass to the package. 
Returns: str: Results of the ``chocolatey`` command @@ -1030,16 +1075,18 @@ def update(name, source=None, pre_versions=False): """ Instructs Chocolatey to update packages on the system. - name - The name of the package to update, or "all" to update everything - installed on the system. + Args: - source - Chocolatey repository (directory, share or remote URL feed) the package - comes from. Defaults to the official Chocolatey feed. + name (str): + The name of the package to update, or "all" to update everything + installed on the system. - pre_versions - Include pre-release packages in comparison. Defaults to False. + source (str): + Chocolatey repository (directory, share or remote URL feed) the + package comes from. Defaults to the official Chocolatey feed. + + pre_versions (bool): + Include pre-release packages in comparison. Defaults to ``False``. CLI Example: @@ -1083,15 +1130,15 @@ def version(name, check_remote=False, source=None, pre_versions=False): check_remote (bool): Get the version number of the latest package from the remote feed. - Default is False. + Default is ``False``. source (str): Chocolatey repository (directory, share or remote URL feed) the package comes from. Defaults to the official Chocolatey feed. - Default is None. + Default is ``None``. pre_versions (bool): - Include pre-release packages in comparison. Default is False. + Include pre-release packages in comparison. Default is ``False``. 
Returns: dict: A dictionary of currently installed software and versions @@ -1107,21 +1154,23 @@ def version(name, check_remote=False, source=None, pre_versions=False): packages = {} lower_name = name.lower() - for pkg in installed: - if lower_name in pkg.lower(): - packages[pkg] = installed[pkg] + if installed: + for pkg in installed: + if lower_name == pkg.lower(): + packages.setdefault(pkg, {}) + packages[pkg]["installed"] = installed[pkg] if check_remote: - available = list_(narrow=name, pre_versions=pre_versions, source=source) - - for pkg in packages: - # Grab the current version from the package that was installed - packages[pkg] = {"installed": installed[pkg]} - - # If there's a remote package available, then also include that - # in the dictionary that we return. - if pkg in available: - packages[pkg]["available"] = available[pkg] + # If there's a remote package available, then also include that + # in the dictionary that we return. + available = list_( + narrow=name, local_only=False, pre_versions=pre_versions, source=source + ) + if available: + for pkg in available: + if lower_name == pkg.lower(): + packages.setdefault(pkg, {}) + packages[pkg]["available"] = available[pkg] return packages @@ -1130,25 +1179,27 @@ def add_source(name, source_location, username=None, password=None, priority=Non """ Instructs Chocolatey to add a source. - name - The name of the source to be added as a chocolatey repository. + Args: + + name (str): + The name of the source to be added as a chocolatey repository. - source - Location of the source you want to work with. + source (str): + Location of the source you want to work with. - username - Provide username for chocolatey sources that need authentication - credentials. + username (str): + Provide username for chocolatey sources that need authentication + credentials. - password - Provide password for chocolatey sources that need authentication - credentials. 
+ password (str): + Provide password for chocolatey sources that need authentication + credentials. - priority - The priority order of this source as compared to other sources, - lower is better. Defaults to 0 (no priority). All priorities - above 0 will be evaluated first, then zero-based values will be - evaluated in config file order. + priority (int): + The priority order of this source as compared to other sources, + lower is better. Defaults to 0 (no priority). All priorities + above 0 will be evaluated first, then zero-based values will be + evaluated in config file order. CLI Example: @@ -1187,12 +1238,13 @@ def _change_source_state(name, state): """ Instructs Chocolatey to change the state of a source. - name - Name of the repository to affect. + Args: - state - State in which you want the chocolatey repository. + name (str): + Name of the repository to affect. + state (str): + State in which you want the chocolatey repository. """ cmd = [_find_chocolatey(), "source", state, "--name", name] result = __salt__["cmd.run_all"](cmd, python_shell=False) @@ -1208,8 +1260,10 @@ def enable_source(name): """ Instructs Chocolatey to enable a source. - name - Name of the source repository to enable. + Args: + + name (str): + Name of the source repository to enable. CLI Example: @@ -1225,8 +1279,10 @@ def disable_source(name): """ Instructs Chocolatey to disable a source. - name - Name of the source repository to disable. + Args: + + name (str): + Name of the source repository to disable. CLI Example: diff --git a/salt/modules/chroot.py b/salt/modules/chroot.py index aa0fe7685452..78c974cc1963 100644 --- a/salt/modules/chroot.py +++ b/salt/modules/chroot.py @@ -242,13 +242,14 @@ def _create_and_execute_salt_state(root, chunks, file_refs, test, hash_type): """ # Create the tar containing the state pkg and relevant files. 
salt.client.ssh.wrapper.state._cleanup_slsmod_low_data(chunks) - trans_tar = salt.client.ssh.state.prep_trans_tar( - salt.fileclient.get_file_client(__opts__), - chunks, - file_refs, - __pillar__.value(), - root, - ) + with salt.fileclient.get_file_client(__opts__) as client: + trans_tar = salt.client.ssh.state.prep_trans_tar( + client, + chunks, + file_refs, + __pillar__.value(), + root, + ) trans_tar_sum = salt.utils.hashutils.get_hash(trans_tar, hash_type) ret = None @@ -314,6 +315,7 @@ def sls(root, mods, saltenv="base", test=None, exclude=None, **kwargs): # Clone the options data and apply some default values. May not be # needed, as this module just delegate opts = salt.utils.state.get_sls_opts(__opts__, **kwargs) + st_ = salt.client.ssh.state.SSHHighState( opts, pillar, __salt__, salt.fileclient.get_file_client(__opts__) ) @@ -321,30 +323,31 @@ def sls(root, mods, saltenv="base", test=None, exclude=None, **kwargs): if isinstance(mods, str): mods = mods.split(",") - high_data, errors = st_.render_highstate({saltenv: mods}) - if exclude: - if isinstance(exclude, str): - exclude = exclude.split(",") - if "__exclude__" in high_data: - high_data["__exclude__"].extend(exclude) - else: - high_data["__exclude__"] = exclude - - high_data, ext_errors = st_.state.reconcile_extend(high_data) - errors += ext_errors - errors += st_.state.verify_high(high_data) - if errors: - return errors - - high_data, req_in_errors = st_.state.requisite_in(high_data) - errors += req_in_errors - if errors: - return errors - - high_data = st_.state.apply_exclude(high_data) - - # Compile and verify the raw chunks - chunks = st_.state.compile_high_data(high_data) + with st_: + high_data, errors = st_.render_highstate({saltenv: mods}) + if exclude: + if isinstance(exclude, str): + exclude = exclude.split(",") + if "__exclude__" in high_data: + high_data["__exclude__"].extend(exclude) + else: + high_data["__exclude__"] = exclude + + high_data, ext_errors = 
st_.state.reconcile_extend(high_data) + errors += ext_errors + errors += st_.state.verify_high(high_data) + if errors: + return errors + + high_data, req_in_errors = st_.state.requisite_in(high_data) + errors += req_in_errors + if errors: + return errors + + high_data = st_.state.apply_exclude(high_data) + + # Compile and verify the raw chunks + chunks = st_.state.compile_high_data(high_data) file_refs = salt.client.ssh.state.lowstate_file_refs( chunks, salt.client.ssh.wrapper.state._merge_extra_filerefs( diff --git a/salt/modules/cmdmod.py b/salt/modules/cmdmod.py index 29d1c500782c..4822971d5522 100644 --- a/salt/modules/cmdmod.py +++ b/salt/modules/cmdmod.py @@ -217,7 +217,7 @@ def _gather_pillar(pillarenv, pillar_override): """ pillar = salt.pillar.get_pillar( __opts__, - __grains__, + __grains__.value(), __opts__["id"], __opts__["saltenv"], pillar_override=pillar_override, diff --git a/salt/modules/container_resource.py b/salt/modules/container_resource.py index a29cba2e468b..0a44ce3e5180 100644 --- a/salt/modules/container_resource.py +++ b/salt/modules/container_resource.py @@ -8,13 +8,11 @@ :mod:`docker ` execution modules. They provide for common logic to be re-used for common actions. 
""" - - import copy import functools import logging import os -import pipes +import shlex import time import traceback @@ -68,18 +66,16 @@ def _nsenter(pid): """ Return the nsenter command to attach to the named container """ - return "nsenter --target {} --mount --uts --ipc --net --pid".format(pid) + return f"nsenter --target {pid} --mount --uts --ipc --net --pid" -def _get_md5(name, path, run_func): +def _get_sha256(name, path, run_func): """ - Get the MD5 checksum of a file from a container + Get the sha256 checksum of a file from a container """ - output = run_func(name, "md5sum {}".format(pipes.quote(path)), ignore_retcode=True)[ - "stdout" - ] + ret = run_func(name, f"sha256sum {shlex.quote(path)}", ignore_retcode=True) try: - return output.split()[0] + return ret["stdout"].split()[0] except IndexError: # Destination file does not exist or could not be accessed return None @@ -102,10 +98,10 @@ def cache_file(source): if source.startswith("salt://"): cached_source = __salt__["cp.cache_file"](source) if not cached_source: - raise CommandExecutionError("Unable to cache {}".format(source)) + raise CommandExecutionError(f"Unable to cache {source}") return cached_source except AttributeError: - raise SaltInvocationError("Invalid source file {}".format(source)) + raise SaltInvocationError(f"Invalid source file {source}") return source @@ -164,55 +160,47 @@ def run( if exec_driver == "lxc-attach": full_cmd = "lxc-attach " if path: - full_cmd += "-P {} ".format(pipes.quote(path)) + full_cmd += f"-P {shlex.quote(path)} " if keep_env is not True: full_cmd += "--clear-env " if "PATH" not in to_keep: - full_cmd += "--set-var {} ".format(PATH) + full_cmd += f"--set-var {PATH} " # --clear-env results in a very restrictive PATH # (/bin:/usr/bin), use a good fallback. 
full_cmd += " ".join( [ - "--set-var {}={}".format(x, pipes.quote(os.environ[x])) + f"--set-var {x}={shlex.quote(os.environ[x])}" for x in to_keep if x in os.environ ] ) - full_cmd += " -n {} -- {}".format(pipes.quote(name), cmd) + full_cmd += f" -n {shlex.quote(name)} -- {cmd}" elif exec_driver == "nsenter": - pid = __salt__["{}.pid".format(container_type)](name) - full_cmd = "nsenter --target {} --mount --uts --ipc --net --pid -- ".format(pid) + pid = __salt__[f"{container_type}.pid"](name) + full_cmd = f"nsenter --target {pid} --mount --uts --ipc --net --pid -- " if keep_env is not True: full_cmd += "env -i " if "PATH" not in to_keep: - full_cmd += "{} ".format(PATH) + full_cmd += f"{PATH} " full_cmd += " ".join( - [ - "{}={}".format(x, pipes.quote(os.environ[x])) - for x in to_keep - if x in os.environ - ] + [f"{x}={shlex.quote(os.environ[x])}" for x in to_keep if x in os.environ] ) - full_cmd += " {}".format(cmd) + full_cmd += f" {cmd}" elif exec_driver == "docker-exec": # We're using docker exec on the CLI as opposed to via docker-py, since # the Docker API doesn't return stdout and stderr separately. 
full_cmd = "docker exec " if stdin: full_cmd += "-i " - full_cmd += "{} ".format(name) + full_cmd += f"{name} " if keep_env is not True: full_cmd += "env -i " if "PATH" not in to_keep: - full_cmd += "{} ".format(PATH) + full_cmd += f"{PATH} " full_cmd += " ".join( - [ - "{}={}".format(x, pipes.quote(os.environ[x])) - for x in to_keep - if x in os.environ - ] + [f"{x}={shlex.quote(os.environ[x])}" for x in to_keep if x in os.environ] ) - full_cmd += " {}".format(cmd) + full_cmd += f" {cmd}" if not use_vt: ret = __salt__[cmd_func]( @@ -299,13 +287,13 @@ def copy_to( salt myminion container_resource.copy_to mycontainer /local/file/path /container/file/path container_type=docker exec_driver=nsenter """ # Get the appropriate functions - state = __salt__["{}.state".format(container_type)] + state = __salt__[f"{container_type}.state"] def run_all(*args, **akwargs): akwargs = copy.deepcopy(akwargs) if container_type in ["lxc"] and "path" not in akwargs: akwargs["path"] = path - return __salt__["{}.run_all".format(container_type)](*args, **akwargs) + return __salt__[f"{container_type}.run_all"](*args, **akwargs) state_kwargs = {} cmd_kwargs = {"ignore_retcode": True} @@ -321,7 +309,7 @@ def _state(name): c_state = _state(name) if c_state != "running": - raise CommandExecutionError("Container '{}' is not running".format(name)) + raise CommandExecutionError(f"Container '{name}' is not running") local_file = cache_file(source) source_dir, source_name = os.path.split(local_file) @@ -330,17 +318,14 @@ def _state(name): if not os.path.isabs(local_file): raise SaltInvocationError("Source path must be absolute") elif not os.path.exists(local_file): - raise SaltInvocationError("Source file {} does not exist".format(local_file)) + raise SaltInvocationError(f"Source file {local_file} does not exist") elif not os.path.isfile(local_file): raise SaltInvocationError("Source must be a regular file") # Destination file sanity checks if not os.path.isabs(dest): raise 
SaltInvocationError("Destination path must be absolute") - if ( - run_all(name, "test -d {}".format(pipes.quote(dest)), **cmd_kwargs)["retcode"] - == 0 - ): + if run_all(name, f"test -d {shlex.quote(dest)}", **cmd_kwargs)["retcode"] == 0: # Destination is a directory, full path to dest file will include the # basename of the source file. dest = os.path.join(dest, source_name) @@ -350,14 +335,12 @@ def _state(name): # parent directory. dest_dir, dest_name = os.path.split(dest) if ( - run_all(name, "test -d {}".format(pipes.quote(dest_dir)), **cmd_kwargs)[ - "retcode" - ] + run_all(name, f"test -d {shlex.quote(dest_dir)}", **cmd_kwargs)["retcode"] != 0 ): if makedirs: result = run_all( - name, "mkdir -p {}".format(pipes.quote(dest_dir)), **cmd_kwargs + name, f"mkdir -p {shlex.quote(dest_dir)}", **cmd_kwargs ) if result["retcode"] != 0: error = ( @@ -375,10 +358,7 @@ def _state(name): ) if ( not overwrite - and run_all(name, "test -e {}".format(pipes.quote(dest)), **cmd_kwargs)[ - "retcode" - ] - == 0 + and run_all(name, f"test -e {shlex.quote(dest)}", **cmd_kwargs)["retcode"] == 0 ): raise CommandExecutionError( "Destination path {} already exists. Use overwrite=True to " @@ -386,8 +366,8 @@ def _state(name): ) # Before we try to replace the file, compare checksums. 
- source_md5 = __salt__["file.get_sum"](local_file, "md5") - if source_md5 == _get_md5(name, dest, run_all): + source_sha256 = __salt__["file.get_sum"](local_file, "sha256") + if source_sha256 == _get_sha256(name, dest, run_all): log.debug("%s and %s:%s are the same file, skipping copy", source, name, dest) return True @@ -401,14 +381,14 @@ def _state(name): if exec_driver == "lxc-attach": lxcattach = "lxc-attach" if path: - lxcattach += " -P {}".format(pipes.quote(path)) + lxcattach += f" -P {shlex.quote(path)}" copy_cmd = ( 'cat "{0}" | {4} --clear-env --set-var {1} -n {2} -- tee "{3}"'.format( local_file, PATH, name, dest, lxcattach ) ) elif exec_driver == "nsenter": - pid = __salt__["{}.pid".format(container_type)](name) + pid = __salt__[f"{container_type}.pid"](name) copy_cmd = 'cat "{}" | {} env -i {} tee "{}"'.format( local_file, _nsenter(pid), PATH, dest ) @@ -417,4 +397,4 @@ def _state(name): local_file, name, PATH, dest ) __salt__["cmd.run"](copy_cmd, python_shell=True, output_loglevel="quiet") - return source_md5 == _get_md5(name, dest, run_all) + return source_sha256 == _get_sha256(name, dest, run_all) diff --git a/salt/modules/cp.py b/salt/modules/cp.py index 8daab8bb674d..64666aefa5d8 100644 --- a/salt/modules/cp.py +++ b/salt/modules/cp.py @@ -20,6 +20,7 @@ import salt.utils.templates import salt.utils.url from salt.exceptions import CommandExecutionError +from salt.loader.dunder import __file_client__ log = logging.getLogger(__name__) @@ -41,7 +42,7 @@ def _gather_pillar(pillarenv, pillar_override): """ pillar = salt.pillar.get_pillar( __opts__, - __grains__, + __grains__.value(), __opts__["id"], __opts__["saltenv"], pillar_override=pillar_override, @@ -157,26 +158,16 @@ def _error(msg): pass -def _mk_client(): - """ - Create a file client and add it to the context. 
- - Each file client needs to correspond to a unique copy - of the opts dictionary, therefore it's hashed by the - id of the __opts__ dict - """ - if "cp.fileclient_{}".format(id(__opts__)) not in __context__: - __context__[ - "cp.fileclient_{}".format(id(__opts__)) - ] = salt.fileclient.get_file_client(__opts__) - - def _client(): """ - Return a client, hashed by the list of masters + Return a file client + + If the __file_client__ context is set return it, otherwise create a new + file client using __opts__. """ - _mk_client() - return __context__["cp.fileclient_{}".format(id(__opts__))] + if __file_client__: + return __file_client__.value() + return salt.fileclient.get_file_client(__opts__) def _render_filenames(path, dest, saltenv, template, **kw): @@ -294,7 +285,8 @@ def get_file( if not hash_file(path, saltenv): return "" else: - return _client().get_file(path, dest, makedirs, saltenv, gzip) + with _client() as client: + return client.get_file(path, dest, makedirs, saltenv, gzip) def envs(): @@ -307,7 +299,8 @@ salt '*' cp.envs """ - return _client().envs() + with _client() as client: + return client.envs() def get_template(path, dest, template="jinja", saltenv=None, makedirs=False, **kwargs): @@ -336,7 +329,8 @@ kwargs["grains"] = __grains__ if "opts" not in kwargs: kwargs["opts"] = __opts__ - return _client().get_template(path, dest, template, makedirs, saltenv, **kwargs) + with _client() as client: + return client.get_template(path, dest, template, makedirs, saltenv, **kwargs) def get_dir(path, dest, saltenv=None, template=None, gzip=None, **kwargs): @@ -359,7 +353,8 @@ (path, dest) = _render_filenames(path, dest, saltenv, template, **kwargs) - return _client().get_dir(path, dest, saltenv, gzip) + with _client() as client: + return client.get_dir(path, dest, saltenv, gzip) def get_url(path,
dest="", saltenv=None, makedirs=False, source_hash=None): @@ -417,13 +412,16 @@ def get_url(path, dest="", saltenv=None, makedirs=False, source_hash=None): saltenv = __opts__["saltenv"] or "base" if isinstance(dest, str): - result = _client().get_url( - path, dest, makedirs, saltenv, source_hash=source_hash - ) + with _client() as client: + result = client.get_url( + path, dest, makedirs, saltenv, source_hash=source_hash + ) else: - result = _client().get_url( - path, None, makedirs, saltenv, no_cache=True, source_hash=source_hash - ) + + with _client() as client: + result = client.get_url( + path, None, makedirs, saltenv, no_cache=True, source_hash=source_hash + ) if not result: log.error( "Unable to fetch file %s from saltenv %s.", @@ -550,9 +548,14 @@ def cache_file(path, saltenv=None, source_hash=None, verify_ssl=True, use_etag=F if senv: saltenv = senv - result = _client().cache_file( - path, saltenv, source_hash=source_hash, verify_ssl=verify_ssl, use_etag=use_etag - ) + with _client() as client: + result = client.cache_file( + path, + saltenv, + source_hash=source_hash, + verify_ssl=verify_ssl, + use_etag=use_etag, + ) if not result and not use_etag: log.error("Unable to cache file '%s' from saltenv '%s'.", path, saltenv) if path_is_remote: @@ -587,7 +590,8 @@ def cache_dest(url, saltenv=None): """ if not saltenv: saltenv = __opts__["saltenv"] or "base" - return _client().cache_dest(url, saltenv) + with _client() as client: + return client.cache_dest(url, saltenv) def cache_files(paths, saltenv=None): @@ -631,7 +635,8 @@ def cache_files(paths, saltenv=None): """ if not saltenv: saltenv = __opts__["saltenv"] or "base" - return _client().cache_files(paths, saltenv) + with _client() as client: + return client.cache_files(paths, saltenv) def cache_dir( @@ -672,7 +677,8 @@ def cache_dir( """ if not saltenv: saltenv = __opts__["saltenv"] or "base" - return _client().cache_dir(path, saltenv, include_empty, include_pat, exclude_pat) + with _client() as client: + 
return client.cache_dir(path, saltenv, include_empty, include_pat, exclude_pat) def cache_master(saltenv=None): @@ -690,7 +696,8 @@ def cache_master(saltenv=None): """ if not saltenv: saltenv = __opts__["saltenv"] or "base" - return _client().cache_master(saltenv) + with _client() as client: + return client.cache_master(saltenv) def cache_local_file(path): @@ -717,7 +724,8 @@ def cache_local_file(path): return path_cached # The file hasn't been cached or has changed; cache it - return _client().cache_local_file(path) + with _client() as client: + return client.cache_local_file(path) def list_states(saltenv=None): @@ -735,7 +743,8 @@ def list_states(saltenv=None): """ if not saltenv: saltenv = __opts__["saltenv"] or "base" - return _client().list_states(saltenv) + with _client() as client: + return client.list_states(saltenv) def list_master(saltenv=None, prefix=""): @@ -753,7 +762,8 @@ def list_master(saltenv=None, prefix=""): """ if not saltenv: saltenv = __opts__["saltenv"] or "base" - return _client().file_list(saltenv, prefix) + with _client() as client: + return client.file_list(saltenv, prefix) def list_master_dirs(saltenv=None, prefix=""): @@ -771,7 +781,8 @@ def list_master_dirs(saltenv=None, prefix=""): """ if not saltenv: saltenv = __opts__["saltenv"] or "base" - return _client().dir_list(saltenv, prefix) + with _client() as client: + return client.dir_list(saltenv, prefix) def list_master_symlinks(saltenv=None, prefix=""): @@ -789,7 +800,8 @@ def list_master_symlinks(saltenv=None, prefix=""): """ if not saltenv: saltenv = __opts__["saltenv"] or "base" - return _client().symlink_list(saltenv, prefix) + with _client() as client: + return client.symlink_list(saltenv, prefix) def list_minion(saltenv=None): @@ -807,7 +819,8 @@ def list_minion(saltenv=None): """ if not saltenv: saltenv = __opts__["saltenv"] or "base" - return _client().file_local_list(saltenv) + with _client() as client: + return client.file_local_list(saltenv) def is_cached(path, 
saltenv=None): @@ -831,7 +844,8 @@ def is_cached(path, saltenv=None): if senv: saltenv = senv - return _client().is_cached(path, saltenv) + with _client() as client: + return client.is_cached(path, saltenv) def hash_file(path, saltenv=None): @@ -856,7 +870,8 @@ def hash_file(path, saltenv=None): if senv: saltenv = senv - return _client().hash_file(path, saltenv) + with _client() as client: + return client.hash_file(path, saltenv) def stat_file(path, saltenv=None, octal=True): @@ -881,7 +896,8 @@ def stat_file(path, saltenv=None, octal=True): if senv: saltenv = senv - stat = _client().hash_and_stat_file(path, saltenv)[1] + with _client() as client: + stat = client.hash_and_stat_file(path, saltenv)[1] if stat is None: return stat return salt.utils.files.st_mode_to_octal(stat[0]) if octal is True else stat[0] diff --git a/salt/modules/cryptdev.py b/salt/modules/cryptdev.py index f4d24e5227f1..40c28d17f105 100644 --- a/salt/modules/cryptdev.py +++ b/salt/modules/cryptdev.py @@ -113,7 +113,7 @@ def active(): ret = {} # TODO: This command should be extended to collect more information, such as UUID. devices = __salt__["cmd.run_stdout"]("dmsetup ls --target crypt") - out_regex = re.compile(r"(?P\w+)\W+\((?P\d+), (?P\d+)\)") + out_regex = re.compile(r"(?P\S+)\s+\((?P\d+), (?P\d+)\)") log.debug(devices) for line in devices.split("\n"): diff --git a/salt/modules/deb_postgres.py b/salt/modules/deb_postgres.py index 3ecd4a8ba492..d92859562d46 100644 --- a/salt/modules/deb_postgres.py +++ b/salt/modules/deb_postgres.py @@ -2,10 +2,8 @@ Module to provide Postgres compatibility to salt for debian family specific tools. 
""" - - import logging -import pipes +import shlex import salt.utils.path @@ -76,7 +74,7 @@ def cluster_create( cmd += ["--data-checksums"] if wal_segsize: cmd += ["--wal-segsize", wal_segsize] - cmdstr = " ".join([pipes.quote(c) for c in cmd]) + cmdstr = " ".join([shlex.quote(c) for c in cmd]) ret = __salt__["cmd.run_all"](cmdstr, python_shell=False) if ret.get("retcode", 0) != 0: log.error("Error creating a Postgresql cluster %s/%s", version, name) @@ -97,7 +95,7 @@ def cluster_list(verbose=False): salt '*' postgres.cluster_list verbose=True """ cmd = [salt.utils.path.which("pg_lsclusters"), "--no-header"] - ret = __salt__["cmd.run_all"](" ".join([pipes.quote(c) for c in cmd])) + ret = __salt__["cmd.run_all"](" ".join([shlex.quote(c) for c in cmd])) if ret.get("retcode", 0) != 0: log.error("Error listing clusters") cluster_dict = _parse_pg_lscluster(ret["stdout"]) @@ -118,7 +116,7 @@ def cluster_exists(version, name="main"): salt '*' postgres.cluster_exists '9.3' 'main' """ - return "{}/{}".format(version, name) in cluster_list() + return f"{version}/{name}" in cluster_list() def cluster_remove(version, name="main", stop=False): @@ -141,13 +139,13 @@ def cluster_remove(version, name="main", stop=False): if stop: cmd += ["--stop"] cmd += [str(version), name] - cmdstr = " ".join([pipes.quote(c) for c in cmd]) + cmdstr = " ".join([shlex.quote(c) for c in cmd]) ret = __salt__["cmd.run_all"](cmdstr, python_shell=False) # FIXME - return Boolean ? 
if ret.get("retcode", 0) != 0: log.error("Error removing a Postgresql cluster %s/%s", version, name) else: - ret["changes"] = "Successfully removed cluster {}/{}".format(version, name) + ret["changes"] = f"Successfully removed cluster {version}/{name}" return ret @@ -158,7 +156,7 @@ def _parse_pg_lscluster(output): cluster_dict = {} for line in output.splitlines(): version, name, port, status, user, datadir, log = line.split() - cluster_dict["{}/{}".format(version, name)] = { + cluster_dict[f"{version}/{name}"] = { "port": int(port), "status": status, "user": user, diff --git a/salt/modules/defaults.py b/salt/modules/defaults.py index 2164fed22905..c807337dc2cc 100644 --- a/salt/modules/defaults.py +++ b/salt/modules/defaults.py @@ -25,8 +25,7 @@ def _mk_client(): """ Create a file client and add it to the context """ - if "cp.fileclient" not in __context__: - __context__["cp.fileclient"] = salt.fileclient.get_file_client(__opts__) + return salt.fileclient.get_file_client(__opts__) def _load(formula): @@ -38,13 +37,13 @@ def _load(formula): """ # Compute possibilities - _mk_client() paths = [] for ext in ("yaml", "json"): source_url = salt.utils.url.create(formula + "/defaults." 
+ ext) paths.append(source_url) # Fetch files from master - defaults_files = __context__["cp.fileclient"].cache_files(paths) + with _mk_client() as client: + defaults_files = client.cache_files(paths) for file_ in defaults_files: if not file_: diff --git a/salt/modules/dockermod.py b/salt/modules/dockermod.py index 6870c26b0e61..f9ffd2dda9e8 100644 --- a/salt/modules/dockermod.py +++ b/salt/modules/dockermod.py @@ -204,8 +204,8 @@ import json import logging import os -import pipes import re +import shlex import shutil import string import subprocess @@ -222,6 +222,7 @@ import salt.utils.json import salt.utils.path from salt.exceptions import CommandExecutionError, SaltInvocationError +from salt.loader.dunder import __file_client__ from salt.state import HighState __docformat__ = "restructuredtext en" @@ -252,7 +253,6 @@ HAS_NSENTER = bool(salt.utils.path.which("nsenter")) -# Set up logging log = logging.getLogger(__name__) # Don't shadow built-in's. @@ -326,6 +326,18 @@ def __virtual__(): return (False, "Could not import docker module, is docker-py installed?") +def _file_client(): + """ + Return a file client + + If the __file_client__ context is set return it, otherwise create a new + file client using __opts__.
+ """ + if __file_client__: + return __file_client__.value() + return salt.fileclient.get_file_client(__opts__) + + class DockerJSONDecoder(json.JSONDecoder): def decode(self, s, _w=None): objs = [] @@ -392,7 +404,7 @@ def _get_client(timeout=NOTSET, **kwargs): ) except Exception as exc: # pylint: disable=broad-except raise CommandExecutionError( - "Docker machine {} failed: {}".format(docker_machine, exc) + f"Docker machine {docker_machine} failed: {exc}" ) try: # docker-py 2.0 renamed this client attribute @@ -492,7 +504,7 @@ def _change_state(name, action, expected, *args, **kwargs): return { "result": False, "state": {"old": expected, "new": expected}, - "comment": "Container '{}' already {}".format(name, expected), + "comment": f"Container '{name}' already {expected}", } _client_wrapper(action, name, *args, **kwargs) _clear_context() @@ -526,13 +538,11 @@ def _clear_context(): pass -def _get_md5(name, path): +def _get_sha256(name, path): """ - Get the MD5 checksum of a file from a container + Get the sha256 checksum of a file from a container """ - output = run_stdout( - name, "md5sum {}".format(pipes.quote(path)), ignore_retcode=True - ) + output = run_stdout(name, f"sha256sum {shlex.quote(path)}", ignore_retcode=True) try: return output.split()[0] except IndexError: @@ -611,7 +621,7 @@ def _scrub_links(links, name): if isinstance(links, list): ret = [] for l in links: - ret.append(l.replace("/{}/".format(name), "/", 1)) + ret.append(l.replace(f"/{name}/", "/", 1)) else: ret = links @@ -634,11 +644,11 @@ def _size_fmt(num): try: num = int(num) if num < 1024: - return "{} bytes".format(num) + return f"{num} bytes" num /= 1024.0 for unit in ("KiB", "MiB", "GiB", "TiB", "PiB"): if num < 1024.0: - return "{:3.1f} {}".format(num, unit) + return f"{num:3.1f} {unit}" num /= 1024.0 except Exception: # pylint: disable=broad-except log.error("Unable to format file size for '%s'", num) @@ -653,7 +663,7 @@ def _client_wrapper(attr, *args, **kwargs): catch_api_errors = 
kwargs.pop("catch_api_errors", True) func = getattr(__context__["docker.client"], attr, None) if func is None or not hasattr(func, "__call__"): - raise SaltInvocationError("Invalid client action '{}'".format(attr)) + raise SaltInvocationError(f"Invalid client action '{attr}'") if attr in ("push", "pull"): try: # Refresh auth config from config.json @@ -673,7 +683,7 @@ def _client_wrapper(attr, *args, **kwargs): if catch_api_errors: # Generic handling of Docker API errors raise CommandExecutionError( - "Error {}: {}".format(exc.response.status_code, exc.explanation) + f"Error {exc.response.status_code}: {exc.explanation}" ) else: # Allow API errors to be caught further up the stack @@ -688,9 +698,9 @@ def _client_wrapper(attr, *args, **kwargs): # If we're here, it's because an exception was caught earlier, and the # API command failed. - msg = "Unable to perform {}".format(attr) + msg = f"Unable to perform {attr}" if err: - msg += ": {}".format(err) + msg += f": {err}" raise CommandExecutionError(msg) @@ -717,7 +727,7 @@ def _import_status(data, item, repo_name, repo_tag): return elif all(x in string.hexdigits for x in status): # Status is an image ID - data["Image"] = "{}:{}".format(repo_name, repo_tag) + data["Image"] = f"{repo_name}:{repo_tag}" data["Id"] = status except (AttributeError, TypeError): pass @@ -876,7 +886,7 @@ def _get_create_kwargs( ignore_collisions=False, validate_ip_addrs=True, client_args=None, - **kwargs + **kwargs, ): """ Take input kwargs and return a kwargs dict to pass to docker-py's @@ -894,7 +904,7 @@ def _get_create_kwargs( skip_translate=skip_translate, ignore_collisions=ignore_collisions, validate_ip_addrs=validate_ip_addrs, - **__utils__["args.clean_kwargs"](**kwargs) + **__utils__["args.clean_kwargs"](**kwargs), ) if networks: @@ -907,7 +917,7 @@ def _get_create_kwargs( log.error( "docker.create: Error getting client args: '%s'", exc, exc_info=True ) - raise CommandExecutionError("Failed to get client args: {}".format(exc)) + raise 
CommandExecutionError(f"Failed to get client args: {exc}") full_host_config = {} host_kwargs = {} @@ -1468,15 +1478,15 @@ def login(*registries): results = ret.setdefault("Results", {}) for registry in registries: if registry not in registry_auth: - errors.append("No match found for registry '{}'".format(registry)) + errors.append(f"No match found for registry '{registry}'") continue try: username = registry_auth[registry]["username"] password = registry_auth[registry]["password"] except TypeError: - errors.append("Invalid configuration for registry '{}'".format(registry)) + errors.append(f"Invalid configuration for registry '{registry}'") except KeyError as exc: - errors.append("Missing {} for registry '{}'".format(exc, registry)) + errors.append(f"Missing {exc} for registry '{registry}'") else: cmd = ["docker", "login", "-u", username, "-p", password] if registry.lower() != "hub": @@ -1562,7 +1572,7 @@ def logout(*registries): results = ret.setdefault("Results", {}) for registry in registries: if registry not in registry_auth: - errors.append("No match found for registry '{}'".format(registry)) + errors.append(f"No match found for registry '{registry}'") continue else: cmd = ["docker", "logout"] @@ -1684,7 +1694,7 @@ def exists(name): salt myminion docker.exists mycontainer """ - contextkey = "docker.exists.{}".format(name) + contextkey = f"docker.exists.{name}" if contextkey in __context__: return __context__[contextkey] try: @@ -1775,7 +1785,7 @@ def history(name, quiet=False): ) for param in ("Size",): if param in step: - step["{}_Human".format(param)] = _size_fmt(step[param]) + step[f"{param}_Human"] = _size_fmt(step[param]) ret.append(copy.deepcopy(step)) if quiet: return [x.get("Command") for x in ret] @@ -1837,9 +1847,7 @@ def images(verbose=False, **kwargs): ) for param in ("Size", "VirtualSize"): if param in bucket.get(img_id, {}): - bucket[img_id]["{}_Human".format(param)] = _size_fmt( - bucket[img_id][param] - ) + bucket[img_id][f"{param}_Human"] = 
_size_fmt(bucket[img_id][param]) context_data = __context__.get("docker.images", {}) ret = copy.deepcopy(context_data.get("tagged", {})) @@ -1922,7 +1930,7 @@ def inspect(name): raise raise CommandExecutionError( - "Error 404: No such image/container/volume/network: {}".format(name) + f"Error 404: No such image/container/volume/network: {name}" ) @@ -1978,7 +1986,7 @@ def inspect_image(name): ret = _client_wrapper("inspect_image", name) for param in ("Size", "VirtualSize"): if param in ret: - ret["{}_Human".format(param)] = _size_fmt(ret[param]) + ret[f"{param}_Human"] = _size_fmt(ret[param]) return ret @@ -2272,7 +2280,7 @@ def port(name, private_port=None): else: # Sanity checks if isinstance(private_port, int): - pattern = "{}/*".format(private_port) + pattern = f"{private_port}/*" else: err = ( "Invalid private_port '{}'. Must either be a port number, " @@ -2393,7 +2401,7 @@ def state(name): salt myminion docker.state mycontainer """ - contextkey = "docker.state.{}".format(name) + contextkey = f"docker.state.{name}" if contextkey in __context__: return __context__[contextkey] __context__[contextkey] = _get_state(inspect_container(name)) @@ -2433,9 +2441,7 @@ def search(name, official=False, trusted=False): """ response = _client_wrapper("search", name) if not response: - raise CommandExecutionError( - "No images matched the search string '{}'".format(name) - ) + raise CommandExecutionError(f"No images matched the search string '{name}'") key_map = { "description": "Description", @@ -2550,7 +2556,7 @@ def create( ignore_collisions=False, validate_ip_addrs=True, client_timeout=salt.utils.dockermod.CLIENT_TIMEOUT, - **kwargs + **kwargs, ): """ Create a new container @@ -3276,7 +3282,7 @@ def create( skip_translate=skip_translate, ignore_collisions=ignore_collisions, validate_ip_addrs=validate_ip_addrs, - **kwargs + **kwargs, ) if unused_kwargs: @@ -3288,7 +3294,7 @@ def create( log.debug( "docker.create: creating container %susing the following arguments: %s", - 
"with name '{}' ".format(name) if name is not None else "", + f"with name '{name}' " if name is not None else "", kwargs, ) time_started = time.time() @@ -3326,7 +3332,7 @@ def run_container( replace=False, force=False, networks=None, - **kwargs + **kwargs, ): """ .. versionadded:: 2018.3.0 @@ -3428,7 +3434,7 @@ def run_container( skip_translate=skip_translate, ignore_collisions=ignore_collisions, validate_ip_addrs=validate_ip_addrs, - **kwargs + **kwargs, ) # _get_create_kwargs() will have processed auto_remove and put it into the @@ -3453,7 +3459,7 @@ def run_container( log.debug( "docker.create: creating container %susing the following arguments: %s", - "with name '{}' ".format(name) if name is not None else "", + f"with name '{name}' " if name is not None else "", kwargs, ) @@ -3493,7 +3499,7 @@ def _append_warning(ret, msg): rm_(name) except CommandExecutionError as rm_exc: exc_info.setdefault("other_errors", []).append( - "Failed to auto_remove container: {}".format(rm_exc) + f"Failed to auto_remove container: {rm_exc}" ) # Raise original exception with additional info raise CommandExecutionError(exc.__str__(), info=exc_info) @@ -3588,7 +3594,7 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False): """ c_state = state(name) if c_state != "running": - raise CommandExecutionError("Container '{}' is not running".format(name)) + raise CommandExecutionError(f"Container '{name}' is not running") # Destination file sanity checks if not os.path.isabs(dest): @@ -3614,9 +3620,7 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False): ) ) else: - raise SaltInvocationError( - "Directory {} does not exist".format(dest_dir) - ) + raise SaltInvocationError(f"Directory {dest_dir} does not exist") if not overwrite and os.path.exists(dest): raise CommandExecutionError( "Destination path {} already exists. 
Use overwrite=True to " @@ -3627,23 +3631,18 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False): if not os.path.isabs(source): raise SaltInvocationError("Source path must be absolute") else: - if ( - retcode(name, "test -e {}".format(pipes.quote(source)), ignore_retcode=True) - == 0 - ): + if retcode(name, f"test -e {shlex.quote(source)}", ignore_retcode=True) == 0: if ( - retcode( - name, "test -f {}".format(pipes.quote(source)), ignore_retcode=True - ) + retcode(name, f"test -f {shlex.quote(source)}", ignore_retcode=True) != 0 ): raise SaltInvocationError("Source must be a regular file") else: - raise SaltInvocationError("Source file {} does not exist".format(source)) + raise SaltInvocationError(f"Source file {source} does not exist") # Before we try to replace the file, compare checksums. - source_md5 = _get_md5(name, source) - if source_md5 == __salt__["file.get_sum"](dest, "md5"): + source_sha256 = _get_sha256(name, source) + if source_sha256 == __salt__["file.get_sum"](dest, "sha256"): log.debug("%s:%s and %s are the same file, skipping copy", name, source, dest) return True @@ -3652,10 +3651,10 @@ def copy_from(name, source, dest, overwrite=False, makedirs=False): try: src_path = ":".join((name, source)) except TypeError: - src_path = "{}:{}".format(name, source) + src_path = f"{name}:{source}" cmd = ["docker", "cp", src_path, dest_dir] __salt__["cmd.run"](cmd, python_shell=False) - return source_md5 == __salt__["file.get_sum"](dest, "md5") + return source_sha256 == __salt__["file.get_sum"](dest, "sha256") # Docker cp gets a file from the container, alias this to copy_from @@ -3779,7 +3778,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar salt myminion docker.export mycontainer /tmp/mycontainer.tar salt myminion docker.export mycontainer /tmp/mycontainer.tar.xz push=True """ - err = "Path '{}' is not absolute".format(path) + err = f"Path '{path}' is not absolute" try: if not os.path.isabs(path): raise 
SaltInvocationError(err) @@ -3787,7 +3786,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar raise SaltInvocationError(err) if os.path.exists(path) and not overwrite: - raise CommandExecutionError("{} already exists".format(path)) + raise CommandExecutionError(f"{path} already exists") if compression is None: if path.endswith(".tar.gz") or path.endswith(".tgz"): @@ -3810,7 +3809,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar compression = "xz" if compression and compression not in ("gzip", "bzip2", "xz"): - raise SaltInvocationError("Invalid compression type '{}'".format(compression)) + raise SaltInvocationError(f"Invalid compression type '{compression}'") parent_dir = os.path.dirname(path) if not os.path.isdir(parent_dir): @@ -3823,16 +3822,14 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar os.makedirs(parent_dir) except OSError as exc: raise CommandExecutionError( - "Unable to make parent dir {}: {}".format(parent_dir, exc) + f"Unable to make parent dir {parent_dir}: {exc}" ) if compression == "gzip": try: out = gzip.open(path, "wb") except OSError as exc: - raise CommandExecutionError( - "Unable to open {} for writing: {}".format(path, exc) - ) + raise CommandExecutionError(f"Unable to open {path} for writing: {exc}") elif compression == "bzip2": compressor = bz2.BZ2Compressor() elif compression == "xz": @@ -3870,9 +3867,7 @@ def export(name, path, overwrite=False, makedirs=False, compression=None, **kwar os.remove(path) except OSError: pass - raise CommandExecutionError( - "Error occurred during container export: {}".format(exc) - ) + raise CommandExecutionError(f"Error occurred during container export: {exc}") finally: out.close() ret = {"Time_Elapsed": time.time() - time_started} @@ -4103,7 +4098,7 @@ def build( # For the build function in the low-level API, the "tag" refers to the full # tag (e.g. myuser/myimage:mytag). 
This is different than in other # functions, where the repo and tag are passed separately. - image_tag = "{}:{}".format(repository, tag) if repository and tag else None + image_tag = f"{repository}:{tag}" if repository and tag else None time_started = time.time() response = _client_wrapper( @@ -4122,7 +4117,7 @@ def build( if not response: raise CommandExecutionError( - "Build failed for {}, no response returned from Docker API".format(path) + f"Build failed for {path}, no response returned from Docker API" ) stream_data = [] @@ -4145,7 +4140,7 @@ def build( if "Id" not in ret: # API returned information, but there was no confirmation of a # successful build. - msg = "Build failed for {}".format(path) + msg = f"Build failed for {path}" log.error(msg) log.error(stream_data) if errors: @@ -4156,7 +4151,7 @@ def build( if resolved_tag: ret["Image"] = resolved_tag else: - ret["Warning"] = "Failed to tag image as {}".format(image_tag) + ret["Warning"] = f"Failed to tag image as {image_tag}" if api_response: ret["API_Response"] = stream_data @@ -4363,7 +4358,7 @@ def import_(source, repository, tag="latest", api_response=False): if not response: raise CommandExecutionError( - "Import failed for {}, no response returned from Docker API".format(source) + f"Import failed for {source}, no response returned from Docker API" ) elif api_response: ret["API_Response"] = response @@ -4383,7 +4378,7 @@ def import_(source, repository, tag="latest", api_response=False): if "Id" not in ret: # API returned information, but there was no confirmation of a # successful push. - msg = "Import failed for {}".format(source) + msg = f"Import failed for {source}" if errors: msg += ". 
Error(s) follow:\n\n{}".format("\n\n".join(errors)) raise CommandExecutionError(msg) @@ -4458,7 +4453,7 @@ def load(path, repository=None, tag=None): local_path = __salt__["container_resource.cache_file"](path) if not os.path.isfile(local_path): - raise CommandExecutionError("Source file {} does not exist".format(path)) + raise CommandExecutionError(f"Source file {path} does not exist") pre = images(all=True) cmd = ["docker", "load", "-i", local_path] @@ -4468,7 +4463,7 @@ def load(path, repository=None, tag=None): _clear_context() post = images(all=True) if result["retcode"] != 0: - msg = "Failed to load image(s) from {}".format(path) + msg = f"Failed to load image(s) from {path}" if result["stderr"]: msg += ": {}".format(result["stderr"]) raise CommandExecutionError(msg) @@ -4489,7 +4484,7 @@ def load(path, repository=None, tag=None): # strings when passed (e.g. a numeric tag would be loaded as an int # or float), and because the tag_ function will stringify them if # need be, a str.format is the correct thing to do here. 
- tagged_image = "{}:{}".format(repository, tag) + tagged_image = f"{repository}:{tag}" try: result = tag_(top_level_images[0], repository=repository, tag=tag) ret["Image"] = tagged_image @@ -4526,7 +4521,7 @@ def layers(name): ): ret.append(line) if not ret: - raise CommandExecutionError("Image '{}' not found".format(name)) + raise CommandExecutionError(f"Image '{name}' not found") return ret @@ -4597,7 +4592,7 @@ def pull( if not response: raise CommandExecutionError( - "Pull failed for {}, no response returned from Docker API".format(image) + f"Pull failed for {image}, no response returned from Docker API" ) elif api_response: ret["API_Response"] = response @@ -4610,7 +4605,7 @@ def pull( event = salt.utils.json.loads(event) except Exception as exc: # pylint: disable=broad-except raise CommandExecutionError( - "Unable to interpret API event: '{}'".format(event), + f"Unable to interpret API event: '{event}'", info={"Error": exc.__str__()}, ) try: @@ -4692,7 +4687,7 @@ def push( if not response: raise CommandExecutionError( - "Push failed for {}, no response returned from Docker API".format(image) + f"Push failed for {image}, no response returned from Docker API" ) elif api_response: ret["API_Response"] = response @@ -4704,7 +4699,7 @@ def push( event = salt.utils.json.loads(event) except Exception as exc: # pylint: disable=broad-except raise CommandExecutionError( - "Unable to interpret API event: '{}'".format(event), + f"Unable to interpret API event: '{event}'", info={"Error": exc.__str__()}, ) try: @@ -4784,9 +4779,7 @@ def rmi(*names, **kwargs): err += "image(s): {}".format(", ".join(deps["Images"])) errors.append(err) else: - errors.append( - "Error {}: {}".format(exc.response.status_code, exc.explanation) - ) + errors.append(f"Error {exc.response.status_code}: {exc.explanation}") _clear_context() ret = { @@ -4874,7 +4867,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs salt myminion docker.save centos:7 /tmp/cent7.tar 
salt myminion docker.save 0123456789ab cdef01234567 /tmp/saved.tar """ - err = "Path '{}' is not absolute".format(path) + err = f"Path '{path}' is not absolute" try: if not os.path.isabs(path): raise SaltInvocationError(err) @@ -4882,7 +4875,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs raise SaltInvocationError(err) if os.path.exists(path) and not overwrite: - raise CommandExecutionError("{} already exists".format(path)) + raise CommandExecutionError(f"{path} already exists") if compression is None: if path.endswith(".tar.gz") or path.endswith(".tgz"): @@ -4905,7 +4898,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs compression = "xz" if compression and compression not in ("gzip", "bzip2", "xz"): - raise SaltInvocationError("Invalid compression type '{}'".format(compression)) + raise SaltInvocationError(f"Invalid compression type '{compression}'") parent_dir = os.path.dirname(path) if not os.path.isdir(parent_dir): @@ -4927,7 +4920,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs time_started = time.time() result = __salt__["cmd.run_all"](cmd, python_shell=False) if result["retcode"] != 0: - err = "Failed to save image(s) to {}".format(path) + err = f"Failed to save image(s) to {path}" if result["stderr"]: err += ": {}".format(result["stderr"]) raise CommandExecutionError(err) @@ -4937,9 +4930,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs try: out = gzip.open(path, "wb") except OSError as exc: - raise CommandExecutionError( - "Unable to open {} for writing: {}".format(path, exc) - ) + raise CommandExecutionError(f"Unable to open {path} for writing: {exc}") elif compression == "bzip2": compressor = bz2.BZ2Compressor() elif compression == "xz": @@ -4975,9 +4966,7 @@ def save(name, path, overwrite=False, makedirs=False, compression=None, **kwargs os.remove(path) except OSError: pass - raise CommandExecutionError( - 
"Error occurred during image save: {}".format(exc) - ) + raise CommandExecutionError(f"Error occurred during image save: {exc}") finally: try: # Clean up temp file @@ -5097,7 +5086,7 @@ def create_network( ignore_collisions=False, validate_ip_addrs=True, client_timeout=salt.utils.dockermod.CLIENT_TIMEOUT, - **kwargs + **kwargs, ): """ .. versionchanged:: 2018.3.0 @@ -5337,7 +5326,7 @@ def create_network( skip_translate=skip_translate, ignore_collisions=ignore_collisions, validate_ip_addrs=validate_ip_addrs, - **__utils__["args.clean_kwargs"](**kwargs) + **__utils__["args.clean_kwargs"](**kwargs), ) if "ipam" not in kwargs: @@ -5669,7 +5658,7 @@ def pause(name): return { "result": False, "state": {"old": orig_state, "new": orig_state}, - "comment": "Container '{}' is stopped, cannot pause".format(name), + "comment": f"Container '{name}' is stopped, cannot pause", } return _change_state(name, "pause", "paused") @@ -5768,7 +5757,7 @@ def start_(name): return { "result": False, "state": {"old": orig_state, "new": orig_state}, - "comment": "Container '{}' is paused, cannot start".format(name), + "comment": f"Container '{name}' is paused, cannot start", } return _change_state(name, "start", "running") @@ -5873,7 +5862,7 @@ def unpause(name): return { "result": False, "state": {"old": orig_state, "new": orig_state}, - "comment": "Container '{}' is stopped, cannot unpause".format(name), + "comment": f"Container '{name}' is stopped, cannot unpause", } return _change_state(name, "unpause", "running") @@ -5922,7 +5911,7 @@ def wait(name, ignore_already_stopped=False, fail_on_exit_status=False): # Container doesn't exist anymore return { "result": ignore_already_stopped, - "comment": "Container '{}' absent".format(name), + "comment": f"Container '{name}' absent", } already_stopped = pre == "stopped" response = _client_wrapper("wait", name) @@ -5946,7 +5935,7 @@ def wait(name, ignore_already_stopped=False, fail_on_exit_status=False): "exit_status": response, } if 
already_stopped: - result["comment"] = "Container '{}' already stopped".format(name) + result["comment"] = f"Container '{name}' already stopped" if fail_on_exit_status and result["result"]: result["result"] = result["exit_status"] == 0 return result @@ -5959,7 +5948,7 @@ def prune( build=False, volumes=False, system=None, - **filters + **filters, ): """ .. versionadded:: 2019.2.0 @@ -6644,16 +6633,8 @@ def script_retcode( )["retcode"] -def _mk_fileclient(): - """ - Create a file client and add it to the context. - """ - if "cp.fileclient" not in __context__: - __context__["cp.fileclient"] = salt.fileclient.get_file_client(__opts__) - - def _generate_tmp_path(): - return os.path.join("/tmp", "salt.docker.{}".format(uuid.uuid4().hex[:6])) + return os.path.join("/tmp", f"salt.docker.{uuid.uuid4().hex[:6]}") def _prepare_trans_tar(name, sls_opts, mods=None, pillar=None, extra_filerefs=""): @@ -6665,11 +6646,10 @@ def _prepare_trans_tar(name, sls_opts, mods=None, pillar=None, extra_filerefs="" # reuse it from salt.ssh, however this function should # be somewhere else refs = salt.client.ssh.state.lowstate_file_refs(chunks, extra_filerefs) - _mk_fileclient() - trans_tar = salt.client.ssh.state.prep_trans_tar( - __context__["cp.fileclient"], chunks, refs, pillar, name - ) - return trans_tar + with _file_client() as fileclient: + return salt.client.ssh.state.prep_trans_tar( + fileclient, chunks, refs, pillar, name + ) def _compile_state(sls_opts, mods=None): @@ -6789,7 +6769,7 @@ def call(name, function, *args, **kwargs): ] + list(args) + [ - "{}={}".format(key, value) + f"{key}={value}" for (key, value) in kwargs.items() if not key.startswith("__") ] diff --git a/salt/modules/dpkg_lowpkg.py b/salt/modules/dpkg_lowpkg.py index 2bd12a7e6150..eefd852c511e 100644 --- a/salt/modules/dpkg_lowpkg.py +++ b/salt/modules/dpkg_lowpkg.py @@ -37,12 +37,12 @@ def bin_pkg_info(path, saltenv="base"): """ .. 
versionadded:: 2015.8.0 - Parses RPM metadata and returns a dictionary of information about the + Parses DEB metadata and returns a dictionary of information about the package (name, version, etc.). path Path to the file. Can either be an absolute path to a file on the - minion, or a salt fileserver URL (e.g. ``salt://path/to/file.rpm``). + minion, or a salt fileserver URL (e.g. ``salt://path/to/file.deb``). If a salt fileserver URL is passed, the file will be cached to the minion so that it can be examined. diff --git a/salt/modules/git.py b/salt/modules/git.py index 71b771b6f1b3..62dd70289941 100644 --- a/salt/modules/git.py +++ b/salt/modules/git.py @@ -1019,7 +1019,7 @@ def clone( https_user Set HTTP Basic Auth username. Only accepted for HTTPS URLs. - .. versionadded:: 20515.5.0 + .. versionadded:: 2015.5.0 https_pass Set HTTP Basic Auth password. Only accepted for HTTPS URLs. diff --git a/salt/modules/gpg.py b/salt/modules/gpg.py index 1367036083fb..b713a0ae9fd1 100644 --- a/salt/modules/gpg.py +++ b/salt/modules/gpg.py @@ -66,6 +66,8 @@ "4": "Ultimate", } +_DEFAULT_KEY_SERVER = "keys.openpgp.org" + try: import gnupg @@ -216,7 +218,7 @@ def search_keys(text, keyserver=None, user=None): Text to search the keyserver for, e.g. email address, keyID or fingerprint. keyserver - Keyserver to use for searching for GPG keys, defaults to pgp.mit.edu. + Keyserver to use for searching for GPG keys, defaults to keys.openpgp.org. user Which user's keychain to access, defaults to user Salt is running as. 
@@ -235,7 +237,7 @@ def search_keys(text, keyserver=None, user=None): """ if not keyserver: - keyserver = "pgp.mit.edu" + keyserver = _DEFAULT_KEY_SERVER _keys = [] for _key in _search_keys(text, keyserver, user): @@ -881,7 +883,7 @@ def receive_keys(keyserver=None, keys=None, user=None, gnupghome=None): Receive key(s) from keyserver and add them to keychain keyserver - Keyserver to use for searching for GPG keys, defaults to pgp.mit.edu + Keyserver to use for searching for GPG keys, defaults to keys.openpgp.org keys The keyID(s) to retrieve from the keyserver. Can be specified as a comma @@ -911,7 +913,7 @@ def receive_keys(keyserver=None, keys=None, user=None, gnupghome=None): gpg = _create_gpg(user, gnupghome) if not keyserver: - keyserver = "pgp.mit.edu" + keyserver = _DEFAULT_KEY_SERVER if isinstance(keys, str): keys = keys.split(",") diff --git a/salt/modules/guestfs.py b/salt/modules/guestfs.py index 1d03ab693f20..2395bd2a1c39 100644 --- a/salt/modules/guestfs.py +++ b/salt/modules/guestfs.py @@ -11,6 +11,7 @@ import time import salt.utils.path +from salt.config import DEFAULT_HASH_TYPE log = logging.getLogger(__name__) @@ -51,7 +52,7 @@ def mount(location, access="rw", root=None): while True: if os.listdir(root): # Stuff is in there, don't use it - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) rand = hash_type(os.urandom(32)).hexdigest() root = os.path.join( tempfile.gettempdir(), diff --git a/salt/modules/junos.py b/salt/modules/junos.py index 33f25080e1dc..2f1f0c6ab4fb 100644 --- a/salt/modules/junos.py +++ b/salt/modules/junos.py @@ -2051,7 +2051,7 @@ def _make_source_list(dir): @_timeout_decorator -def file_compare(file1, file2, **kwargs): +def file_compare(file1, file2, **kwargs): # pragma: no cover """ Compare two files and return a dictionary indicating if they are different. 
@@ -2113,7 +2113,7 @@ def file_compare(file1, file2, **kwargs): @_timeout_decorator -def fsentry_exists(dir, **kwargs): +def fsentry_exists(dir, **kwargs): # pragma: no cover """ Returns a dictionary indicating if `dir` refers to a file or a non-file (generally a directory) in the file system, @@ -2258,7 +2258,7 @@ def routing_engine(**kwargs): @_timeout_decorator -def dir_copy(source, dest, force=False, **kwargs): +def dir_copy(source, dest, force=False, **kwargs): # pragma: no cover """ Copy a directory and recursively its contents from source to dest. diff --git a/salt/modules/lxc.py b/salt/modules/lxc.py index bea6445db987..444359bd2165 100644 --- a/salt/modules/lxc.py +++ b/salt/modules/lxc.py @@ -12,9 +12,9 @@ import difflib import logging import os -import pipes import random import re +import shlex import shutil import string import tempfile @@ -119,7 +119,7 @@ def version(): ver = Version(cversion["stdout"]) if ver < Version("1.0"): raise CommandExecutionError("LXC should be at least 1.0") - __context__[k] = "{}".format(ver) + __context__[k] = f"{ver}" return __context__.get(k, None) @@ -141,7 +141,7 @@ def _ip_sort(ip): idx = "201" elif "::" in ip: idx = "100" - return "{}___{}".format(idx, ip) + return f"{idx}___{ip}" def search_lxc_bridges(): @@ -173,7 +173,7 @@ def search_lxc_bridges(): for ifc, ip in __grains__.get("ip_interfaces", {}).items(): if ifc in running_bridges: bridges.add(ifc) - elif os.path.exists("/sys/devices/virtual/net/{}/bridge".format(ifc)): + elif os.path.exists(f"/sys/devices/virtual/net/{ifc}/bridge"): bridges.add(ifc) bridges = list(bridges) # if we found interfaces that have lxc in their names @@ -186,7 +186,7 @@ def sort_bridges(a): pref = "a" elif "br0" == a: pref = "c" - return "{}_{}".format(pref, a) + return f"{pref}_{a}" bridges.sort(key=sort_bridges) __context__["lxc.bridges"] = bridges @@ -439,12 +439,12 @@ def _cloud_get(k, default=None): if ip: fullip = ip if netmask: - fullip += "/{}".format(netmask) + fullip += 
f"/{netmask}" eth0["ipv4"] = fullip if mac is not None: eth0["mac"] = mac for ix, iopts in enumerate(_cloud_get("additional_ips", [])): - ifh = "eth{}".format(ix + 1) + ifh = f"eth{ix + 1}" ethx = nic_opts.setdefault(ifh, {}) if gw is None: gw = iopts.get("gateway", ethx.get("gateway", None)) @@ -465,7 +465,7 @@ def _cloud_get(k, default=None): ethx["ipv4"] = aip nm = iopts.get("netmask", "") if nm: - ethx["ipv4"] += "/{}".format(nm) + ethx["ipv4"] += f"/{nm}" for i in ("mac", "hwaddr"): if i in iopts: ethx["mac"] = iopts[i] @@ -543,7 +543,7 @@ def _get_profile(key, name, **kwargs): profile_match = {} else: profile_match = __salt__["config.get"]( - "lxc.{1}:{0}".format(name, key), default=None, merge="recurse" + f"lxc.{key}:{name}", default=None, merge="recurse" ) if profile_match is None: # No matching profile, make the profile an empty dict so that @@ -551,7 +551,7 @@ def _get_profile(key, name, **kwargs): profile_match = {} if not isinstance(profile_match, dict): - raise CommandExecutionError("lxc.{} must be a dictionary".format(key)) + raise CommandExecutionError(f"lxc.{key} must be a dictionary") # Overlay the kwargs to override matched profile data overrides = salt.utils.args.clean_kwargs(**copy.deepcopy(kwargs)) @@ -669,7 +669,7 @@ def _rand_cpu_str(cpu): cpu = int(cpu) avail = __salt__["status.nproc"]() if cpu < avail: - return "0-{}".format(avail) + return f"0-{avail}" to_set = set() while len(to_set) < cpu: choice = random.randint(0, avail - 1) @@ -832,7 +832,7 @@ def _network_conf(conf_tuples=None, **kwargs): "ipv6", ]: continue - ret.append({"lxc.network.{}".format(key): val}) + ret.append({f"lxc.network.{key}": val}) # gateway (in automode) must be appended following network conf ! 
if not gateway: gateway = args.get("gateway", None) @@ -892,7 +892,7 @@ def _get_lxc_default_data(**kwargs): for k in ["utsname", "rootfs"]: val = kwargs.get(k, None) if val is not None: - ret["lxc.{}".format(k)] = val + ret[f"lxc.{k}"] = val autostart = kwargs.get("autostart") # autostart can have made in kwargs, but with the None # value which is invalid, we need an explicit boolean @@ -1115,7 +1115,7 @@ def select(key, default=None): hash_ = salt.utils.hashutils.get_hash( img_tar, __salt__["config.get"]("hash_type") ) - name = "__base_{}_{}_{}".format(proto, img_name, hash_) + name = f"__base_{proto}_{img_name}_{hash_}" if not exists(name, path=path): create( name, template=template, image=image, path=path, vgname=vgname, **kwargs @@ -1125,11 +1125,11 @@ def select(key, default=None): edit_conf( info(name, path=path)["config"], out_format="commented", - **{"lxc.rootfs": rootfs} + **{"lxc.rootfs": rootfs}, ) return name elif template: - name = "__base_{}".format(template) + name = f"__base_{template}" if not exists(name, path=path): create( name, template=template, image=image, path=path, vgname=vgname, **kwargs @@ -1139,7 +1139,7 @@ def select(key, default=None): edit_conf( info(name, path=path)["config"], out_format="commented", - **{"lxc.rootfs": rootfs} + **{"lxc.rootfs": rootfs}, ) return name return "" @@ -1171,7 +1171,7 @@ def init( bootstrap_args=None, bootstrap_shell=None, bootstrap_url=None, - **kwargs + **kwargs, ): """ Initialize a new container. 
@@ -1499,7 +1499,7 @@ def select(key, default=None): try: stop(name, path=path) except (SaltInvocationError, CommandExecutionError) as exc: - ret["comment"] = "Unable to stop container: {}".format(exc) + ret["comment"] = f"Unable to stop container: {exc}" if changes: ret["changes"] = changes_dict return ret @@ -1507,7 +1507,7 @@ def select(key, default=None): try: start(name, path=path) except (SaltInvocationError, CommandExecutionError) as exc: - ret["comment"] = "Unable to stop container: {}".format(exc) + ret["comment"] = f"Unable to stop container: {exc}" if changes: ret["changes"] = changes_dict return ret @@ -1515,7 +1515,7 @@ def select(key, default=None): if remove_seed_marker: run( name, - "rm -f '{}'".format(SEED_MARKER), + f"rm -f '{SEED_MARKER}'", path=path, chroot_fallback=False, python_shell=False, @@ -1524,11 +1524,11 @@ def select(key, default=None): # set the default user/password, only the first time if ret.get("result", True) and password: gid = "/.lxc.initial_pass" - gids = [gid, "/lxc.initial_pass", "/.lxc.{}.initial_pass".format(name)] + gids = [gid, "/lxc.initial_pass", f"/.lxc.{name}.initial_pass"] if not any( retcode( name, - 'test -e "{}"'.format(x), + f'test -e "{x}"', chroot_fallback=True, path=path, ignore_retcode=True, @@ -1544,7 +1544,7 @@ def select(key, default=None): default_user not in users and retcode( name, - "id {}".format(default_user), + f"id {default_user}", python_shell=False, path=path, chroot_fallback=True, @@ -1563,7 +1563,7 @@ def select(key, default=None): encrypted=password_encrypted, ) except (SaltInvocationError, CommandExecutionError) as exc: - msg = "{}: Failed to set password".format(user) + exc.strerror + msg = f"{user}: Failed to set password" + exc.strerror # only hardfail in unrecoverable situation: # root cannot be setted up if user == "root": @@ -1591,11 +1591,11 @@ def select(key, default=None): if ret.get("result", True) and dnsservers: # retro compatibility, test also old markers gid = 
"/.lxc.initial_dns" - gids = [gid, "/lxc.initial_dns", "/lxc.{}.initial_dns".format(name)] + gids = [gid, "/lxc.initial_dns", f"/lxc.{name}.initial_dns"] if not any( retcode( name, - 'test -e "{}"'.format(x), + f'test -e "{x}"', chroot_fallback=True, path=path, ignore_retcode=True, @@ -1628,13 +1628,13 @@ def select(key, default=None): # retro compatibility, test also old markers if remove_seed_marker: - run(name, "rm -f '{}'".format(SEED_MARKER), path=path, python_shell=False) + run(name, f"rm -f '{SEED_MARKER}'", path=path, python_shell=False) gid = "/.lxc.initial_seed" gids = [gid, "/lxc.initial_seed"] if any( retcode( name, - "test -e {}".format(x), + f"test -e {x}", path=path, chroot_fallback=True, ignore_retcode=True, @@ -1703,7 +1703,7 @@ def select(key, default=None): try: stop(name, path=path) except (SaltInvocationError, CommandExecutionError) as exc: - ret["comment"] = "Unable to stop container: {}".format(exc) + ret["comment"] = f"Unable to stop container: {exc}" ret["result"] = False state_post = state(name, path=path) @@ -1711,7 +1711,7 @@ def select(key, default=None): changes.append({"state": {"old": state_pre, "new": state_post}}) if ret.get("result", True): - ret["comment"] = "Container '{}' successfully initialized".format(name) + ret["comment"] = f"Container '{name}' successfully initialized" ret["result"] = True if changes: ret["changes"] = changes_dict @@ -1834,8 +1834,8 @@ def _after_ignition_network_profile(cmd, ret, name, network_profile, path, nic_o # destroy the container if it was partially created cmd = "lxc-destroy" if path: - cmd += " -P {}".format(pipes.quote(path)) - cmd += " -n {}".format(name) + cmd += f" -P {shlex.quote(path)}" + cmd += f" -n {name}" __salt__["cmd.retcode"](cmd, python_shell=False) raise CommandExecutionError( "Container could not be created with cmd '{}': {}".format( @@ -1943,7 +1943,7 @@ def create( # Required params for 'download' template download_template_deps = ("dist", "release", "arch") - cmd = 
"lxc-create -n {}".format(name) + cmd = f"lxc-create -n {name}" profile = get_container_profile(copy.deepcopy(profile)) kw_overrides = copy.deepcopy(kwargs) @@ -1959,7 +1959,7 @@ def select(key, default=None): path = select("path") if exists(name, path=path): - raise CommandExecutionError("Container '{}' already exists".format(name)) + raise CommandExecutionError(f"Container '{name}' already exists") tvg = select("vgname") vgname = tvg if tvg else __salt__["config.get"]("lxc.vgname") @@ -1997,31 +1997,31 @@ def select(key, default=None): ) options["imgtar"] = img_tar if path: - cmd += " -P {}".format(pipes.quote(path)) + cmd += f" -P {shlex.quote(path)}" if not os.path.exists(path): os.makedirs(path) if config: - cmd += " -f {}".format(config) + cmd += f" -f {config}" if template: - cmd += " -t {}".format(template) + cmd += f" -t {template}" if backing: backing = backing.lower() - cmd += " -B {}".format(backing) + cmd += f" -B {backing}" if backing in ("zfs",): if zfsroot: - cmd += " --zfsroot {}".format(zfsroot) + cmd += f" --zfsroot {zfsroot}" if backing in ("lvm",): if lvname: - cmd += " --lvname {}".format(lvname) + cmd += f" --lvname {lvname}" if vgname: - cmd += " --vgname {}".format(vgname) + cmd += f" --vgname {vgname}" if thinpool: - cmd += " --thinpool {}".format(thinpool) + cmd += f" --thinpool {thinpool}" if backing not in ("dir", "overlayfs"): if fstype: - cmd += " --fstype {}".format(fstype) + cmd += f" --fstype {fstype}" if size: - cmd += " --fssize {}".format(size) + cmd += f" --fssize {size}" if options: if template == "download": @@ -2034,7 +2034,7 @@ def select(key, default=None): ) cmd += " --" for key, val in options.items(): - cmd += " --{} {}".format(key, val) + cmd += f" --{key} {val}" ret = __salt__["cmd.run_all"](cmd, python_shell=False) # please do not merge extra conflicting stuff @@ -2108,13 +2108,11 @@ def select(key, default=None): path = select("path") if exists(name, path=path): - raise CommandExecutionError("Container '{}' already 
exists".format(name)) + raise CommandExecutionError(f"Container '{name}' already exists") _ensure_exists(orig, path=path) if state(orig, path=path) != "stopped": - raise CommandExecutionError( - "Container '{}' must be stopped to be cloned".format(orig) - ) + raise CommandExecutionError(f"Container '{orig}' must be stopped to be cloned") backing = select("backing") snapshot = select("snapshot") @@ -2132,21 +2130,21 @@ def select(key, default=None): if Version(version()) >= Version("2.0"): # https://linuxcontainers.org/lxc/manpages//man1/lxc-copy.1.html cmd = "lxc-copy" - cmd += " {} -n {} -N {}".format(snapshot, orig, name) + cmd += f" {snapshot} -n {orig} -N {name}" else: # https://linuxcontainers.org/lxc/manpages//man1/lxc-clone.1.html cmd = "lxc-clone" - cmd += " {} -o {} -n {}".format(snapshot, orig, name) + cmd += f" {snapshot} -o {orig} -n {name}" if path: - cmd += " -P {}".format(pipes.quote(path)) + cmd += f" -P {shlex.quote(path)}" if not os.path.exists(path): os.makedirs(path) if backing: backing = backing.lower() - cmd += " -B {}".format(backing) + cmd += f" -B {backing}" if backing not in ("dir", "overlayfs"): if size: - cmd += " -L {}".format(size) + cmd += f" -L {size}" ret = __salt__["cmd.run_all"](cmd, python_shell=False) # please do not merge extra conflicting stuff # inside those two line (ret =, return) @@ -2177,7 +2175,7 @@ def ls_(active=None, cache=True, path=None): salt '*' lxc.ls salt '*' lxc.ls active=True """ - contextvar = "lxc.ls{}".format(path) + contextvar = f"lxc.ls{path}" if active: contextvar += ".active" if cache and (contextvar in __context__): @@ -2186,7 +2184,7 @@ def ls_(active=None, cache=True, path=None): ret = [] cmd = "lxc-ls" if path: - cmd += " -P {}".format(pipes.quote(path)) + cmd += f" -P {shlex.quote(path)}" if active: cmd += " --active" output = __salt__["cmd.run_stdout"](cmd, python_shell=False) @@ -2242,8 +2240,8 @@ def list_(extra=False, limit=None, path=None): for container in ctnrs: cmd = "lxc-info" if path: - 
cmd += " -P {}".format(pipes.quote(path)) - cmd += " -n {}".format(container) + cmd += f" -P {shlex.quote(path)}" + cmd += f" -n {container}" c_info = __salt__["cmd.run"](cmd, python_shell=False, output_loglevel="debug") c_state = None for line in c_info.splitlines(): @@ -2294,20 +2292,20 @@ def _change_state( return { "result": True, "state": {"old": expected, "new": expected}, - "comment": "Container '{}' already {}".format(name, expected), + "comment": f"Container '{name}' already {expected}", } if cmd == "lxc-destroy": # Kill the container first scmd = "lxc-stop" if path: - scmd += " -P {}".format(pipes.quote(path)) - scmd += " -k -n {}".format(name) + scmd += f" -P {shlex.quote(path)}" + scmd += f" -k -n {name}" __salt__["cmd.run"](scmd, python_shell=False) if path and " -P " not in cmd: - cmd += " -P {}".format(pipes.quote(path)) - cmd += " -n {}".format(name) + cmd += f" -P {shlex.quote(path)}" + cmd += f" -n {name}" # certain lxc commands need to be taken with care (lxc-start) # as te command itself mess with double forks; we must not @@ -2337,8 +2335,8 @@ def _change_state( # some commands do not wait, so we will rcmd = "lxc-wait" if path: - rcmd += " -P {}".format(pipes.quote(path)) - rcmd += " -n {} -s {}".format(name, expected.upper()) + rcmd += f" -P {shlex.quote(path)}" + rcmd += f" -n {name} -s {expected.upper()}" __salt__["cmd.run"](rcmd, python_shell=False, timeout=30) _clear_context() post = state(name, path=path) @@ -2351,7 +2349,7 @@ def _ensure_exists(name, path=None): Raise an exception if the container does not exist """ if not exists(name, path=path): - raise CommandExecutionError("Container '{}' does not exist".format(name)) + raise CommandExecutionError(f"Container '{name}' does not exist") def _ensure_running(name, no_start=False, path=None): @@ -2373,11 +2371,11 @@ def _ensure_running(name, no_start=False, path=None): return start(name, path=path) elif pre == "stopped": if no_start: - raise CommandExecutionError("Container '{}' is not 
running".format(name)) + raise CommandExecutionError(f"Container '{name}' is not running") return start(name, path=path) elif pre == "frozen": if no_start: - raise CommandExecutionError("Container '{}' is not running".format(name)) + raise CommandExecutionError(f"Container '{name}' is not running") return unfreeze(name, path=path) @@ -2459,13 +2457,11 @@ def start(name, **kwargs): lxc_config = os.path.join(cpath, name, "config") # we try to start, even without config, if global opts are there if os.path.exists(lxc_config): - cmd += " -f {}".format(pipes.quote(lxc_config)) + cmd += f" -f {shlex.quote(lxc_config)}" cmd += " -d" _ensure_exists(name, path=path) if state(name, path=path) == "frozen": - raise CommandExecutionError( - "Container '{}' is frozen, use lxc.unfreeze".format(name) - ) + raise CommandExecutionError(f"Container '{name}' is frozen, use lxc.unfreeze") # lxc-start daemonize itself violently, we must not communicate with it use_vt = kwargs.get("use_vt", None) with_communicate = kwargs.get("with_communicate", False) @@ -2560,11 +2556,11 @@ def freeze(name, **kwargs): start_ = kwargs.get("start", False) if orig_state == "stopped": if not start_: - raise CommandExecutionError("Container '{}' is stopped".format(name)) + raise CommandExecutionError(f"Container '{name}' is stopped") start(name, path=path) cmd = "lxc-freeze" if path: - cmd += " -P {}".format(pipes.quote(path)) + cmd += f" -P {shlex.quote(path)}" ret = _change_state(cmd, name, "frozen", use_vt=use_vt, path=path) if orig_state == "stopped" and start_: ret["state"]["old"] = orig_state @@ -2596,10 +2592,10 @@ def unfreeze(name, path=None, use_vt=None): """ _ensure_exists(name, path=path) if state(name, path=path) == "stopped": - raise CommandExecutionError("Container '{}' is stopped".format(name)) + raise CommandExecutionError(f"Container '{name}' is stopped") cmd = "lxc-unfreeze" if path: - cmd += " -P {}".format(pipes.quote(path)) + cmd += f" -P {shlex.quote(path)}" return _change_state(cmd, 
name, "running", path=path, use_vt=use_vt) @@ -2635,7 +2631,7 @@ def destroy(name, stop=False, path=None): """ _ensure_exists(name, path=path) if not stop and state(name, path=path) != "stopped": - raise CommandExecutionError("Container '{}' is not stopped".format(name)) + raise CommandExecutionError(f"Container '{name}' is not stopped") return _change_state("lxc-destroy", name, None, path=path) @@ -2684,7 +2680,7 @@ def state(name, path=None): """ # Don't use _ensure_exists() here, it will mess with _change_state() - cachekey = "lxc.state.{}{}".format(name, path) + cachekey = f"lxc.state.{name}{path}" try: return __context__[cachekey] except KeyError: @@ -2693,13 +2689,13 @@ def state(name, path=None): else: cmd = "lxc-info" if path: - cmd += " -P {}".format(pipes.quote(path)) - cmd += " -n {}".format(name) + cmd += f" -P {shlex.quote(path)}" + cmd += f" -n {name}" ret = __salt__["cmd.run_all"](cmd, python_shell=False) if ret["retcode"] != 0: _clear_context() raise CommandExecutionError( - "Unable to get state of container '{}'".format(name) + f"Unable to get state of container '{name}'" ) c_infos = ret["stdout"].splitlines() c_state = None @@ -2731,13 +2727,11 @@ def get_parameter(name, parameter, path=None): _ensure_exists(name, path=path) cmd = "lxc-cgroup" if path: - cmd += " -P {}".format(pipes.quote(path)) - cmd += " -n {} {}".format(name, parameter) + cmd += f" -P {shlex.quote(path)}" + cmd += f" -n {name} {parameter}" ret = __salt__["cmd.run_all"](cmd, python_shell=False) if ret["retcode"] != 0: - raise CommandExecutionError( - "Unable to retrieve value for '{}'".format(parameter) - ) + raise CommandExecutionError(f"Unable to retrieve value for '{parameter}'") return ret["stdout"].strip() @@ -2762,8 +2756,8 @@ def set_parameter(name, parameter, value, path=None): cmd = "lxc-cgroup" if path: - cmd += " -P {}".format(pipes.quote(path)) - cmd += " -n {} {} {}".format(name, parameter, value) + cmd += f" -P {shlex.quote(path)}" + cmd += f" -n {name} {parameter} 
{value}" ret = __salt__["cmd.run_all"](cmd, python_shell=False) if ret["retcode"] != 0: return False @@ -2787,7 +2781,7 @@ def info(name, path=None): salt '*' lxc.info name """ - cachekey = "lxc.info.{}{}".format(name, path) + cachekey = f"lxc.info.{name}{path}" try: return __context__[cachekey] except KeyError: @@ -2799,9 +2793,7 @@ def info(name, path=None): conf_file = os.path.join(cpath, str(name), "config") if not os.path.isfile(conf_file): - raise CommandExecutionError( - "LXC config file {} does not exist".format(conf_file) - ) + raise CommandExecutionError(f"LXC config file {conf_file} does not exist") ret = {} config = [] @@ -3000,9 +2992,7 @@ def update_lxc_conf(name, lxc_conf, lxc_conf_unset, path=None): cpath = get_root_path(path) lxc_conf_p = os.path.join(cpath, name, "config") if not os.path.exists(lxc_conf_p): - raise SaltInvocationError( - "Configuration file {} does not exist".format(lxc_conf_p) - ) + raise SaltInvocationError(f"Configuration file {lxc_conf_p} does not exist") changes = {"edited": [], "added": [], "removed": []} ret = {"changes": changes, "result": True, "comment": ""} @@ -3054,17 +3044,15 @@ def update_lxc_conf(name, lxc_conf, lxc_conf_unset, path=None): conf = "" for key, val in dest_lxc_conf: if not val: - conf += "{}\n".format(key) + conf += f"{key}\n" else: - conf += "{} = {}\n".format(key.strip(), val.strip()) + conf += f"{key.strip()} = {val.strip()}\n" conf_changed = conf != orig_config chrono = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S") if conf_changed: # DO NOT USE salt.utils.files.fopen here, i got (kiorky) # problems with lxc configs which were wiped ! 
- with salt.utils.files.fopen( - "{}.{}".format(lxc_conf_p, chrono), "w" - ) as wfic: + with salt.utils.files.fopen(f"{lxc_conf_p}.{chrono}", "w") as wfic: wfic.write(salt.utils.stringutils.to_str(conf)) with salt.utils.files.fopen(lxc_conf_p, "w") as wfic: wfic.write(salt.utils.stringutils.to_str(conf)) @@ -3113,8 +3101,8 @@ def set_dns(name, dnsservers=None, searchdomains=None, path=None): searchdomains = searchdomains.split(",") except AttributeError: raise SaltInvocationError("Invalid input for 'searchdomains' parameter") - dns = ["nameserver {}".format(x) for x in dnsservers] - dns.extend(["search {}".format(x) for x in searchdomains]) + dns = [f"nameserver {x}" for x in dnsservers] + dns.extend([f"search {x}" for x in searchdomains]) dns = "\n".join(dns) + "\n" # we may be using resolvconf in the container # We need to handle that case with care: @@ -3129,7 +3117,7 @@ def set_dns(name, dnsservers=None, searchdomains=None, path=None): # - We finally also set /etc/resolv.conf in all cases rstr = __salt__["test.random_hash"]() # no tmp here, apparmor won't let us execute ! 
- script = "/sbin/{}_dns.sh".format(rstr) + script = f"/sbin/{rstr}_dns.sh" DNS_SCRIPT = "\n".join( [ # 'set -x', @@ -3153,7 +3141,7 @@ def set_dns(name, dnsservers=None, searchdomains=None, path=None): ] ) result = run_all( - name, "tee {}".format(script), path=path, stdin=DNS_SCRIPT, python_shell=True + name, f"tee {script}", path=path, stdin=DNS_SCRIPT, python_shell=True ) if result["retcode"] == 0: result = run_all( @@ -3170,7 +3158,7 @@ def set_dns(name, dnsservers=None, searchdomains=None, path=None): python_shell=True, ) if result["retcode"] != 0: - error = "Unable to write to /etc/resolv.conf in container '{}'".format(name) + error = f"Unable to write to /etc/resolv.conf in container '{name}'" if result["stderr"]: error += ": {}".format(result["stderr"]) raise CommandExecutionError(error) @@ -3193,12 +3181,12 @@ def running_systemd(name, cache=True, path=None): salt '*' lxc.running_systemd ubuntu """ - k = "lxc.systemd.test.{}{}".format(name, path) + k = f"lxc.systemd.test.{name}{path}" ret = __context__.get(k, None) if ret is None or not cache: rstr = __salt__["test.random_hash"]() # no tmp here, apparmor won't let us execute ! - script = "/sbin/{}_testsystemd.sh".format(rstr) + script = f"/sbin/{rstr}_testsystemd.sh" # ubuntu already had since trusty some bits of systemd but was # still using upstart ... 
# we need to be a bit more careful that just testing that systemd @@ -3227,7 +3215,7 @@ def running_systemd(name, cache=True, path=None): """ ) result = run_all( - name, "tee {}".format(script), path=path, stdin=_script, python_shell=True + name, f"tee {script}", path=path, stdin=_script, python_shell=True ) if result["retcode"] == 0: result = run_all( @@ -3237,9 +3225,7 @@ def running_systemd(name, cache=True, path=None): python_shell=True, ) else: - raise CommandExecutionError( - "lxc {} failed to copy initd tester".format(name) - ) + raise CommandExecutionError(f"lxc {name} failed to copy initd tester") run_all( name, 'sh -c \'if [ -f "{0}" ];then rm -f "{0}";fi\''.format(script), @@ -3361,9 +3347,9 @@ def wait_started(name, path=None, timeout=300): """ if not exists(name, path=path): - raise CommandExecutionError("Container {} does does exists".format(name)) + raise CommandExecutionError(f"Container {name} does does exists") if not state(name, path=path) == "running": - raise CommandExecutionError("Container {} is not running".format(name)) + raise CommandExecutionError(f"Container {name} is not running") ret = False if running_systemd(name, path=path): test_started = test_sd_started_state @@ -3520,7 +3506,7 @@ def bootstrap( seeded = ( retcode( name, - "test -e '{}'".format(SEED_MARKER), + f"test -e '{SEED_MARKER}'", path=path, chroot_fallback=True, ignore_retcode=True, @@ -3543,9 +3529,9 @@ def bootstrap( if needs_install or force_install or unconditional_install: if install: rstr = __salt__["test.random_hash"]() - configdir = "/var/tmp/.c_{}".format(rstr) + configdir = f"/var/tmp/.c_{rstr}" - cmd = "install -m 0700 -d {}".format(configdir) + cmd = f"install -m 0700 -d {configdir}" if run_all(name, cmd, path=path, python_shell=False)["retcode"] != 0: log.error("tmpdir %s creation failed %s", configdir, cmd) return False @@ -3553,11 +3539,11 @@ def bootstrap( bs_ = __salt__["config.gather_bootstrap_script"]( bootstrap=bootstrap_url ) - script = 
"/sbin/{}_bootstrap.sh".format(rstr) + script = f"/sbin/{rstr}_bootstrap.sh" copy_to(name, bs_, script, path=path) result = run_all( name, - 'sh -c "chmod +x {}"'.format(script), + f'sh -c "chmod +x {script}"', path=path, python_shell=True, ) @@ -3631,7 +3617,7 @@ def bootstrap( freeze(name, path=path) # mark seeded upon successful install if ret: - run(name, "touch '{}'".format(SEED_MARKER), path=path, python_shell=False) + run(name, f"touch '{SEED_MARKER}'", path=path, python_shell=False) return ret @@ -3652,7 +3638,7 @@ def attachable(name, path=None): salt 'minion' lxc.attachable ubuntu """ - cachekey = "lxc.attachable{}{}".format(name, path) + cachekey = f"lxc.attachable{name}{path}" try: return __context__[cachekey] except KeyError: @@ -3662,8 +3648,8 @@ def attachable(name, path=None): log.debug("Checking if LXC container %s is attachable", name) cmd = "lxc-attach" if path: - cmd += " -P {}".format(pipes.quote(path)) - cmd += " --clear-env -n {} -- /usr/bin/env".format(name) + cmd += f" -P {shlex.quote(path)}" + cmd += f" --clear-env -n {name} -- /usr/bin/env" result = ( __salt__["cmd.retcode"]( cmd, python_shell=False, output_loglevel="quiet", ignore_retcode=True @@ -3719,7 +3705,7 @@ def _run( ) else: if not chroot_fallback: - raise CommandExecutionError("{} is not attachable.".format(name)) + raise CommandExecutionError(f"{name} is not attachable.") rootfs = info(name, path=path).get("rootfs") # Set context var to make cmd.run_chroot run cmd.run instead of # cmd.run_all. 
@@ -4214,7 +4200,7 @@ def _get_md5(name, path): Get the MD5 checksum of a file from a container """ output = run_stdout( - name, 'md5sum "{}"'.format(path), chroot_fallback=True, ignore_retcode=True + name, f'md5sum "{path}"', chroot_fallback=True, ignore_retcode=True ) try: return output.split()[0] @@ -4381,7 +4367,7 @@ def write_conf(conf_file, conf): line[key], (str, (str,), (int,), float), ): - out_line = " = ".join((key, "{}".format(line[key]))) + out_line = " = ".join((key, f"{line[key]}")) elif isinstance(line[key], dict): out_line = " = ".join((key, line[key]["value"])) if "comment" in line[key]: @@ -4474,7 +4460,7 @@ def edit_conf( net_changes = _config_list( conf, only_net=True, - **{"network_profile": DEFAULT_NIC, "nic_opts": nic_opts} + **{"network_profile": DEFAULT_NIC, "nic_opts": nic_opts}, ) if net_changes: lxc_config.extend(net_changes) @@ -4524,20 +4510,20 @@ def reboot(name, path=None): salt 'minion' lxc.reboot myvm """ - ret = {"result": True, "changes": {}, "comment": "{} rebooted".format(name)} + ret = {"result": True, "changes": {}, "comment": f"{name} rebooted"} does_exist = exists(name, path=path) if does_exist and (state(name, path=path) == "running"): try: stop(name, path=path) except (SaltInvocationError, CommandExecutionError) as exc: - ret["comment"] = "Unable to stop container: {}".format(exc) + ret["comment"] = f"Unable to stop container: {exc}" ret["result"] = False return ret if does_exist and (state(name, path=path) != "running"): try: start(name, path=path) except (SaltInvocationError, CommandExecutionError) as exc: - ret["comment"] = "Unable to stop container: {}".format(exc) + ret["comment"] = f"Unable to stop container: {exc}" ret["result"] = False return ret ret["changes"][name] = "rebooted" @@ -4559,7 +4545,7 @@ def reconfigure( utsname=None, rootfs=None, path=None, - **kwargs + **kwargs, ): """ Reconfigure a container. 
@@ -4625,7 +4611,7 @@ def reconfigure( path = os.path.join(cpath, name, "config") ret = { "name": name, - "comment": "config for {} up to date".format(name), + "comment": f"config for {name} up to date", "result": True, "changes": changes, } @@ -4677,7 +4663,7 @@ def select(key, default=None): edit_conf(path, out_format="commented", lxc_config=new_cfg) chunks = read_conf(path, out_format="commented") if old_chunks != chunks: - ret["comment"] = "{} lxc config updated".format(name) + ret["comment"] = f"{name} lxc config updated" if state(name, path=path) == "running": cret = reboot(name, path=path) ret["result"] = cret["result"] @@ -4763,9 +4749,9 @@ def get_pid(name, path=None): """ if name not in list_(limit="running", path=path): raise CommandExecutionError( - "Container {} is not running, can't determine PID".format(name) + f"Container {name} is not running, can't determine PID" ) - info = __salt__["cmd.run"]("lxc-info -n {}".format(name)).split("\n") + info = __salt__["cmd.run"](f"lxc-info -n {name}").split("\n") pid = [ line.split(":")[1].strip() for line in info @@ -4812,21 +4798,19 @@ def add_veth(name, interface_name, bridge=None, path=None): raise CommandExecutionError( "Directory /var/run required for lxc.add_veth doesn't exists" ) - if not __salt__["file.file_exists"]("/proc/{}/ns/net".format(pid)): + if not __salt__["file.file_exists"](f"/proc/{pid}/ns/net"): raise CommandExecutionError( - "Proc file for container {} network namespace doesn't exists".format(name) + f"Proc file for container {name} network namespace doesn't exists" ) if not __salt__["file.directory_exists"]("/var/run/netns"): __salt__["file.mkdir"]("/var/run/netns") # Ensure that the symlink is up to date (change on container restart) - if __salt__["file.is_link"]("/var/run/netns/{}".format(name)): - __salt__["file.remove"]("/var/run/netns/{}".format(name)) + if __salt__["file.is_link"](f"/var/run/netns/{name}"): + __salt__["file.remove"](f"/var/run/netns/{name}") - 
__salt__["file.symlink"]( - "/proc/{}/ns/net".format(pid), "/var/run/netns/{}".format(name) - ) + __salt__["file.symlink"](f"/proc/{pid}/ns/net", f"/var/run/netns/{name}") # Ensure that interface doesn't exists interface_exists = 0 == __salt__["cmd.retcode"]( @@ -4851,12 +4835,10 @@ def add_veth(name, interface_name, bridge=None, path=None): ) != 0 ): + raise CommandExecutionError(f"Error while creating the veth pair {random_veth}") + if __salt__["cmd.retcode"](f"ip link set dev {random_veth} up") != 0: raise CommandExecutionError( - "Error while creating the veth pair {}".format(random_veth) - ) - if __salt__["cmd.retcode"]("ip link set dev {} up".format(random_veth)) != 0: - raise CommandExecutionError( - "Error while bringing up host-side veth {}".format(random_veth) + f"Error while bringing up host-side veth {random_veth}" ) # Attach it to the container @@ -4872,7 +4854,7 @@ def add_veth(name, interface_name, bridge=None, path=None): ) ) - __salt__["file.remove"]("/var/run/netns/{}".format(name)) + __salt__["file.remove"](f"/var/run/netns/{name}") if bridge is not None: __salt__["bridge.addif"](bridge, random_veth) diff --git a/salt/modules/mac_keychain.py b/salt/modules/mac_keychain.py index a823c428b760..978d214ebf26 100644 --- a/salt/modules/mac_keychain.py +++ b/salt/modules/mac_keychain.py @@ -11,20 +11,6 @@ import salt.utils.platform -try: - import pipes - - HAS_DEPS = True -except ImportError: - HAS_DEPS = False - -if hasattr(shlex, "quote"): - _quote = shlex.quote -elif HAS_DEPS and hasattr(pipes, "quote"): - _quote = pipes.quote -else: - _quote = None - log = logging.getLogger(__name__) __virtualname__ = "keychain" @@ -34,7 +20,7 @@ def __virtual__(): """ Only work on Mac OS """ - if salt.utils.platform.is_darwin() and _quote is not None: + if salt.utils.platform.is_darwin(): return __virtualname__ return (False, "Only available on Mac OS systems with pipes") @@ -82,7 +68,7 @@ def install( if keychain_password is not None: unlock_keychain(keychain, 
keychain_password) - cmd = "security import {} -P {} -k {}".format(cert, password, keychain) + cmd = f"security import {cert} -P {password} -k {keychain}" if allow_any: cmd += " -A" return __salt__["cmd.run"](cmd) @@ -117,7 +103,7 @@ def uninstall( if keychain_password is not None: unlock_keychain(keychain, keychain_password) - cmd = 'security delete-certificate -c "{}" {}'.format(cert_name, keychain) + cmd = f'security delete-certificate -c "{cert_name}" {keychain}' return __salt__["cmd.run"](cmd) @@ -136,14 +122,14 @@ def list_certs(keychain="/Library/Keychains/System.keychain"): salt '*' keychain.list_certs """ cmd = ( - 'security find-certificate -a {} | grep -o "alis".*\\" | ' - "grep -o '\\\"[-A-Za-z0-9.:() ]*\\\"'".format(_quote(keychain)) + 'security find-certificate -a {} | grep -o "alis.*" | ' + "grep -o '\\\"[-A-Za-z0-9.:() ]*\\\"'".format(shlex.quote(keychain)) ) out = __salt__["cmd.run"](cmd, python_shell=True) return out.replace('"', "").split("\n") -def get_friendly_name(cert, password): +def get_friendly_name(cert, password, legacy=False): """ Get the friendly name of the given certificate @@ -157,15 +143,26 @@ def get_friendly_name(cert, password): Note: The password given here will show up as plaintext in the returned job info. + legacy + Assume legacy format for certificate. + CLI Example: .. 
code-block:: bash salt '*' keychain.get_friendly_name /tmp/test.p12 test123 + + salt '*' keychain.get_friendly_name /tmp/test.p12 test123 legacy=True """ + openssl_cmd = "openssl pkcs12" + if legacy: + openssl_cmd = f"{openssl_cmd} -legacy" + cmd = ( - "openssl pkcs12 -in {} -passin pass:{} -info -nodes -nokeys 2> /dev/null | " - "grep friendlyName:".format(_quote(cert), _quote(password)) + "{} -in {} -passin pass:{} -info -nodes -nokeys 2> /dev/null | " + "grep friendlyName:".format( + openssl_cmd, shlex.quote(cert), shlex.quote(password) + ) ) out = __salt__["cmd.run"](cmd, python_shell=True) return out.replace("friendlyName: ", "").strip() @@ -187,7 +184,7 @@ def get_default_keychain(user=None, domain="user"): salt '*' keychain.get_default_keychain """ - cmd = "security default-keychain -d {}".format(domain) + cmd = f"security default-keychain -d {domain}" return __salt__["cmd.run"](cmd, runas=user) @@ -210,7 +207,7 @@ def set_default_keychain(keychain, domain="user", user=None): salt '*' keychain.set_keychain /Users/fred/Library/Keychains/login.keychain """ - cmd = "security default-keychain -d {} -s {}".format(domain, keychain) + cmd = f"security default-keychain -d {domain} -s {keychain}" return __salt__["cmd.run"](cmd, runas=user) @@ -233,7 +230,7 @@ def unlock_keychain(keychain, password): salt '*' keychain.unlock_keychain /tmp/test.p12 test123 """ - cmd = "security unlock-keychain -p {} {}".format(password, keychain) + cmd = f"security unlock-keychain -p {password} {keychain}" __salt__["cmd.run"](cmd) @@ -261,7 +258,7 @@ def get_hash(name, password=None): name, password ) else: - cmd = 'security find-certificate -c "{}" -m -p'.format(name) + cmd = f'security find-certificate -c "{name}" -m -p' out = __salt__["cmd.run"](cmd) matches = re.search( diff --git a/salt/modules/mac_power.py b/salt/modules/mac_power.py index 01fc561e834b..efdca6528465 100644 --- a/salt/modules/mac_power.py +++ b/salt/modules/mac_power.py @@ -68,7 +68,7 @@ def 
_validate_sleep(minutes): ) raise SaltInvocationError(msg) else: - msg = "Unknown Variable Type Passed for Minutes.\nPassed: {}".format(minutes) + msg = f"Unknown Variable Type Passed for Minutes.\nPassed: {minutes}" raise SaltInvocationError(msg) @@ -115,7 +115,7 @@ def set_sleep(minutes): salt '*' power.set_sleep never """ value = _validate_sleep(minutes) - cmd = "systemsetup -setsleep {}".format(value) + cmd = f"systemsetup -setsleep {value}" salt.utils.mac_utils.execute_return_success(cmd) state = [] @@ -165,7 +165,7 @@ def set_computer_sleep(minutes): salt '*' power.set_computer_sleep off """ value = _validate_sleep(minutes) - cmd = "systemsetup -setcomputersleep {}".format(value) + cmd = f"systemsetup -setcomputersleep {value}" salt.utils.mac_utils.execute_return_success(cmd) return salt.utils.mac_utils.confirm_updated( @@ -210,7 +210,7 @@ def set_display_sleep(minutes): salt '*' power.set_display_sleep off """ value = _validate_sleep(minutes) - cmd = "systemsetup -setdisplaysleep {}".format(value) + cmd = f"systemsetup -setdisplaysleep {value}" salt.utils.mac_utils.execute_return_success(cmd) return salt.utils.mac_utils.confirm_updated( @@ -255,7 +255,7 @@ def set_harddisk_sleep(minutes): salt '*' power.set_harddisk_sleep off """ value = _validate_sleep(minutes) - cmd = "systemsetup -setharddisksleep {}".format(value) + cmd = f"systemsetup -setharddisksleep {value}" salt.utils.mac_utils.execute_return_success(cmd) return salt.utils.mac_utils.confirm_updated( @@ -303,12 +303,13 @@ def set_wake_on_modem(enabled): salt '*' power.set_wake_on_modem True """ state = salt.utils.mac_utils.validate_enabled(enabled) - cmd = "systemsetup -setwakeonmodem {}".format(state) + cmd = f"systemsetup -setwakeonmodem {state}" salt.utils.mac_utils.execute_return_success(cmd) return salt.utils.mac_utils.confirm_updated( state, get_wake_on_modem, + True, ) @@ -353,12 +354,13 @@ def set_wake_on_network(enabled): salt '*' power.set_wake_on_network True """ state = 
salt.utils.mac_utils.validate_enabled(enabled) - cmd = "systemsetup -setwakeonnetworkaccess {}".format(state) + cmd = f"systemsetup -setwakeonnetworkaccess {state}" salt.utils.mac_utils.execute_return_success(cmd) return salt.utils.mac_utils.confirm_updated( state, get_wake_on_network, + True, ) @@ -403,12 +405,13 @@ def set_restart_power_failure(enabled): salt '*' power.set_restart_power_failure True """ state = salt.utils.mac_utils.validate_enabled(enabled) - cmd = "systemsetup -setrestartpowerfailure {}".format(state) + cmd = f"systemsetup -setrestartpowerfailure {state}" salt.utils.mac_utils.execute_return_success(cmd) return salt.utils.mac_utils.confirm_updated( state, get_restart_power_failure, + True, ) @@ -453,7 +456,7 @@ def set_restart_freeze(enabled): salt '*' power.set_restart_freeze True """ state = salt.utils.mac_utils.validate_enabled(enabled) - cmd = "systemsetup -setrestartfreeze {}".format(state) + cmd = f"systemsetup -setrestartfreeze {state}" salt.utils.mac_utils.execute_return_success(cmd) return salt.utils.mac_utils.confirm_updated(state, get_restart_freeze, True) @@ -502,10 +505,11 @@ def set_sleep_on_power_button(enabled): salt '*' power.set_sleep_on_power_button True """ state = salt.utils.mac_utils.validate_enabled(enabled) - cmd = "systemsetup -setallowpowerbuttontosleepcomputer {}".format(state) + cmd = f"systemsetup -setallowpowerbuttontosleepcomputer {state}" salt.utils.mac_utils.execute_return_success(cmd) return salt.utils.mac_utils.confirm_updated( state, get_sleep_on_power_button, + True, ) diff --git a/salt/modules/mac_service.py b/salt/modules/mac_service.py index 2932d0836514..39dd6dd0f67e 100644 --- a/salt/modules/mac_service.py +++ b/salt/modules/mac_service.py @@ -143,7 +143,7 @@ def _get_service(name): # so we need to raise that the service could not be found. 
try: if not __context__["using_cached_services"]: - raise CommandExecutionError("Service not found: {}".format(name)) + raise CommandExecutionError(f"Service not found: {name}") except KeyError: pass @@ -151,7 +151,7 @@ def _get_service(name): # state then there is no reason to check again. # fixes https://github.com/saltstack/salt/issues/57907 if __context__.get("service.state") == "dead": - raise CommandExecutionError("Service not found: {}".format(name)) + raise CommandExecutionError(f"Service not found: {name}") # we used a cached version to check, a service could have been made # between now and then, we should refresh our available services. @@ -162,7 +162,7 @@ def _get_service(name): if not service: # Could not find the service after refresh raise. - raise CommandExecutionError("Service not found: {}".format(name)) + raise CommandExecutionError(f"Service not found: {name}") # found it :) return service @@ -240,7 +240,7 @@ def _get_domain_target(name, service_target=False): if "LaunchAgents" in path: # Get the console user so we can service in the correct session uid = __utils__["mac_utils.console_user"]() - domain_target = "gui/{}".format(uid) + domain_target = f"gui/{uid}" # check to see if we need to make it a full service target. 
if service_target is True: @@ -638,7 +638,8 @@ def disabled(name, runas=None, domain="system"): if name != srv_name: pass else: - return True if "true" in status.lower() else False + matches = ["true", "disabled"] + return True if any([x in status.lower() for x in matches]) else False return False diff --git a/salt/modules/mac_system.py b/salt/modules/mac_system.py index 1dd0aa8ea29a..ad64bc6badc6 100644 --- a/salt/modules/mac_system.py +++ b/salt/modules/mac_system.py @@ -10,6 +10,7 @@ import getpass import shlex +import salt.utils.mac_utils import salt.utils.platform from salt.exceptions import CommandExecutionError, SaltInvocationError @@ -71,7 +72,7 @@ def _execute_command(cmd, at_time=None): Returns: bool """ if at_time: - cmd = "echo '{}' | at {}".format(cmd, shlex.quote(at_time)) + cmd = f"echo '{cmd}' | at {shlex.quote(at_time)}" return not bool(__salt__["cmd.retcode"](cmd, python_shell=True)) @@ -204,10 +205,10 @@ def get_remote_login(): salt '*' system.get_remote_login """ - ret = __utils__["mac_utils.execute_return_result"]("systemsetup -getremotelogin") + ret = salt.utils.mac_utils.execute_return_result("systemsetup -getremotelogin") - enabled = __utils__["mac_utils.validate_enabled"]( - __utils__["mac_utils.parse_return"](ret) + enabled = salt.utils.mac_utils.validate_enabled( + salt.utils.mac_utils.parse_return(ret) ) return enabled == "on" @@ -230,12 +231,12 @@ def set_remote_login(enable): salt '*' system.set_remote_login True """ - state = __utils__["mac_utils.validate_enabled"](enable) + state = salt.utils.mac_utils.validate_enabled(enable) - cmd = "systemsetup -f -setremotelogin {}".format(state) - __utils__["mac_utils.execute_return_success"](cmd) + cmd = f"systemsetup -f -setremotelogin {state}" + salt.utils.mac_utils.execute_return_success(cmd) - return __utils__["mac_utils.confirm_updated"]( + return salt.utils.mac_utils.confirm_updated( state, get_remote_login, normalize_ret=True ) @@ -253,12 +254,12 @@ def get_remote_events(): salt '*' 
system.get_remote_events """ - ret = __utils__["mac_utils.execute_return_result"]( + ret = salt.utils.mac_utils.execute_return_result( "systemsetup -getremoteappleevents" ) - enabled = __utils__["mac_utils.validate_enabled"]( - __utils__["mac_utils.parse_return"](ret) + enabled = salt.utils.mac_utils.validate_enabled( + salt.utils.mac_utils.parse_return(ret) ) return enabled == "on" @@ -282,12 +283,12 @@ def set_remote_events(enable): salt '*' system.set_remote_events On """ - state = __utils__["mac_utils.validate_enabled"](enable) + state = salt.utils.mac_utils.validate_enabled(enable) - cmd = "systemsetup -setremoteappleevents {}".format(state) - __utils__["mac_utils.execute_return_success"](cmd) + cmd = f"systemsetup -setremoteappleevents {state}" + salt.utils.mac_utils.execute_return_success(cmd) - return __utils__["mac_utils.confirm_updated"]( + return salt.utils.mac_utils.confirm_updated( state, get_remote_events, normalize_ret=True, @@ -307,9 +308,9 @@ def get_computer_name(): salt '*' system.get_computer_name """ - ret = __utils__["mac_utils.execute_return_result"]("scutil --get ComputerName") + ret = salt.utils.mac_utils.execute_return_result("scutil --get ComputerName") - return __utils__["mac_utils.parse_return"](ret) + return salt.utils.mac_utils.parse_return(ret) def set_computer_name(name): @@ -327,10 +328,10 @@ def set_computer_name(name): salt '*' system.set_computer_name "Mike's Mac" """ - cmd = 'scutil --set ComputerName "{}"'.format(name) - __utils__["mac_utils.execute_return_success"](cmd) + cmd = f'scutil --set ComputerName "{name}"' + salt.utils.mac_utils.execute_return_success(cmd) - return __utils__["mac_utils.confirm_updated"]( + return salt.utils.mac_utils.confirm_updated( name, get_computer_name, ) @@ -349,11 +350,9 @@ def get_subnet_name(): salt '*' system.get_subnet_name """ - ret = __utils__["mac_utils.execute_return_result"]( - "systemsetup -getlocalsubnetname" - ) + ret = salt.utils.mac_utils.execute_return_result("systemsetup 
-getlocalsubnetname") - return __utils__["mac_utils.parse_return"](ret) + return salt.utils.mac_utils.parse_return(ret) def set_subnet_name(name): @@ -375,10 +374,10 @@ def set_subnet_name(name): The following will be set as 'Mikes-Mac' salt '*' system.set_subnet_name "Mike's Mac" """ - cmd = 'systemsetup -setlocalsubnetname "{}"'.format(name) - __utils__["mac_utils.execute_return_success"](cmd) + cmd = f'systemsetup -setlocalsubnetname "{name}"' + salt.utils.mac_utils.execute_return_success(cmd) - return __utils__["mac_utils.confirm_updated"]( + return salt.utils.mac_utils.confirm_updated( name, get_subnet_name, ) @@ -397,9 +396,9 @@ def get_startup_disk(): salt '*' system.get_startup_disk """ - ret = __utils__["mac_utils.execute_return_result"]("systemsetup -getstartupdisk") + ret = salt.utils.mac_utils.execute_return_result("systemsetup -getstartupdisk") - return __utils__["mac_utils.parse_return"](ret) + return salt.utils.mac_utils.parse_return(ret) def list_startup_disks(): @@ -415,7 +414,7 @@ def list_startup_disks(): salt '*' system.list_startup_disks """ - ret = __utils__["mac_utils.execute_return_result"]("systemsetup -liststartupdisks") + ret = salt.utils.mac_utils.execute_return_result("systemsetup -liststartupdisks") return ret.splitlines() @@ -445,10 +444,10 @@ def set_startup_disk(path): ) raise SaltInvocationError(msg) - cmd = "systemsetup -setstartupdisk {}".format(path) - __utils__["mac_utils.execute_return_result"](cmd) + cmd = f"systemsetup -setstartupdisk {path}" + salt.utils.mac_utils.execute_return_result(cmd) - return __utils__["mac_utils.confirm_updated"]( + return salt.utils.mac_utils.confirm_updated( path, get_startup_disk, ) @@ -469,11 +468,11 @@ def get_restart_delay(): salt '*' system.get_restart_delay """ - ret = __utils__["mac_utils.execute_return_result"]( + ret = salt.utils.mac_utils.execute_return_result( "systemsetup -getwaitforstartupafterpowerfailure" ) - return __utils__["mac_utils.parse_return"](ret) + return 
salt.utils.mac_utils.parse_return(ret) def set_restart_delay(seconds): @@ -512,10 +511,10 @@ def set_restart_delay(seconds): ) raise SaltInvocationError(msg) - cmd = "systemsetup -setwaitforstartupafterpowerfailure {}".format(seconds) - __utils__["mac_utils.execute_return_success"](cmd) + cmd = f"systemsetup -setwaitforstartupafterpowerfailure {seconds}" + salt.utils.mac_utils.execute_return_success(cmd) - return __utils__["mac_utils.confirm_updated"]( + return salt.utils.mac_utils.confirm_updated( seconds, get_restart_delay, ) @@ -535,12 +534,12 @@ def get_disable_keyboard_on_lock(): salt '*' system.get_disable_keyboard_on_lock """ - ret = __utils__["mac_utils.execute_return_result"]( + ret = salt.utils.mac_utils.execute_return_result( "systemsetup -getdisablekeyboardwhenenclosurelockisengaged" ) - enabled = __utils__["mac_utils.validate_enabled"]( - __utils__["mac_utils.parse_return"](ret) + enabled = salt.utils.mac_utils.validate_enabled( + salt.utils.mac_utils.parse_return(ret) ) return enabled == "on" @@ -564,12 +563,12 @@ def set_disable_keyboard_on_lock(enable): salt '*' system.set_disable_keyboard_on_lock False """ - state = __utils__["mac_utils.validate_enabled"](enable) + state = salt.utils.mac_utils.validate_enabled(enable) - cmd = "systemsetup -setdisablekeyboardwhenenclosurelockisengaged {}".format(state) - __utils__["mac_utils.execute_return_success"](cmd) + cmd = f"systemsetup -setdisablekeyboardwhenenclosurelockisengaged {state}" + salt.utils.mac_utils.execute_return_success(cmd) - return __utils__["mac_utils.confirm_updated"]( + return salt.utils.mac_utils.confirm_updated( state, get_disable_keyboard_on_lock, normalize_ret=True, @@ -589,11 +588,11 @@ def get_boot_arch(): salt '*' system.get_boot_arch """ - ret = __utils__["mac_utils.execute_return_result"]( + ret = salt.utils.mac_utils.execute_return_result( "systemsetup -getkernelbootarchitecturesetting" ) - arch = __utils__["mac_utils.parse_return"](ret) + arch = 
salt.utils.mac_utils.parse_return(ret) if "default" in arch: return "default" @@ -639,10 +638,10 @@ def set_boot_arch(arch="default"): ) raise SaltInvocationError(msg) - cmd = "systemsetup -setkernelbootarchitecture {}".format(arch) - __utils__["mac_utils.execute_return_success"](cmd) + cmd = f"systemsetup -setkernelbootarchitecture {arch}" + salt.utils.mac_utils.execute_return_success(cmd) - return __utils__["mac_utils.confirm_updated"]( + return salt.utils.mac_utils.confirm_updated( arch, get_boot_arch, ) diff --git a/salt/modules/mac_user.py b/salt/modules/mac_user.py index d6662340ffcb..7e4f1b25965e 100644 --- a/salt/modules/mac_user.py +++ b/salt/modules/mac_user.py @@ -529,10 +529,10 @@ def _kcpassword(password): # The magic 11 bytes - these are just repeated # 0x7D 0x89 0x52 0x23 0xD2 0xBC 0xDD 0xEA 0xA3 0xB9 0x1F key = [125, 137, 82, 35, 210, 188, 221, 234, 163, 185, 31] - key_len = len(key) + key_len = len(key) + 1 # macOS adds an extra byte for the trailing null - # Convert each character to a byte - password = list(map(ord, password)) + # Convert each character to a byte and add a trailing null + password = list(map(ord, password)) + [0] # pad password length out to an even multiple of key length remainder = len(password) % key_len @@ -554,9 +554,8 @@ def _kcpassword(password): password[password_index] = password[password_index] ^ key[key_index] key_index += 1 - # Convert each byte back to a character - password = list(map(chr, password)) - return b"".join(salt.utils.data.encode(password)) + # Return the raw bytes + return bytes(password) def enable_auto_login(name, password): diff --git a/salt/modules/mac_xattr.py b/salt/modules/mac_xattr.py index f8d684e8e5be..e2bbf1c503a9 100644 --- a/salt/modules/mac_xattr.py +++ b/salt/modules/mac_xattr.py @@ -110,6 +110,9 @@ def read(path, attribute, **kwargs): try: ret = salt.utils.mac_utils.execute_return_result(cmd) + except UnicodeDecodeError as exc: + # Mimic the builtin xattr tool by replacing undecodeable 
bytes. + return exc.object.decode(errors="replace") except CommandExecutionError as exc: if "No such file" in exc.strerror: raise CommandExecutionError("File not found: {}".format(path)) diff --git a/salt/modules/macpackage.py b/salt/modules/macpackage.py index faf5810d4fc7..f9a6b7bb95cc 100644 --- a/salt/modules/macpackage.py +++ b/salt/modules/macpackage.py @@ -9,31 +9,16 @@ import salt.utils.platform -try: - import pipes - - HAS_DEPS = True -except ImportError: - HAS_DEPS = False - - log = logging.getLogger(__name__) -__virtualname__ = "macpackage" - -if hasattr(shlex, "quote"): - _quote = shlex.quote -elif HAS_DEPS and hasattr(pipes, "quote"): - _quote = pipes.quote -else: - _quote = None +__virtualname__ = "macpackage" def __virtual__(): """ Only work on Mac OS """ - if salt.utils.platform.is_darwin() and _quote is not None: + if salt.utils.platform.is_darwin(): return __virtualname__ return (False, "Only available on Mac OS systems with pipes") @@ -60,11 +45,11 @@ def install(pkg, target="LocalSystem", store=False, allow_untrusted=False): """ if "*." 
not in pkg: # If we use wildcards, we cannot use quotes - pkg = _quote(pkg) + pkg = shlex.quote(pkg) - target = _quote(target) + target = shlex.quote(target) - cmd = "installer -pkg {} -target {}".format(pkg, target) + cmd = f"installer -pkg {pkg} -target {target}" if store: cmd += " -store" if allow_untrusted: @@ -109,7 +94,7 @@ def install_app(app, target="/Applications/"): if not app[-1] == "/": app += "/" - cmd = 'rsync -a --delete "{}" "{}"'.format(app, target) + cmd = f'rsync -a --delete "{app}" "{target}"' return __salt__["cmd.run"](cmd) @@ -154,7 +139,7 @@ def mount(dmg): temp_dir = __salt__["temp.dir"](prefix="dmg-") - cmd = 'hdiutil attach -readonly -nobrowse -mountpoint {} "{}"'.format(temp_dir, dmg) + cmd = f'hdiutil attach -readonly -nobrowse -mountpoint {temp_dir} "{dmg}"' return __salt__["cmd.run"](cmd), temp_dir @@ -176,7 +161,7 @@ def unmount(mountpoint): salt '*' macpackage.unmount /dev/disk2 """ - cmd = 'hdiutil detach "{}"'.format(mountpoint) + cmd = f'hdiutil detach "{mountpoint}"' return __salt__["cmd.run"](cmd) @@ -216,7 +201,7 @@ def get_pkg_id(pkg): salt '*' macpackage.get_pkg_id /tmp/test.pkg """ - pkg = _quote(pkg) + pkg = shlex.quote(pkg) package_ids = [] # Create temp directory @@ -224,7 +209,7 @@ def get_pkg_id(pkg): try: # List all of the PackageInfo files - cmd = "xar -t -f {} | grep PackageInfo".format(pkg) + cmd = f"xar -t -f {pkg} | grep PackageInfo" out = __salt__["cmd.run"](cmd, python_shell=True, output_loglevel="quiet") files = out.split("\n") @@ -264,12 +249,12 @@ def get_mpkg_ids(mpkg): salt '*' macpackage.get_mpkg_ids /dev/disk2 """ - mpkg = _quote(mpkg) + mpkg = shlex.quote(mpkg) package_infos = [] base_path = os.path.dirname(mpkg) # List all of the .pkg files - cmd = "find {} -name *.pkg".format(base_path) + cmd = f"find {base_path} -name *.pkg" out = __salt__["cmd.run"](cmd, python_shell=True) pkg_files = out.split("\n") @@ -281,7 +266,7 @@ def get_mpkg_ids(mpkg): def _get_pkg_id_from_pkginfo(pkginfo): # Find our 
identifiers - pkginfo = _quote(pkginfo) + pkginfo = shlex.quote(pkginfo) cmd = "cat {} | grep -Eo 'identifier=\"[a-zA-Z.0-9\\-]*\"' | cut -c 13- | tr -d '\"'".format( pkginfo ) @@ -294,8 +279,8 @@ def _get_pkg_id_from_pkginfo(pkginfo): def _get_pkg_id_dir(path): - path = _quote(os.path.join(path, "Contents/Info.plist")) - cmd = '/usr/libexec/PlistBuddy -c "print :CFBundleIdentifier" {}'.format(path) + path = shlex.quote(os.path.join(path, "Contents/Info.plist")) + cmd = f'/usr/libexec/PlistBuddy -c "print :CFBundleIdentifier" {path}' # We can only use wildcards in python_shell which is # sent by the macpackage state diff --git a/salt/modules/match.py b/salt/modules/match.py index a6775a4916d4..7c7f6d933eaa 100644 --- a/salt/modules/match.py +++ b/salt/modules/match.py @@ -22,7 +22,6 @@ def _load_matchers(): """ Store matchers in __context__ so they're only loaded once """ - __context__["matchers"] = {} __context__["matchers"] = salt.loader.matchers(__opts__) diff --git a/salt/modules/mdata.py b/salt/modules/mdata.py index 13f2e4cb3968..9b31be55dbc4 100644 --- a/salt/modules/mdata.py +++ b/salt/modules/mdata.py @@ -176,6 +176,3 @@ def delete_(*keyname): ret[k] = True return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/mod_random.py b/salt/modules/mod_random.py index 247056a63d06..5a2b0ef868db 100644 --- a/salt/modules/mod_random.py +++ b/salt/modules/mod_random.py @@ -89,7 +89,7 @@ def get_str( ): """ .. versionadded:: 2014.7.0 - .. versionchanged:: 3004.0 + .. versionchanged:: 3004 Changed the default character set used to include symbols and implemented arguments to control the used character set. @@ -99,14 +99,14 @@ def get_str( Any valid number of bytes. chars : None - .. versionadded:: 3004.0 + .. versionadded:: 3004 String with any character that should be used to generate random string. This argument supersedes all other character controlling arguments. lowercase : True - .. versionadded:: 3004.0 + .. 
versionadded:: 3004 Use lowercase letters in generated random string. (see :py:data:`string.ascii_lowercase`) @@ -114,7 +114,7 @@ def get_str( This argument is superseded by chars. uppercase : True - .. versionadded:: 3004.0 + .. versionadded:: 3004 Use uppercase letters in generated random string. (see :py:data:`string.ascii_uppercase`) @@ -122,7 +122,7 @@ def get_str( This argument is superseded by chars. digits : True - .. versionadded:: 3004.0 + .. versionadded:: 3004 Use digits in generated random string. (see :py:data:`string.digits`) @@ -130,7 +130,7 @@ def get_str( This argument is superseded by chars. printable : False - .. versionadded:: 3004.0 + .. versionadded:: 3004 Use printable characters in generated random string and includes lowercase, uppercase, digits, punctuation and whitespace. @@ -143,7 +143,7 @@ def get_str( This argument is superseded by chars. punctuation : True - .. versionadded:: 3004.0 + .. versionadded:: 3004 Use punctuation characters in generated random string. (see :py:data:`string.punctuation`) @@ -151,7 +151,7 @@ def get_str( This argument is superseded by chars. whitespace : False - .. versionadded:: 3004.0 + .. versionadded:: 3004 Use whitespace characters in generated random string. 
(see :py:data:`string.whitespace`) diff --git a/salt/modules/msteams.py b/salt/modules/msteams.py index c85a186ce995..aa2d546ca79e 100644 --- a/salt/modules/msteams.py +++ b/salt/modules/msteams.py @@ -70,8 +70,16 @@ def post_card(message, hook_url=None, title=None, theme_color=None): payload = {"text": message, "title": title, "themeColor": theme_color} + headers = { + "Content-Type": "application/json", + } + result = salt.utils.http.query( - hook_url, method="POST", data=salt.utils.json.dumps(payload), status=True + hook_url, + method="POST", + header_dict=headers, + data=salt.utils.json.dumps(payload), + status=True, ) if result["status"] <= 201: diff --git a/salt/modules/nacl.py b/salt/modules/nacl.py index d62d389718ca..ef0e17745480 100644 --- a/salt/modules/nacl.py +++ b/salt/modules/nacl.py @@ -1,7 +1,7 @@ """ This module helps include encrypted passwords in pillars, grains and salt state files. -:depends: libnacl, https://github.com/saltstack/libnacl +:depends: PyNaCl, https://github.com/pyca/pynacl This is often useful if you wish to store your pillars in source control or share your pillar data with others that you trust. I don't advise making your pillars public @@ -137,9 +137,9 @@ .. code-block:: python #!/bin/python3 - import sys, base64, libnacl.sealed + import sys, base64, nacl.public pk = base64.b64decode('YOURPUBKEY') - b = libnacl.sealed.SealedBox(pk) + b = nacl.public.SealedBox(pk) data = sys.stdin.buffer.read() print(base64.b64encode(b.encrypt(data)).decode()) @@ -163,7 +163,7 @@ def __virtual__(): def keygen(sk_file=None, pk_file=None, **kwargs): """ - Use libnacl to generate a keypair. + Use PyNaCl to generate a keypair. If no `sk_file` is defined return a keypair. 
diff --git a/salt/modules/netbsd_sysctl.py b/salt/modules/netbsd_sysctl.py index 648120b59b60..cd18ea36e475 100644 --- a/salt/modules/netbsd_sysctl.py +++ b/salt/modules/netbsd_sysctl.py @@ -159,6 +159,3 @@ def persist(name, value, config="/etc/sysctl.conf"): assign(name, value) return "Updated" - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/netbsdservice.py b/salt/modules/netbsdservice.py index 7b36be1a214d..e8308415930e 100644 --- a/salt/modules/netbsdservice.py +++ b/salt/modules/netbsdservice.py @@ -310,6 +310,3 @@ def disabled(name): salt '*' service.disabled """ return _get_svc("/etc/rc.d/{}".format(name), "NO") - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/openscap.py b/salt/modules/openscap.py index 770c8e7c0438..762796cce47e 100644 --- a/salt/modules/openscap.py +++ b/salt/modules/openscap.py @@ -92,8 +92,8 @@ def xccdf(params): tempdir = tempfile.mkdtemp() proc = Popen(shlex.split(cmd), stdout=PIPE, stderr=PIPE, cwd=tempdir) (stdoutdata, error) = proc.communicate() - success = _OSCAP_EXIT_CODES_MAP[proc.returncode] returncode = proc.returncode + success = _OSCAP_EXIT_CODES_MAP.get(returncode, False) if success: __salt__["cp.push_dir"](tempdir) shutil.rmtree(tempdir, ignore_errors=True) diff --git a/salt/modules/openstack_config.py b/salt/modules/openstack_config.py index 823afbf1c60d..937c10da61a4 100644 --- a/salt/modules/openstack_config.py +++ b/salt/modules/openstack_config.py @@ -13,28 +13,11 @@ import salt.exceptions import salt.utils.decorators.path -try: - import pipes - - HAS_DEPS = True -except ImportError: - HAS_DEPS = False - -if hasattr(shlex, "quote"): - _quote = shlex.quote -elif HAS_DEPS and hasattr(pipes, "quote"): - _quote = pipes.quote -else: - _quote = None - - # Don't shadow built-in's. 
__func_alias__ = {"set_": "set"} def __virtual__(): - if _quote is None and not HAS_DEPS: - return (False, "Missing dependencies") return True @@ -69,10 +52,10 @@ def set_(filename, section, parameter, value): salt-call openstack_config.set /etc/keystone/keystone.conf sql connection foo """ - filename = _quote(filename) - section = _quote(section) - parameter = _quote(parameter) - value = _quote(str(value)) + filename = shlex.quote(filename) + section = shlex.quote(section) + parameter = shlex.quote(parameter) + value = shlex.quote(str(value)) result = __salt__["cmd.run_all"]( "openstack-config --set {} {} {} {}".format( @@ -109,12 +92,12 @@ def get(filename, section, parameter): """ - filename = _quote(filename) - section = _quote(section) - parameter = _quote(parameter) + filename = shlex.quote(filename) + section = shlex.quote(section) + parameter = shlex.quote(parameter) result = __salt__["cmd.run_all"]( - "openstack-config --get {} {} {}".format(filename, section, parameter), + f"openstack-config --get {filename} {section} {parameter}", python_shell=False, ) @@ -145,12 +128,12 @@ def delete(filename, section, parameter): salt-call openstack_config.delete /etc/keystone/keystone.conf sql connection """ - filename = _quote(filename) - section = _quote(section) - parameter = _quote(parameter) + filename = shlex.quote(filename) + section = shlex.quote(section) + parameter = shlex.quote(parameter) result = __salt__["cmd.run_all"]( - "openstack-config --del {} {} {}".format(filename, section, parameter), + f"openstack-config --del {filename} {section} {parameter}", python_shell=False, ) diff --git a/salt/modules/openvswitch.py b/salt/modules/openvswitch.py index eb34cb82eb01..e0dc7ac2677f 100644 --- a/salt/modules/openvswitch.py +++ b/salt/modules/openvswitch.py @@ -193,11 +193,11 @@ def bridge_create(br, may_exist=True, parent=None, vlan=None): parent : string name of the parent bridge (if the bridge shall be created as a fake bridge).
If specified, vlan must also be specified. - .. versionadded:: 3006 + .. versionadded:: 3006.0 vlan : int VLAN ID of the bridge (if the bridge shall be created as a fake bridge). If specified, parent must also be specified. - .. versionadded:: 3006 + .. versionadded:: 3006.0 Returns: True on success, else False. @@ -252,7 +252,7 @@ def bridge_delete(br, if_exists=True): def bridge_to_parent(br): """ - .. versionadded:: 3006 + .. versionadded:: 3006.0 Returns the parent bridge of a bridge. @@ -280,7 +280,7 @@ def bridge_to_parent(br): def bridge_to_vlan(br): """ - .. versionadded:: 3006 + .. versionadded:: 3006.0 Returns the VLAN ID of a bridge. @@ -599,7 +599,7 @@ def port_create_vxlan(br, port, id, remote, dst_port=None): def db_get(table, record, column, if_exists=False): """ - .. versionadded:: 3006 + .. versionadded:: 3006.0 Gets a column's value for a specific record. @@ -638,7 +638,7 @@ def db_get(table, record, column, if_exists=False): def db_set(table, record, column, value, if_exists=False): """ - .. versionadded:: 3006 + .. versionadded:: 3006.0 Sets a column's value for a specific record. diff --git a/salt/modules/pdbedit.py b/salt/modules/pdbedit.py index e35283eb08e5..45af6fb1ca8f 100644 --- a/salt/modules/pdbedit.py +++ b/salt/modules/pdbedit.py @@ -406,6 +406,3 @@ def modify( ret = "updated" return {login: ret} - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/pillar.py b/salt/modules/pillar.py index b96c8ec6b707..ba195ab3e393 100644 --- a/salt/modules/pillar.py +++ b/salt/modules/pillar.py @@ -32,7 +32,7 @@ def get( saltenv=None, ): """ - .. versionadded:: 0.14 + .. versionadded:: 0.14.0 Attempt to retrieve the named value from :ref:`in-memory pillar data `. 
If the pillar key is not present in the in-memory diff --git a/salt/modules/pip.py b/salt/modules/pip.py index b11be51163c7..ebe51b854741 100644 --- a/salt/modules/pip.py +++ b/salt/modules/pip.py @@ -144,7 +144,7 @@ def _check_bundled(): """ Gather run-time information to indicate if we are running from source or bundled. """ - if getattr(sys, "frozen", False) and hasattr(sys, "_MEIPASS"): + if hasattr(sys, "RELENV"): return True return False @@ -157,7 +157,7 @@ def _get_pip_bin(bin_env): if not bin_env: if _check_bundled(): logger.debug("pip: Using pip from bundled app") - return [os.path.normpath(sys.executable), "pip"] + return [str(sys.RELENV / "salt-pip")] else: logger.debug("pip: Using pip from currently-running Python") return [os.path.normpath(sys.executable), "-m", "pip"] @@ -464,7 +464,7 @@ def install( cache_dir=None, no_binary=None, disable_version_check=False, - **kwargs + **kwargs, ): """ Install packages with pip @@ -503,7 +503,9 @@ def install( or one or more package names with commas between them log - Log file where a complete (maximum verbosity) record will be kept + Log file where a complete (maximum verbosity) record will be kept. + If this file doesn't exist and the parent directory is writeable, + it will be created. proxy Specify a proxy in the form ``user:passwd@proxy.server:port``. Note @@ -758,6 +760,16 @@ def install( if log: if os.path.isdir(log): raise OSError("'{}' is a directory. Use --log path_to_file".format(log)) + if not os.path.exists(log): + parent = os.path.dirname(log) + if not os.path.exists(parent): + raise OSError( + f"Trying to create '{log}' but parent directory '{parent}' does not exist." + ) + elif not os.access(parent, os.W_OK): + raise OSError( + f"Trying to create '{log}' but parent directory '{parent}' is not writeable." 
+ ) elif not os.access(log, os.W_OK): raise OSError("'{}' is not writeable".format(log)) @@ -1336,7 +1348,7 @@ def list_(prefix=None, bin_env=None, user=None, cwd=None, env_vars=None, **kwarg user=user, cwd=cwd, env_vars=env_vars, - **kwargs + **kwargs, ) cmd = _get_pip_bin(bin_env) @@ -1648,18 +1660,6 @@ def list_all_versions( cwd = _pip_bin_env(cwd, bin_env) cmd = _get_pip_bin(bin_env) - if index_url: - if not salt.utils.url.validate(index_url, VALID_PROTOS): - raise CommandExecutionError("'{}' is not a valid URL".format(index_url)) - cmd.extend(["--index-url", index_url]) - - if extra_index_url: - if not salt.utils.url.validate(extra_index_url, VALID_PROTOS): - raise CommandExecutionError( - "'{}' is not a valid URL".format(extra_index_url) - ) - cmd.extend(["--extra-index-url", extra_index_url]) - # Is the `pip index` command available pip_version = version(bin_env=bin_env, cwd=cwd, user=user) if salt.utils.versions.compare(ver1=pip_version, oper=">=", ver2="21.2"): @@ -1671,6 +1671,17 @@ def list_all_versions( regex = re.compile(r"\s*Could not find a version.* \(from versions: (.*)\)") cmd.extend(["install", "{}==versions".format(pkg)]) + if index_url: + if not salt.utils.url.validate(index_url, VALID_PROTOS): + raise CommandExecutionError("'{}' is not a valid URL".format(index_url)) + cmd.extend(["--index-url", index_url]) + + if extra_index_url: + if not salt.utils.url.validate(extra_index_url, VALID_PROTOS): + raise CommandExecutionError( + "'{}' is not a valid URL".format(extra_index_url) + ) + cmd.extend(["--extra-index-url", extra_index_url]) cmd_kwargs = dict( cwd=cwd, runas=user, output_loglevel="quiet", redirect_stderr=True ) diff --git a/salt/modules/pkgin.py b/salt/modules/pkgin.py index c27b6ed8fceb..8380360047ca 100644 --- a/salt/modules/pkgin.py +++ b/salt/modules/pkgin.py @@ -701,6 +701,3 @@ def normalize_name(pkgs, **kwargs): with the pkg_resource provider.) 
""" return pkgs - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/postgres.py b/salt/modules/postgres.py index 25a72f1063cc..f73959a92eda 100644 --- a/salt/modules/postgres.py +++ b/salt/modules/postgres.py @@ -46,8 +46,8 @@ import io import logging import os -import pipes import re +import shlex import tempfile import salt.utils.files @@ -136,7 +136,7 @@ def __virtual__(): for util in utils: if not salt.utils.path.which(util): if not _find_pg_binary(util): - return (False, "{} was not found".format(util)) + return (False, f"{util} was not found") return True @@ -241,14 +241,14 @@ def _run_initdb( raise CommandExecutionError("initdb executable not found.") cmd = [ _INITDB_BIN, - "--pgdata={}".format(name), - "--username={}".format(user), - "--auth={}".format(auth), - "--encoding={}".format(encoding), + f"--pgdata={name}", + f"--username={user}", + f"--auth={auth}", + f"--encoding={encoding}", ] if locale is not None: - cmd.append("--locale={}".format(locale)) + cmd.append(f"--locale={locale}") # intentionally use short option, as the long option name has been # renamed from "xlogdir" to "waldir" in PostgreSQL 10 @@ -262,9 +262,9 @@ def _run_initdb( if password is not None: pgpassfile = salt.utils.files.mkstemp(text=True) with salt.utils.files.fopen(pgpassfile, "w") as fp_: - fp_.write(salt.utils.stringutils.to_str("{}".format(password))) + fp_.write(salt.utils.stringutils.to_str(f"{password}")) __salt__["file.chown"](pgpassfile, runas, "") - cmd.extend(["--pwfile={}".format(pgpassfile)]) + cmd.extend([f"--pwfile={pgpassfile}"]) kwargs = dict( runas=runas, @@ -273,7 +273,7 @@ def _run_initdb( "postgres.timeout", default=_DEFAULT_COMMAND_TIMEOUT_SECS ), ) - cmdstr = " ".join([pipes.quote(c) for c in cmd]) + cmdstr = " ".join([shlex.quote(c) for c in cmd]) ret = __salt__["cmd.run_all"](cmdstr, python_shell=False, **kwargs) if ret.get("retcode", 0) != 0: @@ -582,9 +582,7 @@ def _quote_ddl_value(value, quote="'"): if value is None: 
return None if quote in value: # detect trivial sqli - raise SaltInvocationError( - "Unsupported character {} in value: {}".format(quote, value) - ) + raise SaltInvocationError(f"Unsupported character {quote} in value: {value}") return "{quote}{value}{quote}".format(quote=quote, value=value) @@ -617,7 +615,7 @@ def db_create( """ # Base query to create a database - query = 'CREATE DATABASE "{}"'.format(name) + query = f'CREATE DATABASE "{name}"' # "With"-options to create a database with_args = salt.utils.odict.OrderedDict( @@ -685,11 +683,9 @@ def db_alter( else: queries = [] if owner: - queries.append('ALTER DATABASE "{}" OWNER TO "{}"'.format(name, owner)) + queries.append(f'ALTER DATABASE "{name}" OWNER TO "{owner}"') if tablespace: - queries.append( - 'ALTER DATABASE "{}" SET TABLESPACE "{}"'.format(name, tablespace) - ) + queries.append(f'ALTER DATABASE "{name}" SET TABLESPACE "{tablespace}"') for query in queries: ret = _psql_prepare_and_run( ["-c", query], @@ -726,10 +722,10 @@ def db_remove( salt '*' postgres.db_remove 'dbname' """ for query in [ - 'REVOKE CONNECT ON DATABASE "{db}" FROM public;'.format(db=name), + f'REVOKE CONNECT ON DATABASE "{name}" FROM public;', "SELECT pid, pg_terminate_backend(pid) FROM pg_stat_activity WHERE datname =" " '{db}' AND pid <> pg_backend_pid();".format(db=name), - 'DROP DATABASE "{db}";'.format(db=name), + f'DROP DATABASE "{name}";', ]: ret = _psql_prepare_and_run( ["-c", query], @@ -741,7 +737,7 @@ def db_remove( password=password, ) if ret["retcode"] != 0: - raise Exception("Failed: ret={}".format(ret)) + raise Exception(f"Failed: ret={ret}") return True @@ -846,10 +842,10 @@ def tablespace_create( owner_query = "" options_query = "" if owner: - owner_query = 'OWNER "{}"'.format(owner) + owner_query = f'OWNER "{owner}"' # should come out looking like: 'OWNER postgres' if options: - optionstext = ["{} = {}".format(k, v) for k, v in options.items()] + optionstext = [f"{k} = {v}" for k, v in options.items()] 
options_query = "WITH ( {} )".format(", ".join(optionstext)) # should come out looking like: 'WITH ( opt1 = 1.0, opt2 = 4.0 )' query = "CREATE TABLESPACE \"{}\" {} LOCATION '{}' {}".format( @@ -902,9 +898,9 @@ def tablespace_alter( queries = [] if new_name: - queries.append('ALTER TABLESPACE "{}" RENAME TO "{}"'.format(name, new_name)) + queries.append(f'ALTER TABLESPACE "{name}" RENAME TO "{new_name}"') if new_owner: - queries.append('ALTER TABLESPACE "{}" OWNER TO "{}"'.format(name, new_owner)) + queries.append(f'ALTER TABLESPACE "{name}" OWNER TO "{new_owner}"') if set_option: queries.append( 'ALTER TABLESPACE "{}" SET ({} = {})'.format( @@ -912,7 +908,7 @@ def tablespace_alter( ) ) if reset_option: - queries.append('ALTER TABLESPACE "{}" RESET ({})'.format(name, reset_option)) + queries.append(f'ALTER TABLESPACE "{name}" RESET ({reset_option})') for query in queries: ret = _psql_prepare_and_run( @@ -950,7 +946,7 @@ def tablespace_remove( .. versionadded:: 2015.8.0 """ - query = 'DROP TABLESPACE "{}"'.format(name) + query = f'DROP TABLESPACE "{name}"' ret = _psql_prepare_and_run( ["-c", query], user=user, @@ -1158,11 +1154,11 @@ def _add_role_flag(string, test, flag, cond=None, prefix="NO", addtxt="", skip=F cond = test if test is not None: if cond: - string = "{} {}".format(string, flag) + string = f"{string} {flag}" else: - string = "{0} {2}{1}".format(string, flag, prefix) + string = f"{string} {prefix}{flag}" if addtxt: - string = "{} {}".format(string, addtxt) + string = f"{string} {addtxt}" return string @@ -1224,7 +1220,7 @@ def _verify_password(role, password, verifier, method): def _md5_password(role, password): return "md5{}".format( hashlib.md5( # nosec - salt.utils.stringutils.to_bytes("{}{}".format(password, role)) + salt.utils.stringutils.to_bytes(f"{password}{role}") ).hexdigest() ) @@ -1343,7 +1339,7 @@ def _role_cmd_args( if isinstance(groups, list): groups = ",".join(groups) for group in groups.split(","): - sub_cmd = '{}; GRANT "{}" TO 
"{}"'.format(sub_cmd, group, name) + sub_cmd = f'{sub_cmd}; GRANT "{group}" TO "{name}"' return sub_cmd @@ -1380,7 +1376,7 @@ def _role_create( log.info("%s '%s' already exists", typ_.capitalize(), name) return False - sub_cmd = 'CREATE ROLE "{}" WITH'.format(name) + sub_cmd = f'CREATE ROLE "{name}" WITH' sub_cmd = "{} {}".format( sub_cmd, _role_cmd_args( @@ -1506,7 +1502,7 @@ def _role_update( log.info("%s '%s' could not be found", typ_.capitalize(), name) return False - sub_cmd = 'ALTER ROLE "{}" WITH'.format(name) + sub_cmd = f'ALTER ROLE "{name}" WITH' sub_cmd = "{} {}".format( sub_cmd, _role_cmd_args( @@ -1613,7 +1609,7 @@ def _role_remove( return False # user exists, proceed - sub_cmd = 'DROP ROLE "{}"'.format(name) + sub_cmd = f'DROP ROLE "{name}"' _psql_prepare_and_run( ["-c", sub_cmd], runas=runas, @@ -1995,14 +1991,14 @@ def create_extension( args = ["CREATE EXTENSION"] if if_not_exists: args.append("IF NOT EXISTS") - args.append('"{}"'.format(name)) + args.append(f'"{name}"') sargs = [] if schema: - sargs.append('SCHEMA "{}"'.format(schema)) + sargs.append(f'SCHEMA "{schema}"') if ext_version: - sargs.append("VERSION {}".format(ext_version)) + sargs.append(f"VERSION {ext_version}") if from_version: - sargs.append("FROM {}".format(from_version)) + sargs.append(f"FROM {from_version}") if sargs: args.append("WITH") args.extend(sargs) @@ -2011,13 +2007,9 @@ def create_extension( else: args = [] if schema and _EXTENSION_TO_MOVE in mtdata: - args.append( - 'ALTER EXTENSION "{}" SET SCHEMA "{}";'.format(name, schema) - ) + args.append(f'ALTER EXTENSION "{name}" SET SCHEMA "{schema}";') if ext_version and _EXTENSION_TO_UPGRADE in mtdata: - args.append( - 'ALTER EXTENSION "{}" UPDATE TO {};'.format(name, ext_version) - ) + args.append(f'ALTER EXTENSION "{name}" UPDATE TO {ext_version};') cmd = " ".join(args).strip() if cmd: _psql_prepare_and_run( @@ -2227,7 +2219,7 @@ def owner_to( sqlfile = tempfile.NamedTemporaryFile() sqlfile.write("begin;\n") - 
sqlfile.write('alter database "{}" owner to "{}";\n'.format(dbname, ownername)) + sqlfile.write(f'alter database "{dbname}" owner to "{ownername}";\n') queries = ( # schemas @@ -2335,9 +2327,9 @@ def schema_create( log.info("'%s' already exists in '%s'", name, dbname) return False - sub_cmd = 'CREATE SCHEMA "{}"'.format(name) + sub_cmd = f'CREATE SCHEMA "{name}"' if owner is not None: - sub_cmd = '{} AUTHORIZATION "{}"'.format(sub_cmd, owner) + sub_cmd = f'{sub_cmd} AUTHORIZATION "{owner}"' ret = _psql_prepare_and_run( ["-c", sub_cmd], @@ -2401,7 +2393,7 @@ def schema_remove( return False # schema exists, proceed - sub_cmd = 'DROP SCHEMA "{}"'.format(name) + sub_cmd = f'DROP SCHEMA "{name}"' _psql_prepare_and_run( ["-c", sub_cmd], runas=user, @@ -2721,7 +2713,7 @@ def language_create( log.info("Language %s already exists in %s", name, maintenance_db) return False - query = "CREATE LANGUAGE {}".format(name) + query = f"CREATE LANGUAGE {name}" ret = _psql_prepare_and_run( ["-c", query], @@ -2776,7 +2768,7 @@ def language_remove( log.info("Language %s does not exist in %s", name, maintenance_db) return False - query = "DROP LANGUAGE {}".format(name) + query = f"DROP LANGUAGE {name}" ret = _psql_prepare_and_run( ["-c", query], @@ -3035,9 +3027,7 @@ def _validate_privileges(object_type, privs, privileges): _perms.append("ALL") if object_type not in _PRIVILEGES_OBJECTS: - raise SaltInvocationError( - "Invalid object_type: {} provided".format(object_type) - ) + raise SaltInvocationError(f"Invalid object_type: {object_type} provided") if not set(privs).issubset(set(_perms)): raise SaltInvocationError( @@ -3145,9 +3135,7 @@ def privileges_list( query = _make_privileges_list_query(name, object_type, prepend) if object_type not in _PRIVILEGES_OBJECTS: - raise SaltInvocationError( - "Invalid object_type: {} provided".format(object_type) - ) + raise SaltInvocationError(f"Invalid object_type: {object_type} provided") rows = psql_query( query, @@ -3439,15 +3427,15 @@ def 
privileges_grant( _grants = ",".join(_privs) if object_type in ["table", "sequence"]: - on_part = '{}."{}"'.format(prepend, object_name) + on_part = f'{prepend}."{object_name}"' elif object_type == "function": - on_part = "{}".format(object_name) + on_part = f"{object_name}" else: - on_part = '"{}"'.format(object_name) + on_part = f'"{object_name}"' if grant_option: if object_type == "group": - query = 'GRANT {} TO "{}" WITH ADMIN OPTION'.format(object_name, name) + query = f'GRANT {object_name} TO "{name}" WITH ADMIN OPTION' elif object_type in ("table", "sequence") and object_name.upper() == "ALL": query = 'GRANT {} ON ALL {}S IN SCHEMA {} TO "{}" WITH GRANT OPTION'.format( _grants, object_type.upper(), prepend, name @@ -3458,7 +3446,7 @@ def privileges_grant( ) else: if object_type == "group": - query = 'GRANT {} TO "{}"'.format(object_name, name) + query = f'GRANT {object_name} TO "{name}"' elif object_type in ("table", "sequence") and object_name.upper() == "ALL": query = 'GRANT {} ON ALL {}S IN SCHEMA {} TO "{}"'.format( _grants, object_type.upper(), prepend, name @@ -3587,12 +3575,12 @@ def privileges_revoke( _grants = ",".join(_privs) if object_type in ["table", "sequence"]: - on_part = "{}.{}".format(prepend, object_name) + on_part = f"{prepend}.{object_name}" else: on_part = object_name if object_type == "group": - query = "REVOKE {} FROM {}".format(object_name, name) + query = f"REVOKE {object_name} FROM {name}" else: query = "REVOKE {} ON {} {} FROM {}".format( _grants, object_type.upper(), on_part, name diff --git a/salt/modules/ps.py b/salt/modules/ps.py index ab307e98b53f..3550880268d9 100644 --- a/salt/modules/ps.py +++ b/salt/modules/ps.py @@ -218,7 +218,7 @@ def proc_info(pid, attrs=None): attrs Optional list of desired process attributes. 
The list of possible attributes can be found here: - http://pythonhosted.org/psutil/#psutil.Process + https://psutil.readthedocs.io/en/latest/#processes """ try: proc = psutil.Process(pid) @@ -648,34 +648,8 @@ def get_users(): salt '*' ps.get_users """ - try: - recs = psutil.users() - return [dict(x._asdict()) for x in recs] - except AttributeError: - # get_users is only present in psutil > v0.5.0 - # try utmp - try: - import utmp # pylint: disable=import-error - - result = [] - while True: - rec = utmp.utmpaccess.getutent() - if rec is None: - return result - elif rec[0] == 7: - started = rec[8] - if isinstance(started, tuple): - started = started[0] - result.append( - { - "name": rec[4], - "terminal": rec[2], - "started": started, - "host": rec[5], - } - ) - except ImportError: - return False + recs = psutil.users() + return [dict(x._asdict()) for x in recs] def lsof(name): @@ -768,7 +742,7 @@ def psaux(name): if not salt_exception_pattern.search(info): nb_lines += 1 found_infos.append(info) - pid_count = str(nb_lines) + " occurence(s)." + pid_count = str(nb_lines) + " occurrence(s)." ret = [] ret.extend([sanitize_name, found_infos, pid_count]) return ret diff --git a/salt/modules/rbac_solaris.py b/salt/modules/rbac_solaris.py index a740bf946217..876eaf0bf70f 100644 --- a/salt/modules/rbac_solaris.py +++ b/salt/modules/rbac_solaris.py @@ -634,6 +634,3 @@ def auth_rm(user, auth): ret[a] = "Remove" return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/rh_service.py b/salt/modules/rh_service.py index 4a697101ef0a..b8b802479fed 100644 --- a/salt/modules/rh_service.py +++ b/salt/modules/rh_service.py @@ -545,7 +545,7 @@ def delete(name, **kwargs): """ Delete the named service - .. versionadded:: 2016.3 + .. 
versionadded:: 2016.3.0 CLI Example: diff --git a/salt/modules/runit.py b/salt/modules/runit.py index 2a5e655c2bbf..29f95bf377bb 100644 --- a/salt/modules/runit.py +++ b/salt/modules/runit.py @@ -726,6 +726,3 @@ def remove(name): log.error("Unable to remove symlink %s", svc_path) return False return True - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/salt_version.py b/salt/modules/salt_version.py index 1b5421fee4a7..99dae5f61a5b 100644 --- a/salt/modules/salt_version.py +++ b/salt/modules/salt_version.py @@ -20,7 +20,7 @@ .. code-block:: jinja {# a boolean check #} - {% set option_deprecated = salt['salt_version.less_than']("3001") %} + {% set option_deprecated = salt['salt_version.less_than']("Sodium") %} {% if option_deprecated %} @@ -35,6 +35,7 @@ import salt.utils.versions import salt.version +from salt.exceptions import CommandExecutionError log = logging.getLogger(__name__) @@ -51,7 +52,7 @@ def __virtual__(): def get_release_number(name): """ Returns the release number of a given release code name in a - ``MAJOR.PATCH`` format. + ``MAJOR.PATCH`` format (for Salt versions < 3000) or ``MAJOR`` for newer Salt versions. If the release name has not been given an assigned release number, the function returns a string. If the release cannot be found, it returns @@ -66,6 +67,9 @@ def get_release_number(name): salt '*' salt_version.get_release_number 'Oxygen' """ + if not isinstance(name, str): + raise CommandExecutionError("'name' argument must be a string") + name = name.lower() version_map = salt.version.SaltStackVersion.LNAMES version = version_map.get(name) diff --git a/salt/modules/saltutil.py b/salt/modules/saltutil.py index 4642d5011bfa..ecf467046aa2 100644 --- a/salt/modules/saltutil.py +++ b/salt/modules/saltutil.py @@ -381,6 +381,9 @@ def refresh_grains(**kwargs): refresh_pillar : True Set to ``False`` to keep pillar data from being refreshed. + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. 
+ CLI Examples: .. code-block:: bash @@ -389,6 +392,7 @@ def refresh_grains(**kwargs): """ kwargs = salt.utils.args.clean_kwargs(**kwargs) _refresh_pillar = kwargs.pop("refresh_pillar", True) + clean_pillar_cache = kwargs.pop("clean_pillar_cache", False) if kwargs: salt.utils.args.invalid_kwargs(kwargs) # Modules and pillar need to be refreshed in case grains changes affected @@ -396,14 +400,18 @@ def refresh_grains(**kwargs): # newly-reloaded grains to each execution module's __grains__ dunder. if _refresh_pillar: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) else: refresh_modules() return True def sync_grains( - saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None + saltenv=None, + refresh=True, + extmod_whitelist=None, + extmod_blacklist=None, + clean_pillar_cache=False, ): """ .. versionadded:: 0.10.0 @@ -430,6 +438,9 @@ def sync_grains( extmod_blacklist : None comma-separated list of modules to blacklist based on type + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + CLI Examples: .. code-block:: bash @@ -441,7 +452,7 @@ def sync_grains( ret = _sync("grains", saltenv, extmod_whitelist, extmod_blacklist) if refresh: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) return ret @@ -915,7 +926,11 @@ def sync_log_handlers( def sync_pillar( - saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None + saltenv=None, + refresh=True, + extmod_whitelist=None, + extmod_blacklist=None, + clean_pillar_cache=False, ): """ .. versionadded:: 2015.8.11,2016.3.2 @@ -935,6 +950,9 @@ def sync_pillar( extmod_blacklist : None comma-separated list of modules to blacklist based on type + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + .. 
note:: This function will raise an error if executed on a traditional (i.e. not masterless) minion @@ -953,7 +971,7 @@ def sync_pillar( ret = _sync("pillar", saltenv, extmod_whitelist, extmod_blacklist) if refresh: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) return ret @@ -998,7 +1016,13 @@ def sync_executors( return ret -def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist=None): +def sync_all( + saltenv=None, + refresh=True, + extmod_whitelist=None, + extmod_blacklist=None, + clean_pillar_cache=False, +): """ .. versionchanged:: 2015.8.11,2016.3.2 On masterless minions, pillar modules are now synced, and refreshed @@ -1036,6 +1060,9 @@ def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist extmod_blacklist : None dictionary of modules to blacklist based on type + clean_pillar_cache : False + Set to ``True`` to refresh pillar cache. + CLI Examples: .. 
code-block:: bash @@ -1080,7 +1107,7 @@ def sync_all(saltenv=None, refresh=True, extmod_whitelist=None, extmod_blacklist ret["pillar"] = sync_pillar(saltenv, False, extmod_whitelist, extmod_blacklist) if refresh: # we don't need to call refresh_modules here because it's done by refresh_pillar - refresh_pillar() + refresh_pillar(clean_cache=clean_pillar_cache) return ret @@ -1730,8 +1757,10 @@ def runner( arg = [] if kwarg is None: kwarg = {} + pub_data = {} jid = kwargs.pop("__orchestration_jid__", jid) saltenv = kwargs.pop("__env__", saltenv) + pub_data["user"] = kwargs.pop("__pub_user", "UNKNOWN") kwargs = salt.utils.args.clean_kwargs(**kwargs) if kwargs: kwarg.update(kwargs) @@ -1760,7 +1789,12 @@ def runner( ) return rclient.cmd( - name, arg=arg, kwarg=kwarg, print_event=False, full_return=full_return + name, + arg=arg, + pub_data=pub_data, + kwarg=kwarg, + print_event=False, + full_return=full_return, ) diff --git a/salt/modules/schedule.py b/salt/modules/schedule.py index 64e2234c3bd2..b8553bc6f785 100644 --- a/salt/modules/schedule.py +++ b/salt/modules/schedule.py @@ -521,8 +521,7 @@ def build_schedule_item(name, **kwargs): else: schedule[name]["enabled"] = True - if "jid_include" not in kwargs or kwargs["jid_include"]: - schedule[name]["jid_include"] = True + schedule[name]["jid_include"] = kwargs.get("jid_include", True) if "splay" in kwargs: if isinstance(kwargs["splay"], dict): diff --git a/salt/modules/selinux.py b/salt/modules/selinux.py index 9f069bb3122d..c12db3d9e19e 100644 --- a/salt/modules/selinux.py +++ b/salt/modules/selinux.py @@ -135,13 +135,13 @@ def setenforce(mode): if isinstance(mode, str): if mode.lower() == "enforcing": mode = "1" - modestring = "Enforcing" + modestring = "enforcing" elif mode.lower() == "permissive": mode = "0" - modestring = "Permissive" + modestring = "permissive" elif mode.lower() == "disabled": mode = "0" - modestring = "Disabled" + modestring = "disabled" else: return "Invalid mode {}".format(mode) elif 
isinstance(mode, int): @@ -477,8 +477,10 @@ def fcontext_get_policy( if filetype: _validate_filetype(filetype) re_spacer = "[ ]+" + re_optional_spacer = "[ |\t]*" cmd_kwargs = { "spacer": re_spacer, + "ospacer": re_optional_spacer, "filespec": re.escape(name), "sel_user": sel_user or "[^:]+", "sel_role": "[^:]+", # se_role for file context is always object_r @@ -490,7 +492,7 @@ def fcontext_get_policy( ) cmd = ( "semanage fcontext -l | egrep " - + "'^{filespec}{spacer}{filetype}{spacer}{sel_user}:{sel_role}:{sel_type}:{sel_level}$'".format( + + "'^{filespec}{spacer}{filetype}{spacer}{sel_user}:{sel_role}:{sel_type}:{sel_level}{ospacer}$'".format( **cmd_kwargs ) ) @@ -609,20 +611,29 @@ def _fcontext_add_or_delete_policy( """ if action not in ["add", "delete"]: raise SaltInvocationError( - 'Actions supported are "add" and "delete", not "{}".'.format(action) + f'Actions supported are "add" and "delete", not "{action}".' ) - cmd = "semanage fcontext --{}".format(action) + + if "add" == action: + # need to use --modify if context for name file exists, otherwise ValueError + filespec = re.escape(name) + cmd = f"semanage fcontext -l | egrep '{filespec}'" + current_entry_text = __salt__["cmd.shell"](cmd, ignore_retcode=True) + if current_entry_text != "": + action = "modify" + + cmd = f"semanage fcontext --{action}" # "semanage --ftype a" isn't valid on Centos 6, # don't pass --ftype since "a" is the default filetype. 
if filetype is not None and filetype != "a": _validate_filetype(filetype) - cmd += " --ftype {}".format(filetype) + cmd += f" --ftype {filetype}" if sel_type is not None: - cmd += " --type {}".format(sel_type) + cmd += f" --type {sel_type}" if sel_user is not None: - cmd += " --seuser {}".format(sel_user) + cmd += f" --seuser {sel_user}" if sel_level is not None: - cmd += " --range {}".format(sel_level) + cmd += f" --range {sel_level}" cmd += " " + re.escape(name) return __salt__["cmd.run_all"](cmd) diff --git a/salt/modules/slackware_service.py b/salt/modules/slackware_service.py index 51562d030326..54bc3c2a77fb 100644 --- a/salt/modules/slackware_service.py +++ b/salt/modules/slackware_service.py @@ -349,6 +349,3 @@ def disabled(name): if _get_svc("{}.{}".format(prefix, name), "OFF") is None: ret = False return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/smartos_imgadm.py b/salt/modules/smartos_imgadm.py index 688371cdd249..5d5cb75e7508 100644 --- a/salt/modules/smartos_imgadm.py +++ b/salt/modules/smartos_imgadm.py @@ -508,6 +508,3 @@ def source_add(source, source_type="imgapi"): return ret return sources(False) - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/smartos_nictagadm.py b/salt/modules/smartos_nictagadm.py index e2991be6708d..99d09826e287 100644 --- a/salt/modules/smartos_nictagadm.py +++ b/salt/modules/smartos_nictagadm.py @@ -263,6 +263,3 @@ def delete(name, force=False): if "stderr" not in res and res["stderr"] == "" else res["stderr"] } - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/smartos_vmadm.py b/salt/modules/smartos_vmadm.py index 4e5b97baddf9..3ef9c3d5b6a2 100644 --- a/salt/modules/smartos_vmadm.py +++ b/salt/modules/smartos_vmadm.py @@ -875,6 +875,3 @@ def receive(uuid, source): ret["Error"] = res["stderr"] if "stderr" in res else _exit_status(retcode) return ret return True - - -# vim: tabstop=4 expandtab shiftwidth=4 
softtabstop=4 diff --git a/salt/modules/solaris_fmadm.py b/salt/modules/solaris_fmadm.py index 7617bed56db1..7dae9d6b100f 100644 --- a/salt/modules/solaris_fmadm.py +++ b/salt/modules/solaris_fmadm.py @@ -509,6 +509,3 @@ def healthy(): salt '*' fmadm.healthy """ return False if faulty() else True - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/sysmod.py b/salt/modules/sysmod.py index 190435e21e0a..e756e544139d 100644 --- a/salt/modules/sysmod.py +++ b/salt/modules/sysmod.py @@ -546,7 +546,7 @@ def list_state_functions(*args, **kwargs): # pylint: disable=unused-argument salt '*' sys.list_state_functions 'file.*' salt '*' sys.list_state_functions 'file.s*' - .. versionadded:: 2016.9 + .. versionadded:: 2016.9.0 .. code-block:: bash diff --git a/salt/modules/test.py b/salt/modules/test.py index 62d96f52118b..fe4c8ec9ae1c 100644 --- a/salt/modules/test.py +++ b/salt/modules/test.py @@ -18,6 +18,7 @@ import salt.utils.platform import salt.utils.versions import salt.version +from salt.config import DEFAULT_HASH_TYPE from salt.utils.decorators import depends __proxyenabled__ = ["*"] @@ -528,7 +529,7 @@ def random_hash(size=9999999999, hash_type=None): salt '*' test.random_hash hash_type=sha512 """ if not hash_type: - hash_type = __opts__.get("hash_type", "md5") + hash_type = __opts__.get("hash_type", DEFAULT_HASH_TYPE) return salt.utils.hashutils.random_hash(size=size, hash_type=hash_type) diff --git a/salt/modules/timezone.py b/salt/modules/timezone.py index 8c05d42cbb46..4904c8dcc6ea 100644 --- a/salt/modules/timezone.py +++ b/salt/modules/timezone.py @@ -16,6 +16,7 @@ import salt.utils.path import salt.utils.platform import salt.utils.stringutils +from salt.config import DEFAULT_HASH_TYPE from salt.exceptions import CommandExecutionError, SaltInvocationError log = logging.getLogger(__name__) @@ -121,7 +122,7 @@ def _get_zone_etc_localtime(): tzfile, ) # Regular file. Try to match the hash. 
- hash_type = __opts__.get("hash_type", "md5") + hash_type = __opts__.get("hash_type", DEFAULT_HASH_TYPE) tzfile_hash = salt.utils.hashutils.get_hash(tzfile, hash_type) # Not a link, just a copy of the tzdata file for root, dirs, files in salt.utils.path.os_walk(tzdir): diff --git a/salt/modules/transactional_update.py b/salt/modules/transactional_update.py index 6493966782e3..71f6427b15e0 100644 --- a/salt/modules/transactional_update.py +++ b/salt/modules/transactional_update.py @@ -6,7 +6,7 @@ A transactional system, like `MicroOS`_, can present some challenges when the user decided to manage it via Salt. -MicroOS provide a read-only rootfs and a tool, +MicroOS provides a read-only rootfs and a tool, ``transactional-update``, that takes care of the management of the system (updating, upgrading, installation or reboot, among others) in an atomic way. @@ -128,7 +128,7 @@ For example:: - transactional-update --continue --drop-if-no-change run zypper in apache2" + transactional-update --continue --drop-if-no-change run zypper in apache2 If we are in the scenario described before, ``apache2`` is already present in T.3. In this case a new transaction, T.4, will be created @@ -240,11 +240,11 @@ Two step for service restart ............................ -In the ``apache2` example from the beginning we can observe the +In the ``apache2`` example from the beginning we can observe the biggest drawback. If the package ``apache2`` is missing, the new module will create a new transaction, will execute ``pkg.install`` inside the transaction (creating the salt-thin, moving it inside and -delegating the execution to `transactional-update` CLI as part of the +delegating the execution to ``transactional-update`` CLI as part of the full state). Inside the transaction we can do too the required changes in ``/etc`` for adding the new ``vhost``, and we can enable the service via systemctl inside the same transaction. 
@@ -557,8 +557,8 @@ def kdump(self_update=False, snapshot=None): def run(command, self_update=False, snapshot=None): """Run a command in a new snapshot - Execute the command inside a new snapshot. By default this snaphot - will remain, but if --drop-if-no-chage is set, the new snapshot + Execute the command inside a new snapshot. By default this snapshot + will remain, but if --drop-if-no-change is set, the new snapshot will be dropped if there is no change in the file system. command @@ -920,7 +920,7 @@ def call(function, *args, **kwargs): activate_transaction If at the end of the transaction there is a pending activation - (i.e there is a new snaphot in the system), a new reboot will + (i.e there is a new snapshot in the system), a new reboot will be scheduled (default False) CLI Example: @@ -991,7 +991,7 @@ def apply_(mods=None, **kwargs): activate_transaction If at the end of the transaction there is a pending activation - (i.e there is a new snaphot in the system), a new reboot will + (i.e there is a new snapshot in the system), a new reboot will be scheduled (default False) CLI Example: @@ -1030,15 +1030,15 @@ def sls(mods, activate_transaction=False, queue=False, **kwargs): activate_transaction If at the end of the transaction there is a pending activation - (i.e there is a new snaphot in the system), a new reboot will - be scheduled (default False) + (i.e there is a new snapshot in the system), a new reboot will + be scheduled (Default: False). queue Instead of failing immediately when another state run is in progress, queue the new state run to begin running once the other has finished. This option starts a new thread for each queued state run, so use this - option sparingly. (Default: False) + option sparingly (Default: False). For a formal description of the possible parameters accepted in this function, check `state.sls` documentation. 
@@ -1075,15 +1075,15 @@ def highstate(activate_transaction=False, queue=False, **kwargs): activate_transaction If at the end of the transaction there is a pending activation - (i.e there is a new snaphot in the system), a new reboot will - be scheduled (default False) + (i.e there is a new snapshot in the system), a new reboot will + be scheduled (Default: False). queue Instead of failing immediately when another state run is in progress, queue the new state run to begin running once the other has finished. This option starts a new thread for each queued state run, so use this - option sparingly. (Default: False) + option sparingly (Default: False). CLI Example: @@ -1117,15 +1117,15 @@ def single(fun, name, activate_transaction=False, queue=False, **kwargs): activate_transaction If at the end of the transaction there is a pending activation - (i.e there is a new snaphot in the system), a new reboot will - be scheduled (default False) + (i.e there is a new snapshot in the system), a new reboot will + be scheduled (Default: False). queue Instead of failing immediately when another state run is in progress, queue the new state run to begin running once the other has finished. This option starts a new thread for each queued state run, so use this - option sparingly. (Default: False) + option sparingly (Default: False). 
CLI Example: diff --git a/salt/modules/win_file.py b/salt/modules/win_file.py index d02d4589f2fa..a7a411c93cc6 100644 --- a/salt/modules/win_file.py +++ b/salt/modules/win_file.py @@ -16,8 +16,10 @@ import sys import tempfile +import salt.utils.files import salt.utils.path import salt.utils.platform +import salt.utils.user from salt.exceptions import CommandExecutionError, SaltInvocationError from salt.modules.file import ( __clean_tmp, @@ -107,6 +109,15 @@ except ImportError: HAS_WINDOWS_MODULES = False +HAS_WIN_DACL = False +try: + if salt.utils.platform.is_windows(): + import salt.utils.win_dacl + + HAS_WIN_DACL = True +except ImportError: + HAS_WIN_DACL = False + if salt.utils.platform.is_windows(): if HAS_WINDOWS_MODULES: # namespace functions from file.py @@ -194,6 +205,8 @@ def __virtual__(): """ if not salt.utils.platform.is_windows() or not HAS_WINDOWS_MODULES: return False, "Module win_file: Missing Win32 modules" + if not HAS_WIN_DACL: + return False, "Module win_file: Unable to load salt.utils.win_dacl" return __virtualname__ @@ -305,7 +318,7 @@ def group_to_gid(group): if group is None: return "" - return __utils__["dacl.get_sid_string"](group) + return salt.utils.win_dacl.get_sid_string(group) def get_pgid(path, follow_symlinks=True): @@ -346,8 +359,8 @@ def get_pgid(path, follow_symlinks=True): if follow_symlinks and sys.getwindowsversion().major >= 6: path = _resolve_symlink(path) - group_name = __utils__["dacl.get_primary_group"](path) - return __utils__["dacl.get_sid_string"](group_name) + group_name = salt.utils.win_dacl.get_primary_group(path) + return salt.utils.win_dacl.get_sid_string(group_name) def get_pgroup(path, follow_symlinks=True): @@ -498,7 +511,7 @@ def uid_to_user(uid): if uid is None or uid == "": return "" - return __utils__["dacl.get_name"](uid) + return salt.utils.win_dacl.get_name(uid) def user_to_uid(user): @@ -518,9 +531,9 @@ def user_to_uid(user): salt '*' file.user_to_uid myusername """ if user is None: - user = 
__utils__["user.get_user"]() + user = salt.utils.user.get_user() - return __utils__["dacl.get_sid_string"](user) + return salt.utils.win_dacl.get_sid_string(user) def get_uid(path, follow_symlinks=True): @@ -558,8 +571,8 @@ def get_uid(path, follow_symlinks=True): if follow_symlinks and sys.getwindowsversion().major >= 6: path = _resolve_symlink(path) - owner_sid = __utils__["dacl.get_owner"](path) - return __utils__["dacl.get_sid_string"](owner_sid) + owner_sid = salt.utils.win_dacl.get_owner(path) + return salt.utils.win_dacl.get_sid_string(owner_sid) def get_user(path, follow_symlinks=True): @@ -597,7 +610,7 @@ def get_user(path, follow_symlinks=True): if follow_symlinks and sys.getwindowsversion().major >= 6: path = _resolve_symlink(path) - return __utils__["dacl.get_owner"](path) + return salt.utils.win_dacl.get_owner(path) def get_mode(path): @@ -735,9 +748,9 @@ def chown(path, user, group=None, pgroup=None, follow_symlinks=True): if not os.path.exists(path): raise CommandExecutionError("Path not found: {}".format(path)) - __utils__["dacl.set_owner"](path, user) + salt.utils.win_dacl.set_owner(path, user) if pgroup: - __utils__["dacl.set_primary_group"](path, pgroup) + salt.utils.win_dacl.set_primary_group(path, pgroup) return True @@ -767,7 +780,7 @@ def chpgrp(path, group): salt '*' file.chpgrp c:\\temp\\test.txt Administrators salt '*' file.chpgrp c:\\temp\\test.txt "'None'" """ - return __utils__["dacl.set_primary_group"](path, group) + return salt.utils.win_dacl.set_primary_group(path, group) def chgrp(path, group): @@ -802,7 +815,7 @@ def chgrp(path, group): .. 
code-block:: bash - salt '*' file.chpgrp c:\\temp\\test.txt administrators + salt '*' file.chgrp c:\\temp\\test.txt administrators """ func_name = "{}.chgrp".format(__virtualname__) if __opts__.get("fun", "") == func_name: @@ -871,7 +884,7 @@ def stats(path, hash_type="sha256", follow_symlinks=True): ret["mtime"] = pstat.st_mtime ret["ctime"] = pstat.st_ctime ret["size"] = pstat.st_size - ret["mode"] = __utils__["files.normalize_mode"](oct(stat.S_IMODE(pstat.st_mode))) + ret["mode"] = salt.utils.files.normalize_mode(oct(stat.S_IMODE(pstat.st_mode))) if hash_type: ret["sum"] = get_sum(path, hash_type) ret["type"] = "file" @@ -1503,7 +1516,7 @@ def is_link(path): ) try: - return __utils__["path.islink"](path) + return salt.utils.path.islink(path) except Exception as exc: # pylint: disable=broad-except raise CommandExecutionError(exc) @@ -1594,10 +1607,10 @@ def mkdir( # Set owner if owner: - __utils__["dacl.set_owner"](obj_name=path, principal=owner) + salt.utils.win_dacl.set_owner(obj_name=path, principal=owner) # Set permissions - __utils__["dacl.set_perms"]( + salt.utils.win_dacl.set_perms( obj_name=path, obj_type="file", grant_perms=grant_perms, @@ -1916,7 +1929,7 @@ def check_perms( path = os.path.expanduser(path) - return __utils__["dacl.check_perms"]( + return salt.utils.win_dacl.check_perms( obj_name=path, obj_type="file", ret=ret, @@ -1925,6 +1938,7 @@ def check_perms( deny_perms=deny_perms, inheritance=inheritance, reset=reset, + test_mode=__opts__["test"], ) @@ -2002,7 +2016,7 @@ def set_perms(path, grant_perms=None, deny_perms=None, inheritance=True, reset=F # Specify advanced attributes with a list salt '*' file.set_perms C:\\Temp\\ "{'jsnuffy': {'perms': ['read_attributes', 'read_ea'], 'applies_to': 'this_folder_only'}}" """ - return __utils__["dacl.set_perms"]( + return salt.utils.win_dacl.set_perms( obj_name=path, obj_type="file", grant_perms=grant_perms, diff --git a/salt/modules/win_iis.py b/salt/modules/win_iis.py index 0c97aa84896e..42ec335bf32a 
100644 --- a/salt/modules/win_iis.py +++ b/salt/modules/win_iis.py @@ -13,10 +13,9 @@ import os import re -import yaml - import salt.utils.json import salt.utils.platform +import salt.utils.yaml from salt.exceptions import CommandExecutionError, SaltInvocationError log = logging.getLogger(__name__) @@ -187,7 +186,7 @@ def _prepare_settings(pspath, settings): match = re.search(r"Collection\[(\{.*\})\]", setting["name"]) if match: name = setting["name"][: match.start(1) - 1] - match_dict = yaml.load(match.group(1)) + match_dict = salt.utils.yaml.load(match.group(1)) index = _collection_match_to_index( pspath, setting["filter"], name, match_dict ) diff --git a/salt/modules/win_lgpo.py b/salt/modules/win_lgpo.py index e8ca41f4e1be..324d49bcba36 100644 --- a/salt/modules/win_lgpo.py +++ b/salt/modules/win_lgpo.py @@ -4799,8 +4799,6 @@ def _sidConversion(cls, val, **kwargs): """ converts a list of pysid objects to string representations """ - if isinstance(val, str): - val = val.split(",") usernames = [] for _sid in val: try: @@ -4918,11 +4916,11 @@ def _dict_lookup_bitwise_add(cls, item, **kwargs): return None if value_lookup: if not isinstance(item, list): - return "Invalid Value" + return "Invalid Value: Not a list" ret_val = 0 else: if not isinstance(item, int): - return "Invalid Value" + return "Invalid Value: Not an int" ret_val = [] if "lookup" in kwargs: for k, v in kwargs["lookup"].items(): @@ -4937,7 +4935,7 @@ def _dict_lookup_bitwise_add(cls, item, **kwargs): if do_test and isinstance(k, int) and item & k == k: ret_val.append(v) else: - return "Invalid Value" + return "Invalid Value: No lookup passed" return ret_val @classmethod @@ -6927,7 +6925,7 @@ def _checkAllAdmxPolicies( if etree.QName(child_item).localname == "boolean": # https://msdn.microsoft.com/en-us/library/dn605978(v=vs.85).aspx - if child_item is not None: + if len(child_item) > 0: if ( TRUE_VALUE_XPATH(child_item) and this_element_name not in configured_elements @@ -9195,7 +9193,7 @@ def 
_get_policy_adm_setting( ) if etree.QName(child_item).localname == "boolean": # https://msdn.microsoft.com/en-us/library/dn605978(v=vs.85).aspx - if child_item is not None: + if len(child_item) > 0: if ( TRUE_VALUE_XPATH(child_item) and this_element_name not in configured_elements diff --git a/salt/modules/win_lgpo_reg.py b/salt/modules/win_lgpo_reg.py index cc678549ae4e..e84d0dc2ffe7 100644 --- a/salt/modules/win_lgpo_reg.py +++ b/salt/modules/win_lgpo_reg.py @@ -137,9 +137,10 @@ def write_reg_pol(data, policy_class="Machine"): Raises: SaltInvocationError: Invalid policy class + CommandExecutionError: On failure Returns: - None + bool: True if successful CLI Example: @@ -175,7 +176,6 @@ def get_value(key, v_name, policy_class="Machine"): file. Args: - key (str): The registry key where the value name resides v_name (str): The value name to retrieve @@ -228,7 +228,6 @@ def get_key(key, policy_class="Machine"): Get all the values set in a key in the ``Registry.pol`` file. Args: - key (str): The registry key where the values reside policy_class (str): The registry class to read from. Can be one of the @@ -278,7 +277,6 @@ def set_value( style policies. This is the equivalent of setting a policy to ``Enabled`` Args: - key (str): The registry key path v_name (str): The registry value name within the key @@ -305,14 +303,14 @@ def set_value( Default is ``Machine`` - Returns: - bool: ``True`` if successful, otherwise ``False`` - Raises: SaltInvocationError: Invalid policy_class SaltInvocationError: Invalid v_type SaltInvocationError: v_data doesn't match v_type + Returns: + bool: ``True`` if successful, otherwise ``False`` + CLI Example: .. code-block:: bash @@ -369,29 +367,40 @@ def set_value( if key.lower() == p_key.lower(): found_key = p_key for p_name in pol_data[p_key]: - if v_name.lower() in p_name.lower(): + if v_name.lower() == p_name.lower().lstrip("**del."): found_name = p_name if found_key: if found_name: if "**del." 
in found_name: + log.debug(f"LGPO_REG Mod: Found disabled name: {found_name}") pol_data[found_key][v_name] = pol_data[found_key].pop(found_name) found_name = v_name + log.debug(f"LGPO_REG Mod: Updating value: {found_name}") pol_data[found_key][found_name] = {"data": v_data, "type": v_type} else: + log.debug(f"LGPO_REG Mod: Setting new value: {found_name}") pol_data[found_key][v_name] = {"data": v_data, "type": v_type} else: + log.debug(f"LGPO_REG Mod: Adding new key and value: {found_name}") pol_data[key] = {v_name: {"data": v_data, "type": v_type}} - write_reg_pol(pol_data) + success = True + if not write_reg_pol(pol_data, policy_class=policy_class): + log.error("LGPO_REG Mod: Failed to write registry.pol file") + success = False - salt.utils.win_reg.set_value( + if not salt.utils.win_reg.set_value( hive=hive, key=key, vname=v_name, vdata=v_data, vtype=v_type, - ) + ): + log.error("LGPO_REG Mod: Failed to set registry entry") + success = False + + return success def disable_value(key, v_name, policy_class="machine"): @@ -401,7 +410,6 @@ def disable_value(key, v_name, policy_class="machine"): to ``Disabled`` in the Group Policy editor (``gpedit.msc``) Args: - key (str): The registry key path v_name (str): The registry value name within the key @@ -415,13 +423,14 @@ def disable_value(key, v_name, policy_class="machine"): Default is ``Machine`` + Raises: + SaltInvocationError: Invalid policy_class + CommandExecutionError: On failure + Returns: bool: ``True`` if successful, otherwise ``False`` None: If already disabled - Raises: - SaltInvocationError: Invalid policy_class - CLI Example: .. code-block:: bash @@ -447,28 +456,42 @@ def disable_value(key, v_name, policy_class="machine"): if key.lower() == p_key.lower(): found_key = p_key for p_name in pol_data[p_key]: - if v_name.lower() in p_name.lower(): + if v_name.lower() == p_name.lower().lstrip("**del."): found_name = p_name if found_key: if found_name: if "**del." in found_name: - # Already set to delete... 
do nothing + log.debug(f"LGPO_REG Mod: Already disabled: {v_name}") return None + log.debug(f"LGPO_REG Mod: Disabling value name: {v_name}") pol_data[found_key].pop(found_name) found_name = "**del.{}".format(found_name) pol_data[found_key][found_name] = {"data": " ", "type": "REG_SZ"} else: + log.debug(f"LGPO_REG Mod: Setting new disabled value name: {v_name}") pol_data[found_key]["**del.{}".format(v_name)] = { "data": " ", "type": "REG_SZ", } else: + log.debug(f"LGPO_REG Mod: Adding new key and disabled value name: {found_name}") pol_data[key] = {"**del.{}".format(v_name): {"data": " ", "type": "REG_SZ"}} - write_reg_pol(pol_data) + success = True + if not write_reg_pol(pol_data, policy_class=policy_class): + log.error("LGPO_REG Mod: Failed to write registry.pol file") + success = False + + ret = salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name) + if not ret: + if ret is None: + log.debug("LGPO_REG Mod: Registry key/value already missing") + else: + log.error("LGPO_REG Mod: Failed to remove registry entry") + success = False - salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name) + return success def delete_value(key, v_name, policy_class="Machine"): @@ -478,7 +501,6 @@ def delete_value(key, v_name, policy_class="Machine"): ``Not Configured``. Args: - key (str): The registry key path v_name (str): The registry value name within the key @@ -492,13 +514,14 @@ def delete_value(key, v_name, policy_class="Machine"): Default is ``Machine`` + Raises: + SaltInvocationError: Invalid policy_class + CommandExecutionError: On failure + Returns: bool: ``True`` if successful, otherwise ``False`` None: Key/value not present - Raises: - SaltInvocationError: Invalid policy_class - CLI Example: .. 
code-block:: bash @@ -525,20 +548,37 @@ def delete_value(key, v_name, policy_class="Machine"): if key.lower() == p_key.lower(): found_key = p_key for p_name in pol_data[p_key]: - if v_name.lower() in p_name.lower(): + if v_name.lower() == p_name.lower().lstrip("**del."): found_name = p_name if found_key: if found_name: + log.debug(f"LGPO_REG Mod: Removing value name: {found_name}") pol_data[found_key].pop(found_name) + else: + log.debug(f"LGPO_REG Mod: Value name not found: {v_name}") + return None if len(pol_data[found_key]) == 0: + log.debug(f"LGPO_REG Mod: Removing empty key: {found_key}") pol_data.pop(found_key) else: + log.debug(f"LGPO_REG Mod: Key not found: {key}") return None - write_reg_pol(pol_data) + success = True + if not write_reg_pol(pol_data, policy_class=policy_class): + log.error("LGPO_REG Mod: Failed to write registry.pol file") + success = False + + ret = salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name) + if not ret: + if ret is None: + log.debug("LGPO_REG Mod: Registry key/value already missing") + else: + log.error("LGPO_REG Mod: Failed to remove registry entry") + success = False - salt.utils.win_reg.delete_value(hive=hive, key=key, vname=v_name) + return success # This is for testing different settings and verifying that we are writing the diff --git a/salt/modules/win_pkg.py b/salt/modules/win_pkg.py index 46202fcad3d7..e8fdf22e4191 100644 --- a/salt/modules/win_pkg.py +++ b/salt/modules/win_pkg.py @@ -65,7 +65,7 @@ SaltInvocationError, SaltRenderError, ) -from salt.utils.versions import Version +from salt.utils.versions import LooseVersion log = logging.getLogger(__name__) @@ -215,7 +215,7 @@ def upgrade_available(name, **kwargs): refresh = salt.utils.data.is_true(kwargs.get("refresh", True)) # if latest_version returns blank, the latest version is already installed or - # their is no package definition. This is a salt standard which could be improved. + # there is no package definition. 
This is a salt standard which could be improved. return latest_version(name, saltenv=saltenv, refresh=refresh) != "" @@ -1298,7 +1298,7 @@ def _failed_compile(prefix_msg, error_msg): successful_verbose[short_path_name] = [] -def _get_source_sum(source_hash, file_path, saltenv): +def _get_source_sum(source_hash, file_path, saltenv, verify_ssl=True): """ Extract the hash sum, whether it is in a remote hash file, or just a string. """ @@ -1314,7 +1314,9 @@ def _get_source_sum(source_hash, file_path, saltenv): if source_hash_scheme in schemes: # The source_hash is a file on a server try: - cached_hash_file = __salt__["cp.cache_file"](source_hash, saltenv) + cached_hash_file = __salt__["cp.cache_file"]( + source_hash, saltenv=saltenv, verify_ssl=verify_ssl, use_etag=True + ) except MinionError as exc: log.exception("Failed to cache %s", source_hash, exc_info=exc) raise @@ -1361,6 +1363,28 @@ def _get_msiexec(use_msiexec): return True, "msiexec" +def normalize_name(name): + """ + Nothing to do on Windows. We need this function so that Salt doesn't go + through every module looking for ``pkg.normalize_name``. + + .. versionadded:: 3006.0 + + Args: + name (str): The name of the package + + Returns: + str: The name of the package + + CLI Example: + + .. 
code-block:: bash + + salt '*' pkg.normalize_name git + """ + return name + + def install(name=None, refresh=False, pkgs=None, **kwargs): r""" Install the passed package(s) on the system using winrepo @@ -1616,6 +1640,13 @@ def install(name=None, refresh=False, pkgs=None, **kwargs): ret[pkg_name] = {"no installer": version_num} continue + # Hash the installer source after verifying it was defined + installer_hash = __salt__["cp.hash_file"](installer, saltenv) + if isinstance(installer_hash, dict): + installer_hash = installer_hash["hsum"] + else: + installer_hash = None + # Is the installer in a location that requires caching if __salt__["config.valid_fileproto"](installer): @@ -1625,6 +1656,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs): # single files if cache_dir and installer.startswith("salt:"): path, _ = os.path.split(installer) + log.debug(f"PKG: Caching directory: {path}") try: __salt__["cp.cache_dir"]( path=path, @@ -1640,40 +1672,46 @@ def install(name=None, refresh=False, pkgs=None, **kwargs): # Check to see if the cache_file is cached... 
if passed if cache_file and cache_file.startswith("salt:"): + cache_file_hash = __salt__["cp.hash_file"](cache_file, saltenv) + log.debug(f"PKG: Caching file: {cache_file}") + try: + cached_file = __salt__["cp.cache_file"]( + cache_file, + saltenv=saltenv, + source_hash=cache_file_hash, + verify_ssl=kwargs.get("verify_ssl", True), + ) + except MinionError as exc: + msg = "Failed to cache {}".format(cache_file) + log.exception(msg, exc_info=exc) + return "{}\n{}".format(msg, exc) - # Check to see if the file is cached - cached_file = __salt__["cp.is_cached"](cache_file, saltenv) + # Check if the cache_file was cached successfully if not cached_file: - try: - cached_file = __salt__["cp.cache_file"](cache_file, saltenv) - except MinionError as exc: - msg = "Failed to cache {}".format(cache_file) - log.exception(msg, exc_info=exc) - return "{}\n{}".format(msg, exc) - - # Make sure the cached file is the same as the source - if __salt__["cp.hash_file"](cache_file, saltenv) != __salt__[ - "cp.hash_file" - ](cached_file): - try: - cached_file = __salt__["cp.cache_file"](cache_file, saltenv) - except MinionError as exc: - msg = "Failed to cache {}".format(cache_file) - log.exception(msg, exc_info=exc) - return "{}\n{}".format(msg, exc) + log.error("Unable to cache %s", cache_file) + ret[pkg_name] = {"failed to cache cache_file": cache_file} + continue - # Check if the cache_file was cached successfully - if not cached_file: - log.error("Unable to cache %s", cache_file) - ret[pkg_name] = {"failed to cache cache_file": cache_file} - continue + # If version is "latest" we always cache because "cp.is_cached" only + # checks that the file exists, not that is has changed + cached_pkg = False + if version_num != "latest" and not installer.startswith("salt:"): + cached_pkg = __salt__["cp.is_cached"](installer, saltenv) - # Check to see if the installer is cached - cached_pkg = __salt__["cp.is_cached"](installer, saltenv) if not cached_pkg: - # It's not cached. Cache it, mate. 
+ # Since we're passing "installer_hash", it should only cache the + # file if the source_hash doesn't match, which only works on + # files hosted on "salt://". If the http/https url supports + # etag, it should also verify that information before caching + log.debug(f"PKG: Caching file: {installer}") try: - cached_pkg = __salt__["cp.cache_file"](installer, saltenv) + cached_pkg = __salt__["cp.cache_file"]( + installer, + saltenv=saltenv, + source_hash=installer_hash, + verify_ssl=kwargs.get("verify_ssl", True), + use_etag=True, + ) except MinionError as exc: msg = "Failed to cache {}".format(installer) log.exception(msg, exc_info=exc) @@ -1686,25 +1724,6 @@ def install(name=None, refresh=False, pkgs=None, **kwargs): ) ret[pkg_name] = {"unable to cache": installer} continue - - # Compare the hash of the cached installer to the source only if the - # file is hosted on salt: - if installer.startswith("salt:"): - if __salt__["cp.hash_file"](installer, saltenv) != __salt__[ - "cp.hash_file" - ](cached_pkg): - try: - cached_pkg = __salt__["cp.cache_file"](installer, saltenv) - except MinionError as exc: - msg = "Failed to cache {}".format(installer) - log.exception(msg, exc_info=exc) - return "{}\n{}".format(msg, exc) - - # Check if the installer was cached successfully - if not cached_pkg: - log.error("Unable to cache %s", installer) - ret[pkg_name] = {"unable to cache": installer} - continue else: # Run the installer directly (not hosted on salt:, https:, etc.) 
cached_pkg = installer @@ -1716,7 +1735,12 @@ def install(name=None, refresh=False, pkgs=None, **kwargs): # Compare the hash sums source_hash = pkginfo[version_num].get("source_hash", False) if source_hash: - source_sum = _get_source_sum(source_hash, cached_pkg, saltenv) + source_sum = _get_source_sum( + source_hash, + cached_pkg, + saltenv=saltenv, + verify_ssl=kwargs.get("verify_ssl", True), + ) log.debug( "pkg.install: Source %s hash: %s", source_sum["hash_type"], @@ -1741,7 +1765,6 @@ def install(name=None, refresh=False, pkgs=None, **kwargs): log.debug("pkg.install: Source hash matches package hash.") # Get install flags - install_flags = pkginfo[version_num].get("install_flags", "") if options and options.get("extra_install_flags"): install_flags = "{} {}".format( @@ -1768,7 +1791,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs): # Install the software # Check Use Scheduler Option - log.debug("PKG : cmd: %s /s /c %s", cmd_shell, arguments) + log.debug("PKG : cmd: %s /c %s", cmd_shell, arguments) log.debug("PKG : pwd: %s", cache_path) if pkginfo[version_num].get("use_scheduler", False): # Create Scheduled Task @@ -1778,7 +1801,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs): force=True, action_type="Execute", cmd=cmd_shell, - arguments='/s /c "{}"'.format(arguments), + arguments='/c "{}"'.format(arguments), start_in=cache_path, trigger_type="Once", start_date="1975-01-01", @@ -1830,7 +1853,7 @@ def install(name=None, refresh=False, pkgs=None, **kwargs): else: # Launch the command result = __salt__["cmd.run_all"]( - '"{}" /s /c "{}"'.format(cmd_shell, arguments), + '"{}" /c "{}"'.format(cmd_shell, arguments), cache_path, output_loglevel="trace", python_shell=False, @@ -2018,7 +2041,7 @@ def remove(name=None, pkgs=None, **kwargs): removal_targets.append(ver_install) else: if version_num in pkginfo: - # we known how to remove this version + # we know how to remove this version if version_num in old[pkgname]: 
removal_targets.append(version_num) else: @@ -2062,8 +2085,15 @@ def remove(name=None, pkgs=None, **kwargs): ret[pkgname] = {"no uninstaller defined": target} continue - # Where is the uninstaller - if uninstaller.startswith(("salt:", "http:", "https:", "ftp:")): + # Hash the uninstaller source after verifying it was defined + uninstaller_hash = __salt__["cp.hash_file"](uninstaller, saltenv) + if isinstance(uninstaller_hash, dict): + uninstaller_hash = uninstaller_hash["hsum"] + else: + uninstaller_hash = None + + # Is the uninstaller in a location that requires caching + if __salt__["config.valid_fileproto"](uninstaller): # Check for the 'cache_dir' parameter in the .sls file # If true, the entire directory will be cached instead of the @@ -2072,21 +2102,39 @@ def remove(name=None, pkgs=None, **kwargs): if cache_dir and uninstaller.startswith("salt:"): path, _ = os.path.split(uninstaller) + log.debug(f"PKG: Caching dir: {path}") try: __salt__["cp.cache_dir"]( - path, saltenv, False, None, "E@init.sls$" + path=path, + saltenv=saltenv, + include_empty=False, + include_pat=None, + exclude_pat="E@init.sls$", ) except MinionError as exc: msg = "Failed to cache {}".format(path) log.exception(msg, exc_info=exc) return "{}\n{}".format(msg, exc) - # Check to see if the uninstaller is cached + # Check to see if the uninstaller is cached. We don't want to + # check for latest here like we do for "pkg.install" because we + # only want to uninstall the version that has been installed cached_pkg = __salt__["cp.is_cached"](uninstaller, saltenv) if not cached_pkg: - # It's not cached. Cache it, mate. + # Since we're passing "uninstaller_hash", it should only + # cache the file if the source_hash doesn't match, which + # only works on files hosted on "salt://". 
If the http/https + # url supports etag, it should also verify that information + # before caching + log.debug(f"PKG: Caching file: {uninstaller}") try: - cached_pkg = __salt__["cp.cache_file"](uninstaller, saltenv) + cached_pkg = __salt__["cp.cache_file"]( + uninstaller, + saltenv=saltenv, + source_hash=uninstaller_hash, + verify_ssl=kwargs.get("verify_ssl", True), + use_etag=True, + ) except MinionError as exc: msg = "Failed to cache {}".format(uninstaller) log.exception(msg, exc_info=exc) @@ -2098,35 +2146,15 @@ def remove(name=None, pkgs=None, **kwargs): ret[pkgname] = {"unable to cache": uninstaller} continue - # Compare the hash of the cached installer to the source only if - # the file is hosted on salt: - # TODO cp.cache_file does cache and hash checking? So why do it again? - if uninstaller.startswith("salt:"): - if __salt__["cp.hash_file"](uninstaller, saltenv) != __salt__[ - "cp.hash_file" - ](cached_pkg): - try: - cached_pkg = __salt__["cp.cache_file"](uninstaller, saltenv) - except MinionError as exc: - msg = "Failed to cache {}".format(uninstaller) - log.exception(msg, exc_info=exc) - return "{}\n{}".format(msg, exc) - - # Check if the installer was cached successfully - if not cached_pkg: - log.error("Unable to cache %s", uninstaller) - ret[pkgname] = {"unable to cache": uninstaller} - continue else: - # Run the uninstaller directly - # (not hosted on salt:, https:, etc.) + # Run the uninstaller directly (not hosted on salt:, https:, etc.) 
cached_pkg = os.path.expandvars(uninstaller) # Fix non-windows slashes cached_pkg = cached_pkg.replace("/", "\\") cache_path, _ = os.path.split(cached_pkg) - # os.path.expandvars is not required as we run everything through cmd.exe /s /c + # os.path.expandvars is not required as we run everything through cmd.exe /c if kwargs.get("extra_uninstall_flags"): uninstall_flags = "{} {}".format( @@ -2154,6 +2182,8 @@ def remove(name=None, pkgs=None, **kwargs): # Uninstall the software changed.append(pkgname) # Check Use Scheduler Option + log.debug("PKG : cmd: %s /c %s", cmd_shell, arguments) + log.debug("PKG : pwd: %s", cache_path) if pkginfo[target].get("use_scheduler", False): # Create Scheduled Task __salt__["task.create_task"]( @@ -2162,7 +2192,7 @@ def remove(name=None, pkgs=None, **kwargs): force=True, action_type="Execute", cmd=cmd_shell, - arguments='/s /c "{}"'.format(arguments), + arguments='/c "{}"'.format(arguments), start_in=cache_path, trigger_type="Once", start_date="1975-01-01", @@ -2179,11 +2209,12 @@ def remove(name=None, pkgs=None, **kwargs): else: # Launch the command result = __salt__["cmd.run_all"]( - '"{}" /s /c "{}"'.format(cmd_shell, arguments), + '"{}" /c "{}"'.format(cmd_shell, arguments), output_loglevel="trace", python_shell=False, redirect_stderr=True, ) + log.debug("PKG : retcode: %s", result["retcode"]) if not result["retcode"]: ret[pkgname] = {"uninstall status": "success"} changed.append(pkgname) @@ -2356,7 +2387,7 @@ def _reverse_cmp_pkg_versions(pkg1, pkg2): """ Compare software package versions """ - return 1 if Version(pkg1) > Version(pkg2) else -1 + return 1 if LooseVersion(pkg1) > LooseVersion(pkg2) else -1 def _get_latest_pkg_version(pkginfo): diff --git a/salt/modules/win_service.py b/salt/modules/win_service.py index f5ff82c01c0a..f788f7559d9a 100644 --- a/salt/modules/win_service.py +++ b/salt/modules/win_service.py @@ -486,12 +486,16 @@ def status(name, *args, **kwargs): .. 
versionchanged:: 2018.3.0 The service name can now be a glob (e.g. ``salt*``) + .. versionchanged:: 3006.0 + Returns "Not Found" if the service is not found on the system + Args: name (str): The name of the service to check Returns: bool: True if running, False otherwise dict: Maps service name to True if running, False otherwise + str: Not Found if the service is not found on the system CLI Example: @@ -508,7 +512,10 @@ def status(name, *args, **kwargs): else: services = [name] for service in services: - results[service] = info(service)["Status"] in ["Running", "Stop Pending"] + try: + results[service] = info(service)["Status"] in ["Running", "Stop Pending"] + except CommandExecutionError: + results[service] = "Not Found" if contains_globbing: return results return results[name] diff --git a/salt/modules/win_system.py b/salt/modules/win_system.py index b203ae36a060..0801d40aa932 100644 --- a/salt/modules/win_system.py +++ b/salt/modules/win_system.py @@ -755,7 +755,7 @@ def join_domain( ``True`` will restart the computer after a successful join. Default is ``False`` - .. versionadded:: 2015.8.2/2015.5.7 + .. versionadded:: 2015.5.7,2015.8.2 Returns: dict: Returns a dictionary if successful, otherwise ``False`` @@ -889,7 +889,7 @@ def unjoin_domain( workgroup (str): The workgroup to join the computer to. Default is ``WORKGROUP`` - .. versionadded:: 2015.8.2/2015.5.7 + .. versionadded:: 2015.5.7,2015.8.2 disable (bool): ``True`` to disable the computer account in Active Directory. @@ -899,7 +899,7 @@ def unjoin_domain( ``True`` will restart the computer after successful unjoin. Default is ``False`` - .. versionadded:: 2015.8.2/2015.5.7 + .. 
versionadded:: 2015.5.7,2015.8.2 Returns: dict: Returns a dictionary if successful, otherwise ``False`` diff --git a/salt/modules/win_task.py b/salt/modules/win_task.py index 7d60d35fc552..8c9f2718dbd2 100644 --- a/salt/modules/win_task.py +++ b/salt/modules/win_task.py @@ -19,6 +19,7 @@ try: import pythoncom + import pywintypes import win32com.client HAS_DEPENDENCIES = True @@ -359,7 +360,13 @@ def list_tasks(location="\\"): task_service.Connect() # Get the folder to list tasks from - task_folder = task_service.GetFolder(location) + try: + task_folder = task_service.GetFolder(location) + except pywintypes.com_error: + msg = "Unable to load location: {}".format(location) + log.error(msg) + raise CommandExecutionError(msg) + tasks = task_folder.GetTasks(0) ret = [] @@ -1129,12 +1136,13 @@ def edit_task( # TODO: Check triggers for end_boundary if delete_after is False: task_definition.Settings.DeleteExpiredTaskAfter = "" - if delete_after in duration: - task_definition.Settings.DeleteExpiredTaskAfter = _lookup_first( - duration, delete_after - ) else: - return 'Invalid value for "delete_after"' + if delete_after in duration: + task_definition.Settings.DeleteExpiredTaskAfter = _lookup_first( + duration, delete_after + ) + else: + return 'Invalid value for "delete_after"' if multiple_instances is not None: task_definition.Settings.MultipleInstances = instances[multiple_instances] @@ -1567,6 +1575,16 @@ def info(name, location="\\"): trigger["delay"] = _reverse_lookup(duration, triggerObj.Delay) else: trigger["delay"] = False + if hasattr(triggerObj, "Repetition"): + trigger["repeat_duration"] = _reverse_lookup( + duration, triggerObj.Repetition.Duration + ) + trigger["repeat_interval"] = _reverse_lookup( + duration, triggerObj.Repetition.Interval + ) + trigger[ + "repeat_stop_at_duration_end" + ] = triggerObj.Repetition.StopAtDurationEnd triggers.append(trigger) properties["settings"] = settings diff --git a/salt/modules/x509_v2.py b/salt/modules/x509_v2.py index 
b46d4cf57d7f..ba26d7b6b2e8 100644 --- a/salt/modules/x509_v2.py +++ b/salt/modules/x509_v2.py @@ -255,8 +255,8 @@ def create_certificate( Instead of returning the certificate, write it to this file path. overwrite - If ``path`` is specified and the file exists, do not overwrite it. - Defaults to false. + If ``path`` is specified and the file exists, overwrite it. + Defaults to true. raw Return the encoded raw bytes instead of a string. Defaults to false. @@ -614,7 +614,7 @@ def _create_certificate_local( path=os.path.join(copypath, f"{prepend}{cert.serial_number:x}.crt"), pem_type="CERTIFICATE", ) - return builder.sign(signing_private_key, algorithm=algorithm), private_key_loaded + return cert, private_key_loaded def encode_certificate( @@ -901,8 +901,11 @@ def create_crl( salt.utils.versions.kwargs_warn_until(["text"], "Potassium") kwargs.pop("text") - if kwargs: - raise SaltInvocationError(f"Unrecognized keyword arguments: {list(kwargs)}") + unknown = [kwarg for kwarg in kwargs if not kwarg.startswith("_")] + if unknown: + raise SaltInvocationError( + f"Unrecognized keyword arguments: {list(unknown)}" + ) if days_valid is None: try: @@ -1193,7 +1196,7 @@ def create_private_key( keysize For ``rsa``, specifies the bitlength of the private key (2048, 3072, 4096). For ``ec``, specifies the NIST curve to use (256, 384, 521). - Irrelevant for Edwards-curve schemes (`ed25519``, ``ed448``). + Irrelevant for Edwards-curve schemes (``ed25519``, ``ed448``). Defaults to 2048 for RSA and 256 for EC. passphrase @@ -1235,13 +1238,15 @@ def create_private_key( for x in ignored_params: kwargs.pop(x) - if kwargs: - raise SaltInvocationError(f"Unrecognized keyword arguments: {list(kwargs)}") + unknown = [kwarg for kwarg in kwargs if not kwarg.startswith("_")] + if unknown: + raise SaltInvocationError(f"Unrecognized keyword arguments: {list(unknown)}") if encoding not in ["der", "pem", "pkcs12"]: raise CommandExecutionError( f"Invalid value '{encoding}' for encoding. 
Valid: der, pem, pkcs12" ) + out = encode_private_key( _generate_pk(algo=algo, keysize=keysize), encoding=encoding, @@ -1254,7 +1259,9 @@ def create_private_key( return out if encoding == "pem": - return write_pem(out.decode(), path, pem_type="(?:RSA )?PRIVATE KEY") + return write_pem( + out.decode(), path, pem_type="(?:(RSA|ENCRYPTED) )?PRIVATE KEY" + ) with salt.utils.files.fopen(path, "wb") as fp_: fp_.write(out) return @@ -1264,6 +1271,7 @@ def encode_private_key( private_key, encoding="pem", passphrase=None, + private_key_passphrase=None, pkcs12_encryption_compat=False, raw=False, ): @@ -1276,7 +1284,7 @@ def encode_private_key( salt '*' x509.encode_private_key /etc/pki/my.key der - csr + private_key The private key to encode. encoding @@ -1284,6 +1292,23 @@ def encode_private_key( as a ``pem`` string, base64-encoded ``der`` and base64-encoded ``pkcs12``. Defaults to ``pem``. + passphrase + If this is specified, the private key will be encrypted using this + passphrase. The encryption algorithm cannot be selected, it will be + determined automatically as the best available one. + + private_key_passphrase + .. versionadded:: 3006.2 + + If the current ``private_key`` is encrypted, the passphrase to + decrypt it. + + pkcs12_encryption_compat + Some operating systems are incompatible with the encryption defaults + for PKCS12 used since OpenSSL v3. This switch triggers a fallback to + ``PBESv1SHA1And3KeyTripleDESCBC``. + Please consider the `notes on PKCS12 encryption `_. + raw Return the encoded raw bytes instead of a string. Defaults to false. """ @@ -1291,6 +1316,7 @@ def encode_private_key( raise CommandExecutionError( f"Invalid value '{encoding}' for encoding. 
Valid: der, pem, pkcs12" ) + private_key = x509util.load_privkey(private_key, passphrase=private_key_passphrase) if passphrase is None: cipher = serialization.NoEncryption() else: @@ -1549,7 +1575,7 @@ def get_public_key(key, passphrase=None, asObj=None): except SaltInvocationError: pass raise CommandExecutionError( - "Could not load key as certificate, public key, private key, CSR or CRL" + "Could not load key as certificate, public key, private key or CSR" ) @@ -1936,7 +1962,7 @@ def verify_private_key(private_key, public_key, passphrase=None): passphrase If ``private_key`` is encrypted, the passphrase to decrypt it. """ - privkey = x509util.load_privkey(private_key, passphrase=None) + privkey = x509util.load_privkey(private_key, passphrase=passphrase) pubkey = x509util.load_pubkey(get_public_key(public_key)) return x509util.is_pair(pubkey, privkey) diff --git a/salt/modules/yumpkg.py b/salt/modules/yumpkg.py index 4d0070f21a86..f794389c8619 100644 --- a/salt/modules/yumpkg.py +++ b/salt/modules/yumpkg.py @@ -14,6 +14,7 @@ .. versionadded:: 3003 Support for ``tdnf`` on Photon OS. 
+ """ @@ -29,7 +30,6 @@ import salt.utils.args import salt.utils.data -import salt.utils.decorators.path import salt.utils.environment import salt.utils.files import salt.utils.functools @@ -43,13 +43,6 @@ from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError from salt.utils.versions import LooseVersion -try: - import yum - - HAS_YUM = True -except ImportError: - HAS_YUM = False - log = logging.getLogger(__name__) __HOLD_PATTERN = r"[\w+]+(?:[.-][^-]+)*" @@ -211,7 +204,7 @@ def _yum_pkginfo(output): keys = itertools.cycle(("name", "version", "repoid")) values = salt.utils.itertools.split(_strip_headers(output)) osarch = __grains__["osarch"] - for (key, value) in zip(keys, values): + for key, value in zip(keys, values): if key == "name": try: cur["name"], cur["arch"] = value.rsplit(".", 1) @@ -353,67 +346,48 @@ def _get_yum_config(strict_parser=True): This is currently only used to get the reposdir settings, but could be used for other things if needed. - If the yum python library is available, use that, which will give us all of - the options, including all of the defaults not specified in the yum config. - Additionally, they will all be of the correct object type. - - If the yum library is not available, we try to read the yum.conf - directly ourselves with a minimal set of "defaults". + We try to read the yum.conf directly ourselves with a minimal set of + "defaults". 
""" # in case of any non-fatal failures, these defaults will be used conf = { "reposdir": ["/etc/yum/repos.d", "/etc/yum.repos.d"], } - if HAS_YUM: - try: - yb = yum.YumBase() - yb.preconf.init_plugins = False - for name, value in yb.conf.items(): - conf[name] = value - except (AttributeError, yum.Errors.ConfigError) as exc: - raise CommandExecutionError("Could not query yum config: {}".format(exc)) - except yum.Errors.YumBaseError as yum_base_error: - raise CommandExecutionError( - "Error accessing yum or rpmdb: {}".format(yum_base_error) - ) - else: - # fall back to parsing the config ourselves - # Look for the config the same order yum does - fn = None - paths = ( - "/etc/yum/yum.conf", - "/etc/yum.conf", - "/etc/dnf/dnf.conf", - "/etc/tdnf/tdnf.conf", - ) - for path in paths: - if os.path.exists(path): - fn = path - break - - if not fn: - raise CommandExecutionError( - "No suitable yum config file found in: {}".format(paths) - ) + # fall back to parsing the config ourselves + # Look for the config the same order yum does + fn = None + paths = ( + "/etc/yum/yum.conf", + "/etc/yum.conf", + "/etc/dnf/dnf.conf", + "/etc/tdnf/tdnf.conf", + ) + for path in paths: + if os.path.exists(path): + fn = path + break - cp = configparser.ConfigParser(strict=strict_parser) - try: - cp.read(fn) - except OSError as exc: - raise CommandExecutionError("Unable to read from {}: {}".format(fn, exc)) + if not fn: + raise CommandExecutionError( + "No suitable yum config file found in: {}".format(paths) + ) - if cp.has_section("main"): - for opt in cp.options("main"): - if opt in ("reposdir", "commands", "excludes"): - # these options are expected to be lists - conf[opt] = [x.strip() for x in cp.get("main", opt).split(",")] - else: - conf[opt] = cp.get("main", opt) - else: - log.warning( - "Could not find [main] section in %s, using internal defaults", fn - ) + cp = configparser.ConfigParser(strict=strict_parser) + try: + cp.read(fn) + except OSError as exc: + raise 
CommandExecutionError("Unable to read from {}: {}".format(fn, exc)) + + if cp.has_section("main"): + for opt in cp.options("main"): + if opt in ("reposdir", "commands", "excludes"): + # these options are expected to be lists + conf[opt] = [x.strip() for x in cp.get("main", opt).split(",")] + else: + conf[opt] = cp.get("main", opt) + else: + log.warning("Could not find [main] section in %s, using internal defaults", fn) return conf @@ -746,6 +720,8 @@ def list_pkgs(versions_as_list=False, **kwargs): cmd = [ "rpm", "-qa", + "--nodigest", + "--nosignature", "--queryformat", salt.utils.pkg.rpm.QUERYFORMAT.replace("%{REPOID}", "(none)") + "\n", ] @@ -1905,7 +1881,7 @@ def upgrade( Disable exclude from main, for a repo or for everything. (e.g., ``yum --disableexcludes='main'``) - .. versionadded:: 2014.7 + .. versionadded:: 2014.7.0 name The name of the package to be upgraded. Note that this parameter is @@ -2446,7 +2422,7 @@ def unhold(name=None, pkgs=None, sources=None, **kwargs): # pylint: disable=W06 def list_holds(pattern=__HOLD_PATTERN, full=True): r""" - .. versionchanged:: 2016.3.0,2015.8.4,2015.5.10 + .. versionchanged:: 2015.5.10,2015.8.4,2016.3.0 Function renamed from ``pkg.get_locked_pkgs`` to ``pkg.list_holds``. List information on locked packages @@ -2570,10 +2546,10 @@ def group_list(): return ret -def group_info(name, expand=False, ignore_groups=None): +def group_info(name, expand=False, ignore_groups=None, **kwargs): """ .. versionadded:: 2014.1.0 - .. versionchanged:: 3001,2016.3.0,2015.8.4,2015.5.10 + .. versionchanged:: 2015.5.10,2015.8.4,2016.3.0,3001 The return data has changed. A new key ``type`` has been added to distinguish environment groups from package groups. Also, keys for the group name and group ID have been added. 
The ``mandatory packages``, @@ -2581,6 +2557,10 @@ def group_info(name, expand=False, ignore_groups=None): to ``mandatory``, ``optional``, and ``default`` for accuracy, as environment groups include other groups, and not packages. Finally, this function now properly identifies conditional packages. + .. versionchanged:: 3006.2 + Support for ``fromrepo``, ``enablerepo``, and ``disablerepo`` (as used + in :py:func:`pkg.install `) has been + added. Lists packages belonging to a certain group @@ -2601,18 +2581,46 @@ def group_info(name, expand=False, ignore_groups=None): .. versionadded:: 3001 + fromrepo + Restrict ``yum groupinfo`` to the specified repo(s). + (e.g., ``yum --disablerepo='*' --enablerepo='somerepo'``) + + .. versionadded:: 3006.2 + + enablerepo (ignored if ``fromrepo`` is specified) + Specify a disabled package repository (or repositories) to enable. + (e.g., ``yum --enablerepo='somerepo'``) + + .. versionadded:: 3006.2 + + disablerepo (ignored if ``fromrepo`` is specified) + Specify an enabled package repository (or repositories) to disable. + (e.g., ``yum --disablerepo='somerepo'``) + + .. versionadded:: 3006.2 + CLI Example: .. code-block:: bash salt '*' pkg.group_info 'Perl Support' + salt '*' pkg.group_info 'Perl Support' fromrepo=base,updates + salt '*' pkg.group_info 'Perl Support' enablerepo=somerepo """ pkgtypes = ("mandatory", "optional", "default", "conditional") ret = {} for pkgtype in pkgtypes: ret[pkgtype] = set() - cmd = [_yum(), "--quiet", "groupinfo", name] + options = _get_options( + **{ + key: val + for key, val in kwargs.items() + if key in ("fromrepo", "enablerepo", "disablerepo") + } + ) + + cmd = [_yum(), "--quiet"] + options + ["groupinfo", name] out = __salt__["cmd.run_stdout"](cmd, output_loglevel="trace", python_shell=False) g_info = {} @@ -2680,22 +2688,49 @@ def group_info(name, expand=False, ignore_groups=None): return ret -def group_diff(name): +def group_diff(name, **kwargs): """ .. versionadded:: 2014.1.0 - .. 
versionchanged:: 2016.3.0,2015.8.4,2015.5.10 + .. versionchanged:: 2015.5.10,2015.8.4,2016.3.0 Environment groups are now supported. The key names have been renamed, similar to the changes made in :py:func:`pkg.group_info `. + .. versionchanged:: 3006.2 + Support for ``fromrepo``, ``enablerepo``, and ``disablerepo`` (as used + in :py:func:`pkg.install `) has been + added. Lists which of a group's packages are installed and which are not installed + name + The name of the group to check + + fromrepo + Restrict ``yum groupinfo`` to the specified repo(s). + (e.g., ``yum --disablerepo='*' --enablerepo='somerepo'``) + + .. versionadded:: 3006.2 + + enablerepo (ignored if ``fromrepo`` is specified) + Specify a disabled package repository (or repositories) to enable. + (e.g., ``yum --enablerepo='somerepo'``) + + .. versionadded:: 3006.2 + + disablerepo (ignored if ``fromrepo`` is specified) + Specify an enabled package repository (or repositories) to disable. + (e.g., ``yum --disablerepo='somerepo'``) + + .. versionadded:: 3006.2 + CLI Example: .. 
code-block:: bash salt '*' pkg.group_diff 'Perl Support' + salt '*' pkg.group_diff 'Perl Support' fromrepo=base,updates + salt '*' pkg.group_diff 'Perl Support' enablerepo=somerepo """ pkgtypes = ("mandatory", "optional", "default", "conditional") ret = {} @@ -2703,7 +2738,7 @@ def group_diff(name): ret[pkgtype] = {"installed": [], "not installed": []} pkgs = list_pkgs() - group_pkgs = group_info(name, expand=True) + group_pkgs = group_info(name, expand=True, **kwargs) for pkgtype in pkgtypes: for member in group_pkgs.get(pkgtype, []): if member in pkgs: @@ -2800,7 +2835,7 @@ def group_install(name, skip=(), include=(), **kwargs): if not pkgs: return {} - return install(pkgs=pkgs, **kwargs) + return install(pkgs=list(set(pkgs)), **kwargs) groupinstall = salt.utils.functools.alias_function(group_install, "groupinstall") @@ -3297,7 +3332,6 @@ def modified(*packages, **flags): return __salt__["lowpkg.modified"](*packages, **flags) -@salt.utils.decorators.path.which("yumdownloader") def download(*packages, **kwargs): """ .. versionadded:: 2015.5.0 @@ -3317,6 +3351,9 @@ def download(*packages, **kwargs): salt '*' pkg.download httpd salt '*' pkg.download httpd postfix """ + if not salt.utils.path.which("yumdownloader"): + raise CommandExecutionError("'yumdownloader' command not available") + if not packages: raise SaltInvocationError("No packages were specified") diff --git a/salt/modules/zabbix.py b/salt/modules/zabbix.py index d6fdeacc9290..f09ca1df3ba8 100644 --- a/salt/modules/zabbix.py +++ b/salt/modules/zabbix.py @@ -146,7 +146,7 @@ def _query(method, params, url, auth=None): :return: Response from API with desired data in JSON format. In case of error returns more specific description. - .. versionchanged:: 2017.7 + .. versionchanged:: 2017.7.0 """ unauthenticated_methods = [ @@ -311,7 +311,7 @@ def _map_to_list_of_dicts(source, key): def get_zabbix_id_mapper(): """ - .. versionadded:: 2017.7 + .. 
versionadded:: 2017.7.0 Make ZABBIX_ID_MAPPER constant available to state modules. @@ -328,7 +328,7 @@ def get_zabbix_id_mapper(): def substitute_params(input_object, extend_params=None, filter_key="name", **kwargs): """ - .. versionadded:: 2017.7 + .. versionadded:: 2017.7.0 Go through Zabbix object params specification and if needed get given object ID from Zabbix API and put it back as a value. Definition of the object is done via dict with keys "query_object" and "query_name". @@ -385,7 +385,7 @@ def substitute_params(input_object, extend_params=None, filter_key="name", **kwa # pylint: disable=too-many-return-statements,too-many-nested-blocks def compare_params(defined, existing, return_old_value=False): """ - .. versionadded:: 2017.7 + .. versionadded:: 2017.7.0 Compares Zabbix object definition against existing Zabbix object. @@ -471,7 +471,7 @@ def compare_params(defined, existing, return_old_value=False): def get_object_id_by_params(obj, params=None, **connection_args): """ - .. versionadded:: 2017.7 + .. versionadded:: 2017.7.0 Get ID of single Zabbix object specified by its name. @@ -2703,7 +2703,7 @@ def run_query(method, params, **connection_args): def configuration_import(config_file, rules=None, file_format="xml", **connection_args): """ - .. versionadded:: 2017.7 + .. versionadded:: 2017.7.0 Imports Zabbix configuration specified in file to Zabbix server. 
diff --git a/salt/modules/zcbuildout.py b/salt/modules/zcbuildout.py index 6d06bfa5ac77..99b9202011e9 100644 --- a/salt/modules/zcbuildout.py +++ b/salt/modules/zcbuildout.py @@ -1023,6 +1023,3 @@ def _check_onlyif_unless(onlyif, unless, directory, runas=None, env=()): if status["status"]: ret = status return ret - - -# vim:set et sts=4 ts=4 tw=80: diff --git a/salt/modules/zfs.py b/salt/modules/zfs.py index d583108a43a9..7df620e3eb65 100644 --- a/salt/modules/zfs.py +++ b/salt/modules/zfs.py @@ -1249,6 +1249,3 @@ def get(*dataset, **kwargs): del ds_data["property"] return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/zoneadm.py b/salt/modules/zoneadm.py index 4b2b91c0d22a..0bb4a60e26ed 100644 --- a/salt/modules/zoneadm.py +++ b/salt/modules/zoneadm.py @@ -570,6 +570,3 @@ def clone(zone, source, snapshot=None): del ret["message"] return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/modules/zonecfg.py b/salt/modules/zonecfg.py index bf3b913faf64..a90408047970 100644 --- a/salt/modules/zonecfg.py +++ b/salt/modules/zonecfg.py @@ -799,6 +799,3 @@ def info(zone, show_all=False): ret[resname].append(resdata) return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/netapi/rest_cherrypy/app.py b/salt/netapi/rest_cherrypy/app.py index f60ee90e47df..e65bca8f2d9e 100644 --- a/salt/netapi/rest_cherrypy/app.py +++ b/salt/netapi/rest_cherrypy/app.py @@ -1416,7 +1416,7 @@ def POST(self, **kwargs): POST /minions HTTP/1.1 Host: localhost:8000 Accept: application/x-yaml - Content-Type: application/json + Content-Type: application/x-www-form-urlencoded tgt=*&fun=status.diskusage diff --git a/salt/netapi/rest_tornado/__init__.py b/salt/netapi/rest_tornado/__init__.py index 67336d0adaa6..9ab2569c822a 100644 --- a/salt/netapi/rest_tornado/__init__.py +++ b/salt/netapi/rest_tornado/__init__.py @@ -3,6 +3,7 @@ import os import salt.auth +from salt.config import DEFAULT_HASH_TYPE 
from salt.utils.versions import Version __virtualname__ = os.path.abspath(__file__).rsplit(os.sep)[-2] or "rest_tornado" @@ -59,7 +60,9 @@ def get_application(opts): from . import saltnado_websockets token_pattern = r"([0-9A-Fa-f]{{{0}}})".format( - len(getattr(hashlib, opts.get("hash_type", "md5"))().hexdigest()) + len( + getattr(hashlib, opts.get("hash_type", DEFAULT_HASH_TYPE))().hexdigest() + ) ) all_events_pattern = r"/all_events/{}".format(token_pattern) formatted_events_pattern = r"/formatted_events/{}".format(token_pattern) diff --git a/salt/pillar/__init__.py b/salt/pillar/__init__.py index 5a3f5388b408..d047f367fb5b 100644 --- a/salt/pillar/__init__.py +++ b/salt/pillar/__init__.py @@ -8,8 +8,8 @@ import logging import os import sys +import time import traceback -import uuid import salt.channel.client import salt.ext.tornado.gen @@ -260,6 +260,7 @@ def compile_pillar(self): if self.ext: load["ext"] = self.ext try: + start = time.monotonic() ret_pillar = yield self.channel.crypted_transfer_decode_dictentry( load, dictkey="pillar", @@ -267,6 +268,10 @@ def compile_pillar(self): except salt.crypt.AuthenticationError as exc: log.error(exc.message) raise SaltClientError("Exception getting pillar.") + except salt.exceptions.SaltReqTimeoutError: + raise SaltClientError( + f"Pillar timed out after {int(time.monotonic() - start)} seconds" + ) except Exception: # pylint: disable=broad-except log.exception("Exception getting pillar:") raise SaltClientError("Exception getting pillar.") @@ -351,10 +356,23 @@ def compile_pillar(self): } if self.ext: load["ext"] = self.ext - ret_pillar = self.channel.crypted_transfer_decode_dictentry( - load, - dictkey="pillar", - ) + + try: + start = time.monotonic() + ret_pillar = self.channel.crypted_transfer_decode_dictentry( + load, + dictkey="pillar", + ) + except salt.crypt.AuthenticationError as exc: + log.error(exc.message) + raise SaltClientError("Exception getting pillar.") + except salt.exceptions.SaltReqTimeoutError: + 
raise SaltClientError( + f"Pillar timed out after {int(time.monotonic() - start)} seconds" + ) + except Exception: # pylint: disable=broad-except + log.exception("Exception getting pillar:") + raise SaltClientError("Exception getting pillar.") if not isinstance(ret_pillar, dict): log.error( @@ -926,7 +944,7 @@ def render_pstate(self, sls, saltenv, mods, defaults=None): saltenv, sls, _pillar_rend=True, - **defaults + **defaults, ) except Exception as exc: # pylint: disable=broad-except msg = "Rendering SLS '{}' failed, render error:\n{}".format(sls, exc) @@ -1105,7 +1123,7 @@ def _external_pillar_data(self, pillar, val, key): self.minion_id, pillar, extra_minion_data=self.extra_minion_data, - **val + **val, ) else: ext = self.ext_pillars[key](self.minion_id, pillar, **val) @@ -1115,7 +1133,7 @@ def _external_pillar_data(self, pillar, val, key): self.minion_id, pillar, *val, - extra_minion_data=self.extra_minion_data + extra_minion_data=self.extra_minion_data, ) else: ext = self.ext_pillars[key](self.minion_id, pillar, *val) @@ -1341,6 +1359,11 @@ def destroy(self): if self._closing: return self._closing = True + if self.client: + try: + self.client.destroy() + except AttributeError: + pass # pylint: disable=W1701 def __del__(self): diff --git a/salt/pillar/hg_pillar.py b/salt/pillar/hg_pillar.py index 3a183a04568d..b4ce24ac8a69 100644 --- a/salt/pillar/hg_pillar.py +++ b/salt/pillar/hg_pillar.py @@ -23,6 +23,7 @@ import salt.pillar import salt.utils.stringutils +from salt.config import DEFAULT_HASH_TYPE try: import hglib @@ -90,7 +91,7 @@ def __init__(self, repo_uri): """Initialize a hg repo (or open it if it already exists)""" self.repo_uri = repo_uri cachedir = os.path.join(__opts__["cachedir"], "hg_pillar") - hash_type = getattr(hashlib, __opts__.get("hash_type", "md5")) + hash_type = getattr(hashlib, __opts__.get("hash_type", DEFAULT_HASH_TYPE)) repo_hash = hash_type(salt.utils.stringutils.to_bytes(repo_uri)).hexdigest() self.working_dir = os.path.join(cachedir, 
repo_hash) if not os.path.isdir(self.working_dir): diff --git a/salt/pillar/netbox.py b/salt/pillar/netbox.py index 808c58c9d894..bbbf765e62a0 100644 --- a/salt/pillar/netbox.py +++ b/salt/pillar/netbox.py @@ -56,28 +56,28 @@ Whether should retrieve the prefixes of the site the device belongs to. devices: ``True`` - .. versionadded:: 3004.0 + .. versionadded:: 3004 Whether should retrieve physical devices. virtual_machines: ``False`` - .. versionadded:: 3004.0 + .. versionadded:: 3004 Whether should retrieve virtual machines. interfaces: ``False`` - .. versionadded:: 3004.0 + .. versionadded:: 3004 Whether should retrieve the interfaces of the device. interface_ips: ``False`` - .. versionadded:: 3004.0 + .. versionadded:: 3004 Whether should retrieve the IP addresses for interfaces of the device. (interfaces must be set to True as well) api_query_result_limit: ``Use NetBox default`` - .. versionadded:: 3004.0 + .. versionadded:: 3004 An integer specifying how many results should be returned for each query to the NetBox API. Leaving this unset will use NetBox's default value. diff --git a/salt/pillar/sql_base.py b/salt/pillar/sql_base.py index 372dced91cce..3edd3ad0a87d 100644 --- a/salt/pillar/sql_base.py +++ b/salt/pillar/sql_base.py @@ -198,22 +198,20 @@ with_lists: [1,3] """ -import abc # Added in python2.6 so always available +import abc import logging from salt.utils.dictupdate import update from salt.utils.odict import OrderedDict +log = logging.getLogger(__name__) + # Please don't strip redundant parentheses from this file. # I have added some for clarity. # tests/unit/pillar/mysql_test.py may help understand this code. 
-# Set up logging -log = logging.getLogger(__name__) - - # This ext_pillar is abstract and cannot be used directory def __virtual__(): return False diff --git a/salt/renderers/mako.py index c2c300c38004..01d413140ba1 100644 --- a/salt/renderers/mako.py +++ b/salt/renderers/mako.py @@ -1,5 +1,13 @@ """ Mako Renderer for Salt + +This renderer requires the Mako library. + +To install Mako, do the following: + +.. code-block:: bash + + salt-pip install mako """ diff --git a/salt/renderers/nacl.py index 156913150e2d..9cd2ba0f46b4 100644 --- a/salt/renderers/nacl.py +++ b/salt/renderers/nacl.py @@ -10,8 +10,7 @@ secret key on your master. You can put the public key in source control so that developers can add new secrets quickly and easily. -This renderer requires the libsodium library binary and libnacl >= 1.5.1 -python package (support for sealed boxes came in 1.5.1 version). +This renderer requires the libsodium library binary and PyNacl >= 1.0 Setup diff --git a/salt/renderers/py.py index 0f67d7e62e71..3ce58ac405c1 100644 --- a/salt/renderers/py.py +++ b/salt/renderers/py.py @@ -46,8 +46,24 @@ ``/srv/salt/foo/bar/baz.sls``, then ``__sls__`` in that file will be ``foo.bar.baz``. -When writing a reactor SLS file the global context ``data`` (same as context ``{{ data }}`` -for states written with Jinja + YAML) is available. The following YAML + Jinja state declaration: +When used in a scenario where additional user-provided context data is supplied +(such as with :mod:`file.managed `), the additional +data will typically be injected into the script as one or more global +variables: + +.. code-block:: jinja + + /etc/http/conf/http.conf: + file.managed: + - source: salt://apache/generate_http_conf.py + - template: py + - context: + # Will be injected as the global variable "site_name".
+ site_name: {{ site_name }} + +When writing a reactor SLS file the global context ``data`` (same as context +``{{ data }}`` for states written with Jinja + YAML) is available. The +following YAML + Jinja state declaration: .. code-block:: jinja diff --git a/salt/renderers/pyobjects.py b/salt/renderers/pyobjects.py index b65898d1bd58..ad56246396b7 100644 --- a/salt/renderers/pyobjects.py +++ b/salt/renderers/pyobjects.py @@ -430,9 +430,6 @@ def render(template, saltenv="base", sls="", salt_data=True, **kwargs): if not salt_data: return _globals - # this will be used to fetch any import files - client = get_file_client(__opts__) - # process our sls imports # # we allow pyobjects users to use a special form of the import statement @@ -461,15 +458,18 @@ def process_template(template): # that we're importing everything imports = None - state_file = client.cache_file(import_file, saltenv) - if not state_file: - raise ImportError( - "Could not find the file '{}'".format(import_file) - ) - - with salt.utils.files.fopen(state_file) as state_fh: - state_contents, state_globals = process_template(state_fh) - exec(state_contents, state_globals) + # this will be used to fetch any import files + # For example salt://test.sls + with get_file_client(__opts__) as client: + state_file = client.cache_file(import_file, saltenv) + if not state_file: + raise ImportError( + "Could not find the file '{}'".format(import_file) + ) + + with salt.utils.files.fopen(state_file) as state_fh: + state_contents, state_globals = process_template(state_fh) + exec(state_contents, state_globals) # if no imports have been specified then we are being imported as: import salt://foo.sls # so we want to stick all of the locals from our state file into the template globals diff --git a/salt/roster/ansible.py b/salt/roster/ansible.py index 351cb9f5e694..52c16cedb0d4 100644 --- a/salt/roster/ansible.py +++ b/salt/roster/ansible.py @@ -7,16 +7,16 @@ # /tmp/example_roster [servers] - salt.gtmanfred.com 
ansible_ssh_user=gtmanfred ansible_ssh_host=127.0.0.1 ansible_ssh_port=22 ansible_ssh_pass='password' + salt.gtmanfred.com ansible_ssh_user=gtmanfred ansible_ssh_host=127.0.0.1 ansible_ssh_port=22 ansible_ssh_pass='password' ansible_sudo_pass='password' [desktop] - home ansible_ssh_user=gtmanfred ansible_ssh_host=12.34.56.78 ansible_ssh_port=23 ansible_ssh_pass='password' + home ansible_ssh_user=gtmanfred ansible_ssh_host=12.34.56.78 ansible_ssh_port=23 ansible_ssh_pass='password' ansible_sudo_pass='password' [computers:children] desktop servers - [names:vars] + [computers:vars] http_port=80 then salt-ssh can be used to hit any of them @@ -47,35 +47,40 @@ #!/bin/bash # filename: /etc/salt/hosts echo '{ - "servers": [ - "salt.gtmanfred.com" - ], - "desktop": [ - "home" - ], - "computers": { - "hosts": [], - "children": [ - "desktop", - "servers" - ] - }, - "_meta": { - "hostvars": { - "salt.gtmanfred.com": { - "ansible_ssh_user": "gtmanfred", - "ansible_ssh_host": "127.0.0.1", - "ansible_sudo_pass": "password", - "ansible_ssh_port": 22 - }, - "home": { - "ansible_ssh_user": "gtmanfred", - "ansible_ssh_host": "12.34.56.78", - "ansible_sudo_pass": "password", - "ansible_ssh_port": 23 - } + "servers": [ + "salt.gtmanfred.com" + ], + "desktop": [ + "home" + ], + "computers": { + "hosts": [], + "children": [ + "desktop", + "servers" + ], + "vars": { + "http_port": 80 + } + }, + "_meta": { + "hostvars": { + "salt.gtmanfred.com": { + "ansible_ssh_user": "gtmanfred", + "ansible_ssh_host": "127.0.0.1", + "ansible_sudo_pass": "password", + "ansible_ssh_pass": "password", + "ansible_ssh_port": 22 + }, + "home": { + "ansible_ssh_user": "gtmanfred", + "ansible_ssh_host": "12.34.56.78", + "ansible_sudo_pass": "password", + "ansible_ssh_pass": "password", + "ansible_ssh_port": 23 + } + } } - } }' This is the format that an inventory script needs to output to work with ansible, and thus here. 
diff --git a/salt/roster/terraform.py b/salt/roster/terraform.py index 0c9f13df2cf1..626f0f103c51 100644 --- a/salt/roster/terraform.py +++ b/salt/roster/terraform.py @@ -92,7 +92,9 @@ def _handle_old_salt_host_resource(resource): ret[MINION_ID] = attrs.get(MINION_ID) valid_attrs = set(attrs.keys()).intersection(TF_ROSTER_ATTRS.keys()) for attr in valid_attrs: - ret[attr] = _cast_output_to_type(attrs.get(attr), TF_ROSTER_ATTRS.get(attr)) + ret[attr] = _cast_output_to_type( + attr, attrs.get(attr), TF_ROSTER_ATTRS.get(attr) + ) return ret @@ -110,7 +112,9 @@ def _handle_new_salt_host_resource(resource): ret[MINION_ID] = attrs.get(MINION_ID) valid_attrs = set(attrs.keys()).intersection(TF_ROSTER_ATTRS.keys()) for attr in valid_attrs: - ret[attr] = _cast_output_to_type(attrs.get(attr), TF_ROSTER_ATTRS.get(attr)) + ret[attr] = _cast_output_to_type( + attr, attrs.get(attr), TF_ROSTER_ATTRS.get(attr) + ) log.info(ret) rets.append(ret) return rets @@ -134,8 +138,16 @@ def _add_ssh_key(ret): ret["priv"] = priv -def _cast_output_to_type(value, typ): +def _cast_output_to_type(attr, value, typ): """cast the value depending on the terraform type""" + if value is None: + # Timeout needs to default to 0 if the value is None + # The ssh command that is run cannot handle `-o ConnectTimeout=None` + if attr == "timeout": + return 0 + else: + return value + if value is None: return value if typ == "b": diff --git a/salt/runner.py b/salt/runner.py index caf6471fa081..2a19636b8edf 100644 --- a/salt/runner.py +++ b/salt/runner.py @@ -207,7 +207,7 @@ def print_docs(self): print(docs[fun]) # TODO: move to mixin whenever we want a salt-wheel cli - def run(self): + def run(self, full_return=False): """ Execute the runner sequence """ @@ -306,6 +306,7 @@ def run(self): tag=async_pub["tag"], jid=async_pub["jid"], daemonize=False, + full_return=full_return, ) except salt.exceptions.SaltException as exc: with salt.utils.event.get_event("master", opts=self.opts) as evt: diff --git 
a/salt/runners/manage.py b/salt/runners/manage.py index 9dc67ed72823..3a0a201a4ef1 100644 --- a/salt/runners/manage.py +++ b/salt/runners/manage.py @@ -223,7 +223,7 @@ def list_state(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -253,7 +253,7 @@ def list_not_state(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -285,7 +285,7 @@ def present(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -308,7 +308,7 @@ def not_present(subset=None, show_ip=False): detection (no commands will be sent) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -331,7 +331,7 @@ def joined(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -354,7 +354,7 @@ def not_joined(subset=None, show_ip=False): detection (no commands will be sent) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. 
@@ -377,7 +377,7 @@ def allowed(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -400,7 +400,7 @@ def not_allowed(subset=None, show_ip=False): detection (no commands will be sent) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -423,7 +423,7 @@ def alived(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -446,7 +446,7 @@ def not_alived(subset=None, show_ip=False): detection (no commands will be sent) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -469,7 +469,7 @@ def reaped(subset=None, show_ip=False): detection (no commands will be sent to minions) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. @@ -492,7 +492,7 @@ def not_reaped(subset=None, show_ip=False): detection (no commands will be sent) subset : None - Pass in a CIDR range to filter minions by IP address. + Pass in a list of minion ids. show_ip : False Also show the IP address each minion is connecting from. diff --git a/salt/runners/nacl.py b/salt/runners/nacl.py index d0291871f8d4..3c097604badf 100644 --- a/salt/runners/nacl.py +++ b/salt/runners/nacl.py @@ -1,7 +1,7 @@ """ This module helps include encrypted passwords in pillars, grains and salt state files. 
-:depends: libnacl, https://github.com/saltstack/libnacl +:depends: PyNaCl, https://github.com/pyca/pynacl This is often useful if you wish to store your pillars in source control or share your pillar data with others that you trust. I don't advise making your pillars public @@ -125,7 +125,7 @@ def __virtual__(): def keygen(sk_file=None, pk_file=None, **kwargs): """ - Use libnacl to generate a keypair. + Use PyNaCL to generate a keypair. If no `sk_file` is defined return a keypair. diff --git a/salt/runners/smartos_vmadm.py b/salt/runners/smartos_vmadm.py index 052c70b60175..7575fdbaf184 100644 --- a/salt/runners/smartos_vmadm.py +++ b/salt/runners/smartos_vmadm.py @@ -383,6 +383,3 @@ def is_running(search): salt-run vmadm.is_running search='alias=julia' """ return _action("is_running", search, False) - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/runners/state.py b/salt/runners/state.py index 5642204ce990..3baf1b86346a 100644 --- a/salt/runners/state.py +++ b/salt/runners/state.py @@ -86,7 +86,7 @@ def orchestrate( Runner uses the pillar variable - .. versionchanged:: 2017.5 + .. versionchanged:: 2017.5.0 Runner uses the pillar_enc variable that allows renderers to render the pillar. 
This is usable when supplying the contents of a file as pillar, and the file contains @@ -101,6 +101,16 @@ def orchestrate( salt-run state.orchestrate webserver pillar_enc=gpg pillar="$(cat somefile.json)" """ + + try: + orig_user = __opts__["user"] + __opts__["user"] = __user__ + log.debug( + f"changed opts user from original '{orig_user}' to global user '{__user__}'" + ) + except NameError: + log.debug("unable to find global user __user__") + if pillar is not None and not isinstance(pillar, dict): raise SaltInvocationError("Pillar data must be formatted as a dictionary") __opts__["file_client"] = "local" diff --git a/salt/scripts.py b/salt/scripts.py index 219bdfbca3e9..176cab56366b 100644 --- a/salt/scripts.py +++ b/salt/scripts.py @@ -1,8 +1,7 @@ """ This module contains the function calls to execute command line scripts """ - - +import contextlib import functools import logging import os @@ -588,14 +587,54 @@ def salt_unity(): s_fun() +def _pip_args(args, target): + new_args = args[:] + target_in_args = False + for arg in args: + if "--target" in arg: + target_in_args = True + if "install" in args and not target_in_args: + new_args.append(f"--target={target}") + return new_args + + +def _pip_environment(env, extras): + new_env = env.copy() + if "PYTHONPATH" in env: + new_env["PYTHONPATH"] = f"{extras}{os.pathsep}{env['PYTHONPATH']}" + else: + new_env["PYTHONPATH"] = extras + return new_env + + +def _get_onedir_env_path(): + # This function only exists to simplify testing. + with contextlib.suppress(AttributeError): + return sys.RELENV + return None + + def salt_pip(): """ Proxy to current python's pip """ + relenv_path = _get_onedir_env_path() + if relenv_path is None: + print( + "'salt-pip' is only meant to be used from a Salt onedir. 
You probably " + "want to use the system 'pip` binary.", + file=sys.stderr, + flush=True, + ) + sys.exit(salt.defaults.exitcodes.EX_GENERIC) + else: + extras = str(relenv_path / "extras-{}.{}".format(*sys.version_info)) + env = _pip_environment(os.environ.copy(), extras) + args = _pip_args(sys.argv[1:], extras) command = [ sys.executable, "-m", "pip", - ] + sys.argv[1:] - ret = subprocess.run(command, shell=False, check=False) + ] + _pip_args(sys.argv[1:], extras) + ret = subprocess.run(command, shell=False, check=False, env=env) sys.exit(ret.returncode) diff --git a/salt/state.py b/salt/state.py index ff40d2023149..dfa64adae0e7 100644 --- a/salt/state.py +++ b/salt/state.py @@ -11,17 +11,16 @@ } """ - import copy import datetime import fnmatch import importlib +import inspect import logging import os import random import re import site -import sys import time import traceback @@ -83,6 +82,7 @@ "fun", "state", "check_cmd", + "cmd_opts_exclude", "failhard", "onlyif", "unless", @@ -449,145 +449,165 @@ def verify_high(self, high): if not isinstance(high, dict): errors.append("High data is not a dictionary and is invalid") reqs = OrderedDict() - for name, body in high.items(): - if name.startswith("__"): - continue - if not isinstance(name, str): - errors.append( - "ID '{}' in SLS '{}' is not formed as a string, but is a {}".format( - name, body["__sls__"], type(name).__name__ - ) - ) - if not isinstance(body, dict): - err = "The type {} in {} is not formatted as a dictionary".format( - name, body - ) - errors.append(err) - continue - for state in body: - if state.startswith("__"): - continue - if not isinstance(body[state], list): + if not errors: + for name, body in high.items(): + try: + if name.startswith("__"): + continue + except (AttributeError, TypeError): + # Do not traceback on non string state ID + # handle the error properly + pass + + if not isinstance(name, str): errors.append( - "State '{}' in SLS '{}' is not formed as a list".format( - name, 
body["__sls__"] + "ID '{}' in SLS '{}' is not formed as a string, but is a {}. It may need to be quoted".format( + name, body["__sls__"], type(name).__name__ ) ) - else: - fun = 0 - if "." in state: - fun += 1 - for arg in body[state]: - if isinstance(arg, str): + if not isinstance(body, dict): + err = "The type {} in {} is not formatted as a dictionary".format( + name, body + ) + errors.append(err) + continue + for state in body: + if state.startswith("__"): + continue + if not isinstance(body[state], list): + errors.append( + "State '{}' in SLS '{}' is not formed as a list".format( + name, body["__sls__"] + ) + ) + else: + fun = 0 + if "." in state: + # This should not happen usually since `pad_funcs` + # is run on rendered templates fun += 1 - if " " in arg.strip(): - errors.append( - 'The function "{}" in state ' - '"{}" in SLS "{}" has ' - "whitespace, a function with whitespace is " - "not supported, perhaps this is an argument " - 'that is missing a ":"'.format( - arg, name, body["__sls__"] - ) - ) - elif isinstance(arg, dict): - # The arg is a dict, if the arg is require or - # watch, it must be a list. - # - # Add the requires to the reqs dict and check them - # all for recursive requisites. - argfirst = next(iter(arg)) - if argfirst in ("require", "watch", "prereq", "onchanges"): - if not isinstance(arg[argfirst], list): + for arg in body[state]: + if isinstance(arg, str): + fun += 1 + if " " in arg.strip(): errors.append( - "The {} statement in state '{}' in SLS '{}' " - "needs to be formed as a list".format( - argfirst, name, body["__sls__"] - ) + f'The function "{arg}" in state ' + f'"{name}" in SLS "{body["__sls__"]}" has ' + "whitespace, a function with whitespace is " + "not supported, perhaps this is an argument" + ' that is missing a ":"' ) - # It is a list, verify that the members of the - # list are all single key dicts. 
- else: - reqs[name] = {"state": state} - for req in arg[argfirst]: - if isinstance(req, str): - req = {"id": req} - if not isinstance(req, dict): - errors.append( - "Requisite declaration {} in SLS {} " - "is not formed as a single key " - "dictionary".format( - req, body["__sls__"] - ) + elif isinstance(arg, dict): + # The arg is a dict, if the arg is require or + # watch, it must be a list. + # + # Add the requires to the reqs dict and check them + # all for recursive requisites. + argfirst = next(iter(arg)) + if argfirst in ( + "require", + "watch", + "prereq", + "onchanges", + ): + if not isinstance(arg[argfirst], list): + errors.append( + "The {} statement in state '{}' in SLS '{}' " + "needs to be formed as a list".format( + argfirst, name, body["__sls__"] ) - continue - req_key = next(iter(req)) - req_val = req[req_key] - if "." in req_key: - errors.append( - "Invalid requisite type '{}' " - "in state '{}', in SLS " - "'{}'. Requisite types must " - "not contain dots, did you " - "mean '{}'?".format( - req_key, - name, - body["__sls__"], - req_key[: req_key.find(".")], + ) + # It is a list, verify that the members of the + # list are all single key dicts. + else: + reqs[name] = {"state": state} + for req in arg[argfirst]: + if isinstance(req, str): + req = {"id": req} + if not isinstance(req, dict): + errors.append( + "Requisite declaration {} in SLS {} " + "is not formed as a single key " + "dictionary".format( + req, body["__sls__"] + ) ) - ) - if not ishashable(req_val): - errors.append( - 'Illegal requisite "{}", is SLS {}\n'.format( - str(req_val), - body["__sls__"], + continue + req_key = next(iter(req)) + req_val = req[req_key] + if "." in req_key: + errors.append( + "Invalid requisite type '{}' " + "in state '{}', in SLS " + "'{}'. 
Requisite types must " + "not contain dots, did you " + "mean '{}'?".format( + req_key, + name, + body["__sls__"], + req_key[: req_key.find(".")], + ) ) - ) - continue + if not ishashable(req_val): + errors.append( + 'Illegal requisite "{}", is SLS {}\n'.format( + str(req_val), + body["__sls__"], + ) + ) + continue - # Check for global recursive requisites - reqs[name][req_val] = req_key - # I am going beyond 80 chars on - # purpose, this is just too much - # of a pain to deal with otherwise - if req_val in reqs: - if name in reqs[req_val]: - if reqs[req_val][name] == state: - if ( - reqs[req_val]["state"] - == reqs[name][req_val] - ): - errors.append( - "A recursive requisite was" - ' found, SLS "{}" ID "{}"' - ' ID "{}"'.format( - body["__sls__"], - name, - req_val, + # Check for global recursive requisites + reqs[name][req_val] = req_key + # I am going beyond 80 chars on + # purpose, this is just too much + # of a pain to deal with otherwise + if req_val in reqs: + if name in reqs[req_val]: + if reqs[req_val][name] == state: + if ( + reqs[req_val]["state"] + == reqs[name][req_val] + ): + errors.append( + "A recursive requisite was" + ' found, SLS "{}" ID "{}"' + ' ID "{}"'.format( + body["__sls__"], + name, + req_val, + ) ) - ) - # Make sure that there is only one key in the - # dict - if len(list(arg)) != 1: - errors.append( - "Multiple dictionaries defined in argument " - "of state '{}' in SLS '{}'".format( - name, body["__sls__"] + # Make sure that there is only one key in the + # dict + if len(list(arg)) != 1: + errors.append( + "Multiple dictionaries defined in argument " + "of state '{}' in SLS '{}'".format( + name, body["__sls__"] + ) ) - ) - if not fun: - if state == "require" or state == "watch": - continue - errors.append( - "No function declared in state '{}' in SLS '{}'".format( - state, body["__sls__"] + if not fun: + if state == "require" or state == "watch": + continue + errors.append( + f"No function declared in state '{name}' in SLS " + 
f"'{body['__sls__']}'" + ) + elif fun > 1: + funs = ( + [state.split(".", maxsplit=1)[1]] + if "." in state + else [] + ) + funs.extend( + arg for arg in body[state] if isinstance(arg, str) + ) + errors.append( + f"Too many functions declared in state '{name}' in " + f"SLS '{body['__sls__']}'. Please choose one of " + "the following: " + ", ".join(funs) ) - ) - elif fun > 1: - errors.append( - "Too many functions declared in state '{}' in " - "SLS '{}'".format(state, body["__sls__"]) - ) return errors def order_chunks(self, chunks): @@ -738,6 +758,7 @@ def __init__( mocked=False, loader="states", initial_pillar=None, + file_client=None, ): self._init_kwargs = { "opts": opts, @@ -754,6 +775,12 @@ def __init__( if "grains" not in opts: opts["grains"] = salt.loader.grains(opts) self.opts = opts + if file_client: + self.file_client = file_client + self.preserve_file_client = True + else: + self.file_client = salt.fileclient.get_file_client(self.opts) + self.preserve_file_client = False self.proxy = proxy self._pillar_override = pillar_override if pillar_enc is not None: @@ -778,7 +805,11 @@ def __init__( self.opts.get("pillar_merge_lists", False), ) log.debug("Finished gathering pillar data for state run") - self.state_con = context or {} + if context is None: + self.state_con = {} + else: + self.state_con = context + self.state_con["fileclient"] = self.file_client self.load_modules() self.active = set() self.mod_init = set() @@ -971,10 +1002,18 @@ def _run_check(self, low_data): "timeout", "success_retcodes", ) + if "cmd_opts_exclude" in low_data: + if not isinstance(low_data["cmd_opts_exclude"], list): + cmd_opts_exclude = [low_data["cmd_opts_exclude"]] + else: + cmd_opts_exclude = low_data["cmd_opts_exclude"] + else: + cmd_opts_exclude = [] for run_cmd_arg in POSSIBLE_CMD_ARGS: - cmd_opts[run_cmd_arg] = low_data.get(run_cmd_arg) + if run_cmd_arg not in cmd_opts_exclude: + cmd_opts[run_cmd_arg] = low_data.get(run_cmd_arg) - if "shell" in low_data: + if "shell" in 
low_data and "shell" not in cmd_opts_exclude: cmd_opts["shell"] = low_data["shell"] elif "shell" in self.opts["grains"]: cmd_opts["shell"] = self.opts["grains"].get("shell") @@ -1248,6 +1287,7 @@ def _load_states(self): self.serializers, context=self.state_con, proxy=self.proxy, + file_client=salt.fileclient.ContextlessFileClient(self.file_client), ) def load_modules(self, data=None, proxy=None): @@ -1257,7 +1297,11 @@ def load_modules(self, data=None, proxy=None): log.info("Loading fresh modules for state activity") self.utils = salt.loader.utils(self.opts) self.functions = salt.loader.minion_mods( - self.opts, self.state_con, utils=self.utils, proxy=self.proxy + self.opts, + self.state_con, + utils=self.utils, + proxy=self.proxy, + file_client=salt.fileclient.ContextlessFileClient(self.file_client), ) if isinstance(data, dict): if data.get("provider", False): @@ -1486,17 +1530,21 @@ def verify_high(self, high): else: fun = 0 if "." in state: + # This should not happen usually since `_handle_state_decls` + # is run on rendered templates fun += 1 for arg in body[state]: if isinstance(arg, str): fun += 1 if " " in arg.strip(): errors.append( - 'The function "{}" in state "{}" in SLS "{}" has ' - "whitespace, a function with whitespace is not " - "supported, perhaps this is an argument that is " - 'missing a ":"'.format(arg, name, body["__sls__"]) + f'The function "{arg}" in state ' + f'"{name}" in SLS "{body["__sls__"]}" has ' + "whitespace, a function with whitespace is " + "not supported, perhaps this is an argument" + ' that is missing a ":"' ) + elif isinstance(arg, dict): # The arg is a dict, if the arg is require or # watch, it must be a list. 
@@ -1589,14 +1637,16 @@ def verify_high(self, high): if state == "require" or state == "watch": continue errors.append( - "No function declared in state '{}' in SLS '{}'".format( - state, body["__sls__"] - ) + f"No function declared in state '{name}' in SLS " + f"'{body['__sls__']}'" ) elif fun > 1: + funs = [state.split(".", maxsplit=1)[1]] if "." in state else [] + funs.extend(arg for arg in body[state] if isinstance(arg, str)) errors.append( - "Too many functions declared in state '{}' in " - "SLS '{}'".format(state, body["__sls__"]) + f"Too many functions declared in state '{name}' in " + f"SLS '{body['__sls__']}'. Please choose one of " + "the following: " + ", ".join(funs) ) return errors @@ -2131,9 +2181,7 @@ def _call_parallel_target(cls, instance, init_kwargs, name, cdata, low): if "retry" in low: retries = 1 low["retry"] = instance.verify_retry_data(low["retry"]) - if not sys.modules[instance.states[cdata["full"]].__module__].__opts__[ - "test" - ]: + if not instance.states.opts["test"]: while low["retry"]["attempts"] >= retries: if low["retry"]["until"] == ret["result"]: @@ -2292,6 +2340,7 @@ def call(self, low, chunks=None, running=None, retries=1): initial_ret={"full": state_func_name}, expected_extra_kws=STATE_INTERNAL_KEYWORDS, ) + inject_globals = { # Pass a copy of the running dictionary, the low state chunks and # the current state dictionaries. 
@@ -2301,6 +2350,7 @@ def call(self, low, chunks=None, running=None, retries=1): "__running__": immutabletypes.freeze(running) if running else {}, "__instance_id__": self.instance_id, "__lowstate__": immutabletypes.freeze(chunks) if chunks else {}, + "__user__": self.opts.get("user", "UNKNOWN"), } if "__env__" in low: @@ -2310,8 +2360,8 @@ def call(self, low, chunks=None, running=None, retries=1): inject_globals.update(self.inject_globals) if low.get("__prereq__"): - test = sys.modules[self.states[cdata["full"]].__module__].__opts__["test"] - sys.modules[self.states[cdata["full"]].__module__].__opts__["test"] = True + test = self.states.opts["test"] + self.states.opts["test"] = True try: # Let's get a reference to the salt environment to use within this # state call. @@ -2374,11 +2424,15 @@ def call(self, low, chunks=None, running=None, retries=1): *cdata["args"], **cdata["kwargs"] ) self.states.inject_globals = {} - if ( - "check_cmd" in low - and "{0[state]}.mod_run_check_cmd".format(low) not in self.states - ): - ret.update(self._run_check_cmd(low)) + if "check_cmd" in low: + state_check_cmd = "{0[state]}.mod_run_check_cmd".format(low) + state_func = "{0[state]}.{0[fun]}".format(low) + state_func_sig = inspect.signature(self.states[state_func]) + if state_check_cmd not in self.states: + ret.update(self._run_check_cmd(low)) + else: + if "check_cmd" not in state_func_sig.parameters: + ret.update(self._run_check_cmd(low)) except Exception as exc: # pylint: disable=broad-except log.debug( "An exception occurred in this state: %s", @@ -2403,10 +2457,7 @@ def call(self, low, chunks=None, running=None, retries=1): } finally: if low.get("__prereq__"): - sys.modules[self.states[cdata["full"]].__module__].__opts__[ - "test" - ] = test - + self.states.opts["test"] = test self.state_con.pop("runas", None) self.state_con.pop("runas_password", None) @@ -2447,7 +2498,7 @@ def call(self, low, chunks=None, running=None, retries=1): ) if "retry" in low and "parallel" not in low: 
low["retry"] = self.verify_retry_data(low["retry"]) - if not sys.modules[self.states[cdata["full"]].__module__].__opts__["test"]: + if not self.states.opts["test"]: if low["retry"]["until"] != ret["result"]: if low["retry"]["attempts"] > retries: interval = low["retry"]["interval"] @@ -3637,6 +3688,16 @@ def call_template_str(self, template): return errors return self.call_high(high) + def destroy(self): + if not self.preserve_file_client: + self.file_client.close() + + def __enter__(self): + return self + + def __exit__(self, *_): + self.destroy() + class LazyAvailStates: """ @@ -4882,9 +4943,15 @@ def __init__( mocked=False, loader="states", initial_pillar=None, + file_client=None, ): self.opts = opts - self.client = salt.fileclient.get_file_client(self.opts) + if file_client: + self.client = file_client + self.preserve_client = True + else: + self.client = salt.fileclient.get_file_client(self.opts) + self.preserve_client = False BaseHighState.__init__(self, opts) self.state = State( self.opts, @@ -4896,6 +4963,7 @@ def __init__( mocked=mocked, loader=loader, initial_pillar=initial_pillar, + file_client=self.client, ) self.matchers = salt.loader.matchers(self.opts) self.proxy = proxy @@ -4930,7 +4998,8 @@ def get_active(cls): return None def destroy(self): - self.client.destroy() + if not self.preserve_client: + self.client.destroy() def __enter__(self): return self diff --git a/salt/states/acme.py b/salt/states/acme.py index 82423c309b0e..6cd9e165f6cf 100644 --- a/salt/states/acme.py +++ b/salt/states/acme.py @@ -2,7 +2,7 @@ ACME / Let's Encrypt certificate management state ================================================= -.. versionadded:: 2016.3 +.. 
versionadded:: 2016.3.0 See also the module documentation diff --git a/salt/states/ansiblegate.py b/salt/states/ansiblegate.py index 4de7fb096d84..ec8913dee59a 100644 --- a/salt/states/ansiblegate.py +++ b/salt/states/ansiblegate.py @@ -32,20 +32,31 @@ - state: installed """ - import logging import os import sys -# Import salt modules import salt.fileclient import salt.utils.decorators.path +from salt.loader.dunder import __file_client__ from salt.utils.decorators import depends log = logging.getLogger(__name__) __virtualname__ = "ansible" +def _file_client(): + """ + Return a file client + + If the __file_client__ context is set return it, otherwize create a new + file client using __opts__. + """ + if __file_client__: + return __file_client__.value() + return salt.fileclient.get_file_client(__opts__) + + @depends("ansible") class AnsibleState: """ @@ -108,13 +119,6 @@ def __virtual__(): return __virtualname__ -def _client(): - """ - Get a fileclient - """ - return salt.fileclient.get_file_client(__opts__) - - def _changes(plays): """ Find changes in ansible return data @@ -171,7 +175,8 @@ def playbooks(name, rundir=None, git_repo=None, git_kwargs=None, ansible_kwargs= } if git_repo: if not isinstance(rundir, str) or not os.path.isdir(rundir): - rundir = _client()._extrn_path(git_repo, "base") + with _file_client() as client: + rundir = client._extrn_path(git_repo, "base") log.trace("rundir set to %s", rundir) if not isinstance(git_kwargs, dict): log.debug("Setting git_kwargs to empty dict: %s", git_kwargs) diff --git a/salt/states/apache.py b/salt/states/apache.py index 058097a49e0d..8b340b7de487 100644 --- a/salt/states/apache.py +++ b/salt/states/apache.py @@ -36,7 +36,7 @@ - FollowSymlinks AllowOverride: All -.. versionchanged:: 2018.3 +.. versionchanged:: 2018.3.0 Allows having the same section container multiple times (e.g. ). 
diff --git a/salt/states/at.py b/salt/states/at.py index 2e1dec6ad4d2..a4065314552a 100644 --- a/salt/states/at.py +++ b/salt/states/at.py @@ -296,6 +296,3 @@ def mod_watch(name, **kwargs): ret = present(**kwargs) return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/states/chocolatey.py b/salt/states/chocolatey.py index f2e0651ae60f..87dad46a751c 100644 --- a/salt/states/chocolatey.py +++ b/salt/states/chocolatey.py @@ -1,5 +1,5 @@ """ -Manage Chocolatey package installs +Manage Windows Packages using Chocolatey .. versionadded:: 2016.3.0 .. note:: @@ -20,7 +20,7 @@ def __virtual__(): """ if "chocolatey.install" in __salt__: return "chocolatey" - return (False, "chocolatey module could not be loaded") + return False, "chocolatey module could not be loaded" def installed( @@ -45,50 +45,55 @@ def installed( The name of the package to be installed. Required. version (str): - Install a specific version of the package. Defaults to latest - version. If the version is different to the one installed then the - specified version will be installed. Default is None. + Install a specific version of the package. Defaults to the latest + version. If the version is different to the one installed, then the + specified version will be installed. Default is ``None``. source (str): Chocolatey repository (directory, share or remote URL, feed). - Defaults to the official Chocolatey feed. Default is None. + ``None`` defaults to the official Chocolatey feed. Default is + ``None``. force (bool): Reinstall the current version of an existing package. Do not use - with ``allow_multiple``. Default is False. + with ``allow_multiple``. Default is ``False``. pre_versions (bool): - Include pre-release packages. Default is False. + Include pre-release packages. Default is ``False``. install_args (str): - Install arguments you want to pass to the installation process, i.e - product key or feature list. Default is None. 
+ Install arguments you want to pass to the installation process, i.e. + product key or feature list. Default is ``None``. override_args (bool): - Set to True if you want to override the original install arguments - (for the native installer) in the package and use your own. When - this is set to False install_args will be appended to the end of the - default arguments. Default is False. + Set to ``True`` to override the original install arguments (for the + native installer) in the package and use your own. When this is set + to ``False``, install_args will be appended to the end of the + default arguments. Default is ``False``. force_x86 (bool): - Force x86 (32bit) installation on 64 bit systems. Default is False. + Force x86 (32bit) installation on 64bit systems. Default is + ``False``. package_args (str): - Arguments you want to pass to the package. Default is None. + Arguments you want to pass to the package. Default is ``None``. allow_multiple (bool): - Allow mulitiple versions of the package to be installed. Do not use - with ``force``. Does not work with all packages. Default is False. + Allow multiple versions of the package to be installed. Do not use + with ``force``. Does not work with all packages. Default is + ``False``. .. versionadded:: 2017.7.0 execution_timeout (str): Chocolatey execution timeout value you want to pass to the - installation process. Default is None. + installation process. Default is ``None``. + + Example: .. 
code-block:: yaml - Installsomepackage: + install_some_package: chocolatey.installed: - name: packagename - version: '12.04' @@ -109,9 +114,9 @@ def installed( # Package not installed if name.lower() not in [package.lower() for package in pre_install.keys()]: if version: - ret["changes"] = {name: "Version {} will be installed".format(version)} + ret["comment"] = f"{name} {version} will be installed" else: - ret["changes"] = {name: "Latest version will be installed"} + ret["comment"] = f"Latest version of {name} will be installed" # Package installed else: @@ -124,59 +129,36 @@ def installed( if name.lower() == pkg.lower(): full_name = pkg - installed_version = version_info[full_name]["installed"][0] + installed_version = version_info[full_name].get("installed")[0] if version: if salt.utils.versions.compare( ver1=installed_version, oper="==", ver2=version ): if force: - ret["changes"] = { - name: "Version {} will be reinstalled".format(version) - } - ret["comment"] = "Reinstall {} {}".format(full_name, version) + ret["comment"] = f"{name} {version} will be reinstalled" else: - ret["comment"] = "{} {} is already installed".format(name, version) - if __opts__["test"]: - ret["result"] = None - return ret + ret["comment"] = f"{name} {version} is already installed" else: if allow_multiple: - ret["changes"] = { - name: ( - "Version {} will be installed side by side with " - "Version {} if supported".format(version, installed_version) - ) - } - ret["comment"] = "Install {0} {1} side-by-side with {0} {2}".format( - full_name, version, installed_version - ) + ret[ + "comment" + ] = f"{name} {version} will be installed side by side with {name} {installed_version} if supported" else: - ret["changes"] = { - name: "Version {} will be installed over Version {}".format( - version, installed_version - ) - } - ret["comment"] = "Install {0} {1} over {0} {2}".format( - full_name, version, installed_version - ) + ret[ + "comment" + ] = f"{name} {version} will be installed over {name} 
{installed_version}" force = True else: version = installed_version if force: - ret["changes"] = { - name: "Version {} will be reinstalled".format(version) - } - ret["comment"] = "Reinstall {} {}".format(full_name, version) + ret["comment"] = f"{name} {version} will be reinstalled" else: - ret["comment"] = "{} {} is already installed".format(name, version) - if __opts__["test"]: - ret["result"] = None + ret["comment"] = f"{name} {version} is already installed" return ret if __opts__["test"]: ret["result"] = None - ret["comment"] = "The installation was tested" return ret # Install the package @@ -195,13 +177,12 @@ def installed( ) if "Running chocolatey failed" not in result: + ret["comment"] = f"{name} installed successfully" ret["result"] = True else: + ret["comment"] = f"Failed to install {name}" ret["result"] = False - if not ret["result"]: - ret["comment"] = "Failed to install the package {}".format(name) - # Get list of installed packages after 'chocolatey.install' post_install = __salt__["chocolatey.list"](local_only=True) @@ -212,32 +193,35 @@ def installed( def uninstalled(name, version=None, uninstall_args=None, override_args=False): """ - Uninstalls a package + Uninstalls a chocolatey package + + Args: + + name (str): + The name of the package to be uninstalled. Required. - name - The name of the package to be uninstalled + version (str): + Uninstalls a specific version of the package. Defaults to the latest + version installed. - version - Uninstalls a specific version of the package. Defaults to latest - version installed. + uninstall_args (str): + A list of uninstall arguments you want to pass to the uninstallation + process, i.e. product key or feature list - uninstall_args - A list of uninstall arguments you want to pass to the uninstallation - process i.e product key or feature list + override_args (str): + Set to ``True`` if you want to override the original uninstall + arguments (for the native uninstaller) in the package and use your + own. 
When this is set to ``False``, uninstall_args will be appended + to the end of the default arguments - override_args - Set to true if you want to override the original uninstall arguments ( - for the native uninstaller)in the package and use your own. - When this is set to False uninstall_args will be appended to the end of - the default arguments + Example: .. code-block:: yaml - Removemypackage: + remove_my_package: chocolatey.uninstalled: - name: mypackage - version: '21.5' - """ ret = {"name": name, "result": True, "changes": {}, "comment": ""} @@ -248,20 +232,15 @@ def uninstalled(name, version=None, uninstall_args=None, override_args=False): # Determine if package is installed if name.lower() in [package.lower() for package in pre_uninstall.keys()]: try: - ret["changes"] = { - name: "{} version {} will be removed".format( - name, pre_uninstall[name][0] - ) - } + ret["comment"] = f"{name} {pre_uninstall[name][0]} will be removed" except KeyError: - ret["changes"] = {name: "{} will be removed".format(name)} + ret["comment"] = f"{name} will be removed" else: - ret["comment"] = "The package {} is not installed".format(name) + ret["comment"] = f"The package {name} is not installed" return ret if __opts__["test"]: ret["result"] = None - ret["comment"] = "The uninstall was tested" return ret # Uninstall the package @@ -270,13 +249,12 @@ def uninstalled(name, version=None, uninstall_args=None, override_args=False): ) if "Running chocolatey failed" not in result: + ret["comment"] = f"{name} uninstalled successfully" ret["result"] = True else: + ret["comment"] = f"Failed to uninstall {name}" ret["result"] = False - if not ret["result"]: - ret["comment"] = "Failed to uninstall the package {}".format(name) - # Get list of installed packages after 'chocolatey.uninstall' post_uninstall = __salt__["chocolatey.list"](local_only=True) @@ -297,7 +275,7 @@ def upgraded( package_args=None, ): """ - Upgrades a package. Will install the package if not installed. 
+ Upgrades a chocolatey package. Will install the package if not installed. .. versionadded:: 2018.3.0 @@ -320,7 +298,7 @@ def upgraded( Default is ``False``. pre_versions (bool): - ``True`` will nclude pre-release packages. Default is ``False``. + ``True`` will include pre-release packages. Default is ``False``. install_args (str): Install arguments you want to pass to the installation process, i.e @@ -333,12 +311,14 @@ def upgraded( the default arguments. Default is ``False``. force_x86 (bool): - ``True`` forces 32bit installation on 64 bit systems. Default is + ``True`` forces 32bit installation on 64bit systems. Default is ``False``. package_args (str): Arguments you want to pass to the package. Default is ``None``. + Example: + .. code-block:: yaml upgrade_some_package: @@ -356,15 +336,15 @@ def upgraded( # Package not installed if name.lower() not in [package.lower() for package in pre_install.keys()]: if version: - ret["changes"][name] = "Version {} will be installed".format(version) - ret["comment"] = "Install version {}".format(version) + ret["comment"] = f"{name} {version} will be installed" else: - ret["changes"][name] = "Latest version will be installed" - ret["comment"] = "Install latest version" + ret["comment"] = f"Latest version of {name} will be installed" # Package installed else: - version_info = __salt__["chocolatey.version"](name, check_remote=True) + version_info = __salt__["chocolatey.version"]( + name=name, check_remote=True, source=source + ) # Get the actual full name out of version_info full_name = name @@ -385,40 +365,29 @@ def upgraded( ver1=installed_version, oper="==", ver2=version ): if force: - ret["changes"][name] = "Version {} will be reinstalled".format( - version - ) - ret["comment"] = "Reinstall {} {}".format(full_name, version) + ret["comment"] = f"{name} {version} will be reinstalled" else: - ret["comment"] = "{} {} is already installed".format( - name, installed_version - ) + ret["comment"] = f"{name} {version} is already 
installed" + return ret else: # If installed version is older than new version if salt.utils.versions.compare( ver1=installed_version, oper="<", ver2=version ): - ret["changes"][ - name - ] = "Version {} will be upgraded to Version {}".format( - installed_version, version - ) - ret["comment"] = "Upgrade {} {} to {}".format( - full_name, installed_version, version - ) + ret[ + "comment" + ] = f"{name} {installed_version} will be upgraded to version {version}" # If installed version is newer than new version else: - ret["comment"] = "{} {} (newer) is already installed".format( - name, installed_version - ) + ret[ + "comment" + ] = f"{name} {installed_version} (newer) is already installed" + return ret # Catch all for a condition where version is not passed and there is no # available version else: ret["comment"] = "No version found to install" - - # Return if there are no changes to be made - if not ret["changes"]: - return ret + return ret # Return if running in test mode if __opts__["test"]: @@ -439,10 +408,10 @@ def upgraded( ) if "Running chocolatey failed" not in result: - ret["comment"] = "Package {} upgraded successfully".format(name) + ret["comment"] = f"{name} upgraded successfully" ret["result"] = True else: - ret["comment"] = "Failed to upgrade the package {}".format(name) + ret["comment"] = f"Failed to upgrade {name}" ret["result"] = False # Get list of installed packages after 'chocolatey.install' @@ -460,33 +429,35 @@ def source_present( name, source_location, username=None, password=None, force=False, priority=None ): """ - Instructs Chocolatey to add a source if not already present. + Adds a Chocolatey source if not already present. + + Args: - name - The name of the source to be added as a chocolatey repository. + name (str): + The name of the source to be added as a chocolatey repository. - source - Location of the source you want to work with. + source (str): + Location of the source you want to work with. 
- username - Provide username for chocolatey sources that need authentication - credentials. + username (str): + The username for a chocolatey source that needs authentication + credentials. - password - Provide password for chocolatey sources that need authentication - credentials. + password (str): + The password for a chocolatey source that needs authentication + credentials. - force - Salt will not modify a existing repository with the same name. Set this - option to true to update an existing repository. + force (bool): + Salt will not modify an existing repository with the same name. Set + this option to ``True`` to update an existing repository. - priority - The priority order of this source as compared to other sources, - lower is better. Defaults to 0 (no priority). All priorities - above 0 will be evaluated first, then zero-based values will be - evaluated in config file order. + priority (int): + The priority order of this source as compared to other sources. + Lower is better. Defaults to 0 (no priority). All priorities + above 0 will be evaluated first, then zero-based values will be + evaluated in config file order. - CLI Example: + Example: .. 
code-block:: yaml @@ -506,21 +477,18 @@ def source_present( # Determine action # Source with same name not present if name.lower() not in [present.lower() for present in pre_install.keys()]: - ret["comment"] = "Add the source {}".format(name) + ret["comment"] = f"{name} will be added" # Source with same name already present else: if force: - ret["comment"] = "Update the source {}".format(name) + ret["comment"] = f"{name} will be updated" else: - ret["comment"] = "A source with the name {} is already present".format(name) - if __opts__["test"]: - ret["result"] = None + ret["comment"] = f"{name} is already present" return ret if __opts__["test"]: ret["result"] = None - ret["comment"] = "The installation was tested" return ret # Add the source @@ -534,10 +502,10 @@ def source_present( if "Running chocolatey failed" not in result: ret["result"] = True - ret["comment"] = "Source {} added successfully".format(name) + ret["comment"] = f"Source {name} added successfully" else: ret["result"] = False - ret["comment"] = "Failed to add the source {}".format(name) + ret["comment"] = f"Failed to add the source {name}" # Get list of present sources after 'chocolatey.add_source' post_install = __salt__["chocolatey.list_sources"]() diff --git a/salt/states/cmd.py b/salt/states/cmd.py index 2df04807ce33..5a859c8092c6 100644 --- a/salt/states/cmd.py +++ b/salt/states/cmd.py @@ -849,6 +849,7 @@ def run( if __opts__["test"] and not test_name: ret["result"] = None ret["comment"] = 'Command "{}" would have been executed'.format(name) + ret["changes"] = {"cmd": name} return _reinterpreted_state(ret) if stateful else ret if cwd and not os.path.isdir(cwd): diff --git a/salt/states/docker_image.py b/salt/states/docker_image.py index e5a253a2ef10..10bca898c3e9 100644 --- a/salt/states/docker_image.py +++ b/salt/states/docker_image.py @@ -95,6 +95,9 @@ def present( docker_image.present: - tag: mytag + name + The name of the docker image. + tag Tag name for the image. 
Required when using ``build``, ``load``, or ``sls`` to create the image, but optional if pulling from a repository. @@ -140,10 +143,14 @@ def present( .. versionchanged:: 2018.3.0 The ``tag`` must be manually specified using the ``tag`` argument. - force : False + force Set this parameter to ``True`` to force Salt to pull/build/load the image even if it is already present. + insecure_registry + If ``True``, the Docker client will permit the use of insecure + (non-HTTPS) registries. + client_timeout Timeout in seconds for the Docker client. This is not a timeout for the state, but for receiving a response from the API. @@ -206,6 +213,10 @@ def present( ``pillar_roots`` or an external Pillar source. .. versionadded:: 2018.3.0 + + kwargs + Additional keyword arguments to pass to + :py:func:`docker.build ` """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} @@ -369,6 +380,9 @@ def absent(name=None, images=None, force=False): specified either using ``repo:tag`` notation, or just the repo name (in which case a tag of ``latest`` is assumed). + name + The name of the docker image. + images Run this state on more than one image at a time. The following two examples accomplish the same thing: @@ -395,7 +409,7 @@ def absent(name=None, images=None, force=False): all the deletions in a single run, rather than executing the state separately on each image (as it would in the first example). - force : False + force Salt will fail to remove any images currently in use by a container. Set this option to true to remove the image even if it is already present. diff --git a/salt/states/file.py b/salt/states/file.py index 9f32151b8b1e..c78c5c24ab53 100644 --- a/salt/states/file.py +++ b/salt/states/file.py @@ -334,6 +334,15 @@ def run(): } +def _http_ftp_check(source): + """ + Check if source or sources is http, https or ftp. 
+ """ + if isinstance(source, str): + return source.lower().startswith(("http:", "https:", "ftp:")) + return any([s.lower().startswith(("http:", "https:", "ftp:")) for s in source]) + + def _get_accumulator_filepath(): """ Return accumulator data path. @@ -430,7 +439,7 @@ def _gen_recurse_managed_files( exclude_pat=None, maxdepth=None, include_empty=False, - **kwargs + **kwargs, ): """ Generate the list of files managed by a recurse state @@ -712,6 +721,7 @@ def _check_directory( exclude_pat=None, max_depth=None, follow_symlinks=False, + children_only=False, ): """ Check what changes need to be made on a directory @@ -783,10 +793,12 @@ def _check_directory( ) if fchange: changes[path] = fchange - # Recurse skips root (we always do dirs, not root), so always check root: - fchange = _check_dir_meta(name, user, group, dir_mode, follow_symlinks) - if fchange: - changes[name] = fchange + # Recurse skips root (we always do dirs, not root), so check root unless + # children_only is specified: + if not children_only: + fchange = _check_dir_meta(name, user, group, dir_mode, follow_symlinks) + if fchange: + changes[name] = fchange if clean: keep = _gen_keep_files(name, require, walk_d) @@ -1333,7 +1345,7 @@ def hardlink( user=None, group=None, dir_mode=None, - **kwargs + **kwargs, ): """ Create a hard link @@ -1539,7 +1551,7 @@ def symlink( atomic=False, disallow_copy_and_unlink=False, inherit_user_and_group=False, - **kwargs + **kwargs, ): """ Create a symbolic link (symlink, soft link) @@ -1790,9 +1802,11 @@ def symlink( if __salt__["file.is_link"](name): # The link exists, verify that it matches the target - if os.path.normpath(__salt__["file.readlink"](name)) == os.path.normpath( + if os.path.normpath(__salt__["file.readlink"](name)) != os.path.normpath( target ): + __salt__["file.remove"](name) + else: if _check_symlink_ownership(name, user, group, win_owner): # The link looks good! 
if salt.utils.platform.is_windows(): @@ -1975,10 +1989,10 @@ def tidied( age_size_logical_operator="OR", age_size_only=None, rmlinks=True, - **kwargs + **kwargs, ): """ - .. versionchanged:: 3006.0,3005 + .. versionchanged:: 3005,3006.0 Remove unwanted files based on specific criteria. @@ -2294,7 +2308,7 @@ def managed( win_perms_reset=False, verify_ssl=True, use_etag=False, - **kwargs + **kwargs, ): r""" Manage a given file, this function allows for a file to be downloaded from @@ -2414,6 +2428,8 @@ def managed( - source: https://launchpad.net/tomdroid/beta/0.7.3/+download/tomdroid-src-0.7.3.tar.gz - source_hash: md5=79eef25f9b0b2c642c62b7f737d4f53f + source_hash is ignored if the file hosted is not on a HTTP, HTTPS or FTP server. + Known issues: If the remote server URL has the hash file as an apparent sub-directory of the source file, the module will discover that it @@ -2946,6 +2962,9 @@ def managed( "'contents_grains' is permitted", ) + if source is not None and not _http_ftp_check(source) and source_hash: + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") + # If no source is specified, set replace to False, as there is nothing # with which to replace the file. 
if not source and contents_count == 0 and replace: @@ -3191,7 +3210,7 @@ def managed( serange=serange, verify_ssl=verify_ssl, follow_symlinks=follow_symlinks, - **kwargs + **kwargs, ) if salt.utils.platform.is_windows(): @@ -3254,7 +3273,7 @@ def managed( skip_verify, verify_ssl=verify_ssl, use_etag=use_etag, - **kwargs + **kwargs, ) except Exception as exc: # pylint: disable=broad-except ret["changes"] = {} @@ -3309,7 +3328,7 @@ def managed( setype=setype, serange=serange, use_etag=use_etag, - **kwargs + **kwargs, ) except Exception as exc: # pylint: disable=broad-except ret["changes"] = {} @@ -3388,7 +3407,7 @@ def managed( setype=setype, serange=serange, use_etag=use_etag, - **kwargs + **kwargs, ) except Exception as exc: # pylint: disable=broad-except ret["changes"] = {} @@ -3476,7 +3495,7 @@ def directory( win_deny_perms=None, win_inheritance=True, win_perms_reset=False, - **kwargs + **kwargs, ): r""" Ensure that a named directory is present and has the right perms @@ -3938,6 +3957,7 @@ def directory( exclude_pat, max_depth, follow_symlinks, + children_only, ) if tchanges: @@ -4190,7 +4210,7 @@ def recurse( win_perms=None, win_deny_perms=None, win_inheritance=True, - **kwargs + **kwargs, ): """ Recurse through a subdirectory on the master and copy said subdirectory @@ -4479,10 +4499,8 @@ def recurse( srcpath, senv = salt.utils.url.parse(source) if senv is None: senv = __env__ - master_dirs = __salt__["cp.list_master_dirs"](saltenv=senv) - if srcpath not in master_dirs and not any( - x for x in master_dirs if x.startswith(srcpath + "/") - ): + master_dirs = __salt__["cp.list_master_dirs"](saltenv=senv, prefix=srcpath + "/") + if srcpath not in master_dirs: ret["result"] = False ret["comment"] = ( "The directory '{}' does not exist on the salt fileserver " @@ -4563,7 +4581,7 @@ def manage_file(path, source, replace): context=context, defaults=defaults, backup=backup, - **pass_kwargs + **pass_kwargs, ) merge_ret(path, _ret) @@ -5998,6 +6016,9 @@ def blockreplace( 
if not name: return _error(ret, "Must provide name to file.blockreplace") + if source is not None and not _http_ftp_check(source) and source_hash: + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") + if sources is None: sources = [] if source_hashes is None: @@ -6141,7 +6162,7 @@ def comment(name, regex, char="#", backup=".bak", ignore_missing=False): # remove (?i)-like flags, ^ and $ unanchor_regex = re.sub(r"^(\(\?[iLmsux]\))?\^?(.*?)\$?$", r"\2", regex) - uncomment_regex = r"^(?!\s*{}).*".format(char) + unanchor_regex + uncomment_regex = rf"^(?!\s*{char})\s*" + unanchor_regex comment_regex = char + unanchor_regex # Make sure the pattern appears in the file before continuing @@ -6434,6 +6455,9 @@ def append( if not name: return _error(ret, "Must provide name to file.append") + if source is not None and not _http_ftp_check(source) and source_hash: + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") + name = os.path.expanduser(name) if sources is None: @@ -6718,6 +6742,9 @@ def prepend( if not name: return _error(ret, "Must provide name to file.prepend") + if source is not None and not _http_ftp_check(source) and source_hash: + log.warning("source_hash is only used with 'http', 'https' or 'ftp'") + if sources is None: sources = [] @@ -6879,7 +6906,7 @@ def patch( reject_file=None, strip=None, saltenv=None, - **kwargs + **kwargs, ): """ Ensure that a patch has been applied to the specified file or directory @@ -7377,7 +7404,7 @@ def copy_( mode=None, dir_mode=None, subdir=False, - **kwargs + **kwargs, ): """ If the file defined by the ``source`` option exists on the minion, copy it @@ -7819,7 +7846,7 @@ def serialize( serializer=None, serializer_opts=None, deserializer_opts=None, - **kwargs + **kwargs, ): """ Serializes dataset and store it into managed file. 
Useful for sharing @@ -8155,7 +8182,7 @@ def serialize( saltenv=__env__, contents=contents, skip_verify=False, - **kwargs + **kwargs, ) if ret["changes"]: @@ -8536,7 +8563,7 @@ def shortcut( backupname=None, makedirs=False, user=None, - **kwargs + **kwargs, ): """ Create a Windows shortcut @@ -8937,6 +8964,25 @@ def cached( else: source_sum = {} + if __opts__["test"]: + local_copy = __salt__["cp.is_cached"](name, saltenv=saltenv) + if local_copy: + if source_sum: + hash = __salt__["file.get_hash"](local_copy, __opts__["hash_type"]) + if hash == source_sum["hsum"]: + ret["comment"] = "File already cached: {}".format(name) + else: + ret[ + "comment" + ] = "Hashes don't match.\nFile will be cached: {}".format(name) + else: + ret["comment"] = "No hash found. File will be cached: {}".format(name) + else: + ret["comment"] = "File will be cached: {}".format(name) + ret["changes"] = {} + ret["result"] = None + return ret + if parsed.scheme in salt.utils.files.LOCAL_PROTOS: # Source is a local file path full_path = os.path.realpath(os.path.expanduser(parsed.path)) diff --git a/salt/states/logadm.py b/salt/states/logadm.py index 53382dd6fec3..d2b33e32b4f7 100644 --- a/salt/states/logadm.py +++ b/salt/states/logadm.py @@ -161,6 +161,3 @@ def remove(name, log_file=None): ret["comment"] = "No configuration for {} present.".format(log_file) return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/states/lvm.py b/salt/states/lvm.py index 72470a9b07fc..b48bf6dde3aa 100644 --- a/salt/states/lvm.py +++ b/salt/states/lvm.py @@ -274,7 +274,7 @@ def lv_present( force Assume yes to all prompts - .. versionadded:: 3002.0 + .. 
versionadded:: 3002 resizefs Use fsadm to resize the logical volume filesystem if needed diff --git a/salt/states/openvswitch_bridge.py b/salt/states/openvswitch_bridge.py index 4ee3b3ef8807..13c4109598de 100644 --- a/salt/states/openvswitch_bridge.py +++ b/salt/states/openvswitch_bridge.py @@ -22,11 +22,11 @@ def present(name, parent=None, vlan=None): parent : string name of the parent bridge (if the bridge shall be created as a fake bridge). If specified, vlan must also be specified. - .. versionadded:: 3006 + .. versionadded:: 3006.0 vlan: int VLAN ID of the bridge (if the bridge shall be created as a fake bridge). If specified, parent must also be specified. - .. versionadded:: 3006 + .. versionadded:: 3006.0 """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} diff --git a/salt/states/openvswitch_db.py b/salt/states/openvswitch_db.py index c874ca2b53f3..22ee5f8822e8 100644 --- a/salt/states/openvswitch_db.py +++ b/salt/states/openvswitch_db.py @@ -1,7 +1,7 @@ """ Management of Open vSwitch database records. -.. versionadded:: 3006 +.. versionadded:: 3006.0 """ diff --git a/salt/states/pcs.py b/salt/states/pcs.py index 6cc1e3eabf68..cd124c1a82aa 100644 --- a/salt/states/pcs.py +++ b/salt/states/pcs.py @@ -5,7 +5,7 @@ A state module to manage Pacemaker/Corosync clusters with the Pacemaker/Corosync configuration system (PCS) -.. versionadded:: 2016.110 +.. 
versionadded:: 2016.11.0 :depends: pcs diff --git a/salt/states/pdbedit.py b/salt/states/pdbedit.py index 15d7cdd17530..8a2848f4d392 100644 --- a/salt/states/pdbedit.py +++ b/salt/states/pdbedit.py @@ -141,6 +141,3 @@ def present(name, **kwargs): Alias for pdbedit.managed """ return managed(name, **kwargs) - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/states/pip_state.py b/salt/states/pip_state.py index 542a7f6c751b..9430ea457ce0 100644 --- a/salt/states/pip_state.py +++ b/salt/states/pip_state.py @@ -114,7 +114,7 @@ def pip_has_exceptions_mod(ver): # pylint: enable=import-error -log = logging.getLogger(__name__) +logger = logging.getLogger(__name__) # Define the module's virtual name __virtualname__ = "pip" @@ -174,7 +174,7 @@ def _check_pkg_version_format(pkg): if not HAS_PIP: ret["comment"] = ( - "An importable Python 2 pip module is required but could not be " + "An importable Python pip module is required but could not be " "found on your system. This usually means that the system's pip " "package is not installed properly." ) @@ -189,10 +189,10 @@ def _check_pkg_version_format(pkg): # vcs+URL urls are not properly parsed. # The next line is meant to trigger an AttributeError and # handle lower pip versions - log.debug("Installed pip version: %s", pip.__version__) + logger.debug("Installed pip version: %s", pip.__version__) install_req = _from_line(pkg) except AttributeError: - log.debug("Installed pip version is lower than 1.2") + logger.debug("Installed pip version is lower than 1.2") supported_vcs = ("git", "svn", "hg", "bzr") if pkg.startswith(supported_vcs): for vcs in supported_vcs: @@ -251,7 +251,7 @@ def _check_if_installed( index_url, extra_index_url, pip_list=False, - **kwargs + **kwargs, ): """ Takes a package name and version specification (if any) and checks it is @@ -351,7 +351,7 @@ def _pep440_version_cmp(pkg1, pkg2, ignore_epoch=False): making the comparison. 
""" if HAS_PKG_RESOURCES is False: - log.warning( + logger.warning( "The pkg_resources packages was not loaded. Please install setuptools." ) return None @@ -367,7 +367,9 @@ def _pep440_version_cmp(pkg1, pkg2, ignore_epoch=False): if pkg_resources.parse_version(pkg1) > pkg_resources.parse_version(pkg2): return 1 except Exception as exc: # pylint: disable=broad-except - log.exception(exc) + logger.exception( + f'Comparison of package versions "{pkg1}" and "{pkg2}" failed: {exc}' + ) return None @@ -418,7 +420,7 @@ def installed( cache_dir=None, no_binary=None, extra_args=None, - **kwargs + **kwargs, ): """ Make sure the package is installed @@ -818,6 +820,13 @@ def installed( ret["comment"] = "\n".join(comments) return ret + # If the user does not exist, stop here with error: + if user and "user.info" in __salt__ and not __salt__["user.info"](user): + # The user does not exists, exit with result set to False + ret["result"] = False + ret["comment"] = f"User {user} does not exist" + return ret + # If a requirements file is specified, only install the contents of the # requirements file. Similarly, using the --editable flag with pip should # also ignore the "name" and "pkgs" parameters. @@ -852,7 +861,9 @@ def installed( ) # If we fail, then just send False, and we'll try again in the next function call except Exception as exc: # pylint: disable=broad-except - log.exception(exc) + logger.exception( + f"Pre-caching of PIP packages during states.pip.installed failed by exception from pip.list: {exc}" + ) pip_list = False for prefix, state_pkg_name, version_spec in pkgs_details: @@ -872,7 +883,7 @@ def installed( index_url, extra_index_url, pip_list, - **kwargs + **kwargs, ) # If _check_if_installed result is None, something went wrong with # the command running. This way we keep stateful output. 
@@ -978,7 +989,7 @@ def installed( no_cache_dir=no_cache_dir, extra_args=extra_args, disable_version_check=True, - **kwargs + **kwargs, ) if pip_install_call and pip_install_call.get("retcode", 1) == 0: @@ -1043,7 +1054,7 @@ def installed( user=user, cwd=cwd, env_vars=env_vars, - **kwargs + **kwargs, ) ) diff --git a/salt/states/pkg.py b/salt/states/pkg.py index c14cec9f7cc4..08ddd83b82e6 100644 --- a/salt/states/pkg.py +++ b/salt/states/pkg.py @@ -72,6 +72,7 @@ import os import re +import salt.utils.args import salt.utils.pkg import salt.utils.platform import salt.utils.versions @@ -257,7 +258,7 @@ def _find_download_targets( normalize=True, skip_suggestions=False, ignore_epoch=None, - **kwargs + **kwargs, ): """ Inspect the arguments to pkg.downloaded and discover what packages need to @@ -512,7 +513,7 @@ def _find_install_targets( ignore_epoch=None, reinstall=False, refresh=False, - **kwargs + **kwargs, ): """ Inspect the arguments to pkg.installed and discover what packages need to @@ -760,7 +761,9 @@ def _find_install_targets( err = "Unable to cache {0}: {1}" try: cached_path = __salt__["cp.cache_file"]( - version_string, saltenv=kwargs["saltenv"] + version_string, + saltenv=kwargs["saltenv"], + verify_ssl=kwargs.get("verify_ssl", True), ) except CommandExecutionError as exc: problems.append(err.format(version_string, exc)) @@ -797,7 +800,7 @@ def _find_install_targets( package_name, ignore_types=ignore_types, verify_options=verify_options, - **kwargs + **kwargs, ) except (CommandExecutionError, SaltInvocationError) as exc: failed_verify = exc.strerror @@ -831,7 +834,7 @@ def _find_install_targets( package_name, ignore_types=ignore_types, verify_options=verify_options, - **kwargs + **kwargs, ) except (CommandExecutionError, SaltInvocationError) as exc: failed_verify = exc.strerror @@ -1004,7 +1007,7 @@ def installed( ignore_epoch=None, reinstall=False, update_holds=False, - **kwargs + **kwargs, ): """ Ensure that the package is installed, and that it is the 
correct version @@ -1710,7 +1713,7 @@ def installed( ignore_epoch=ignore_epoch, reinstall=reinstall, refresh=refresh, - **kwargs + **kwargs, ) try: @@ -1877,7 +1880,7 @@ def installed( update_holds=update_holds, ignore_epoch=ignore_epoch, split_arch=False, - **kwargs + **kwargs, ) except CommandExecutionError as exc: ret = {"name": name, "result": False} @@ -2094,7 +2097,7 @@ def installed( reinstall_pkg, ignore_types=ignore_types, verify_options=verify_options, - **kwargs + **kwargs, ) if verify_result: failed.append(reinstall_pkg) @@ -2283,7 +2286,7 @@ def downloaded( downloadonly=True, fromrepo=fromrepo, ignore_epoch=ignore_epoch, - **kwargs + **kwargs, ) ret["result"] = True ret["changes"].update(pkg_ret) @@ -2457,7 +2460,7 @@ def latest( skip_verify=False, pkgs=None, watch_flags=True, - **kwargs + **kwargs, ): """ Ensure that the named package is installed and the latest available @@ -2766,7 +2769,7 @@ def latest( fromrepo=fromrepo, skip_verify=skip_verify, pkgs=targeted_pkgs, - **kwargs + **kwargs, ) except CommandExecutionError as exc: return { @@ -2786,7 +2789,8 @@ def latest( x for x in targets if not changes.get(x) - or changes[x].get("new") != targets[x] + or changes[x].get("new") is not None + and targets[x] not in changes[x].get("new").split(",") and targets[x] != "latest" ] successful = [x for x in targets if x not in failed] @@ -2874,7 +2878,7 @@ def _uninstall( pkgs=None, normalize=True, ignore_epoch=None, - **kwargs + **kwargs, ): """ Common function for package removal @@ -2938,7 +2942,6 @@ def _uninstall( } if __opts__["test"]: - _changes = {} _changes.update({x: {"new": "{}d".format(action), "old": ""} for x in targets}) @@ -3081,7 +3084,7 @@ def removed(name, version=None, pkgs=None, normalize=True, ignore_epoch=None, ** pkgs=pkgs, normalize=normalize, ignore_epoch=ignore_epoch, - **kwargs + **kwargs, ) except CommandExecutionError as exc: ret = {"name": name, "result": False} @@ -3173,7 +3176,7 @@ def purged(name, version=None, pkgs=None, 
normalize=True, ignore_epoch=None, **k pkgs=pkgs, normalize=normalize, ignore_epoch=ignore_epoch, - **kwargs + **kwargs, ) except CommandExecutionError as exc: ret = {"name": name, "result": False} @@ -3304,9 +3307,16 @@ def group_installed(name, skip=None, include=None, **kwargs): .. versionchanged:: 2016.11.0 Added support in :mod:`pacman ` + .. versionchanged:: 3006.2 + For RPM-based systems, support for ``fromrepo``, ``enablerepo``, and + ``disablerepo`` (as used in :py:func:`pkg.install + `) has been added. This allows one to, for + example, use ``enablerepo`` to perform a group install from a repo that + is otherwise disabled. + Ensure that an entire package group is installed. This state is currently - only supported for the :mod:`yum ` and :mod:`pacman ` - package managers. + only supported for the :mod:`yum ` and :mod:`pacman + ` package managers. skip Packages that would normally be installed by the package group @@ -3336,6 +3346,45 @@ def group_installed(name, skip=None, include=None, **kwargs): This option can no longer be passed as a comma-separated list, it must now be passed as a list (as shown in the above example). + .. note:: + The below options are only supported on RPM-based systems + + fromrepo + Restrict ``yum groupinfo`` to the specified repo(s). + (e.g., ``yum --disablerepo='*' --enablerepo='somerepo'``) + + .. code-block:: yaml + + MyGroup: + pkg.group_installed: + - fromrepo: base,updates + + .. versionadded:: 3006.2 + + enablerepo (ignored if ``fromrepo`` is specified) + Specify a disabled package repository (or repositories) to enable. + (e.g., ``yum --enablerepo='somerepo'``) + + .. code-block:: yaml + + MyGroup: + pkg.group_installed: + - enablerepo: myrepo + + .. versionadded:: 3006.2 + + disablerepo (ignored if ``fromrepo`` is specified) + Specify an enabled package repository (or repositories) to disable. + (e.g., ``yum --disablerepo='somerepo'``) + + .. 
code-block:: yaml + + MyGroup: + pkg.group_installed: + - disablerepo: epel + + .. versionadded:: 3006.2 + .. note:: Because this is essentially a wrapper around :py:func:`pkg.install `, any argument which can be passed to @@ -3369,13 +3418,16 @@ def group_installed(name, skip=None, include=None, **kwargs): include[idx] = str(item) try: - diff = __salt__["pkg.group_diff"](name) - except CommandExecutionError as err: - ret[ - "comment" - ] = "An error was encountered while installing/updating group '{}': {}.".format( - name, err + diff = __salt__["pkg.group_diff"]( + name, **salt.utils.args.clean_kwargs(**kwargs) ) + except (CommandExecutionError, TypeError) as err: + if "unexpected keyword argument" in str(err): + ret["comment"] = "Repo options are not supported on this platform" + else: + ret[ + "comment" + ] = f"An error was encountered while installing/updating group '{name}': {err}." return ret mandatory = diff["mandatory"]["installed"] + diff["mandatory"]["not installed"] diff --git a/salt/states/pkgrepo.py b/salt/states/pkgrepo.py index 67a50c3ca026..7a55c5c0c59f 100644 --- a/salt/states/pkgrepo.py +++ b/salt/states/pkgrepo.py @@ -620,6 +620,12 @@ def absent(name, **kwargs): The name of the package repo, as it would be referred to when running the regular package manager commands. + .. note:: + On apt-based systems this must be the complete source entry. For + example, if you include ``[arch=amd64]``, and a repo matching the + specified URI, dist, etc. exists _without_ an architecture, then no + changes will be made and the state will report a ``True`` result. 
+ **FEDORA/REDHAT-SPECIFIC OPTIONS** copr @@ -702,6 +708,23 @@ def absent(name, **kwargs): ret["comment"] = "Failed to configure repo '{}': {}".format(name, exc) return ret + if repo and ( + __grains__["os_family"].lower() == "debian" + or __opts__.get("providers", {}).get("pkg") == "aptpkg" + ): + # On Debian/Ubuntu, pkg.get_repo will return a match for the repo + # even if the architectures do not match. However, changing get_repo + # breaks idempotency for pkgrepo.managed states. So, compare the + # architectures of the matched repo to the architectures specified in + # the repo string passed to this state. If the architectures do not + # match, then invalidate the match by setting repo to an empty dict. + from salt.modules.aptpkg import _split_repo_str + + if set(_split_repo_str(stripname)["architectures"]) != set( + repo["architectures"] + ): + repo = {} + if not repo: ret["comment"] = "Package repo {} is absent".format(name) ret["result"] = True diff --git a/salt/states/saltmod.py b/salt/states/saltmod.py index 7ae8dae37a0d..1f30d0fa38a7 100644 --- a/salt/states/saltmod.py +++ b/salt/states/saltmod.py @@ -125,7 +125,7 @@ def state( subset=None, orchestration_jid=None, failhard=None, - **kwargs + **kwargs, ): """ Invoke a state run on a given target @@ -454,7 +454,7 @@ def function( batch=None, subset=None, failhard=None, - **kwargs + **kwargs, ): # pylint: disable=unused-argument """ Execute a single module function on a remote minion via salt or salt-ssh @@ -780,6 +780,14 @@ def runner(name, **kwargs): log.debug("Unable to fire args event due to missing __orchestration_jid__") jid = None + try: + kwargs["__pub_user"] = __user__ + log.debug( + f"added __pub_user to kwargs using dunder user '{__user__}', kwargs '{kwargs}'" + ) + except NameError: + log.warning("unable to find user for fire args event due to missing __user__") + if __opts__.get("test", False): ret = { "name": name, @@ -899,7 +907,7 @@ def call_runner(runner_config): __orchestration_jid__=jid, 
__env__=__env__, full_return=True, - **(runner_config.get("kwarg", {})) + **(runner_config.get("kwarg", {})), ) try: @@ -1033,7 +1041,7 @@ def wheel(name, **kwargs): jid = None if __opts__.get("test", False): - ret["result"] = (None,) + ret["result"] = None ret["changes"] = {} ret["comment"] = "Wheel function '{}' would be executed.".format(name) return ret diff --git a/salt/states/schedule.py b/salt/states/schedule.py index c92c4c7827cf..f7d9d56e2b1f 100644 --- a/salt/states/schedule.py +++ b/salt/states/schedule.py @@ -214,11 +214,15 @@ def present(name, **kwargs): Whether the scheduled job should run immediately after the skip_during_range time period ends. + offline + Add the scheduled job to the Salt minion when the Salt minion is not running. """ ret = {"name": name, "result": True, "changes": {}, "comment": []} - current_schedule = __salt__["schedule.list"](show_all=True, return_yaml=False) + current_schedule = __salt__["schedule.list"]( + show_all=True, return_yaml=False, offline=kwargs.get("offline") + ) if name in current_schedule: new_item = __salt__["schedule.build_schedule_item"](name, **kwargs) @@ -289,7 +293,9 @@ def absent(name, **kwargs): ret = {"name": name, "result": True, "changes": {}, "comment": []} - current_schedule = __salt__["schedule.list"](show_all=True, return_yaml=False) + current_schedule = __salt__["schedule.list"]( + show_all=True, return_yaml=False, offline=kwargs.get("offline") + ) if name in current_schedule: if "test" in __opts__ and __opts__["test"]: kwargs["test"] = True @@ -357,11 +363,15 @@ def disabled(name, **kwargs): persist Whether changes to the scheduled job should be saved, defaults to True. + offline + Delete the scheduled job to the Salt minion when the Salt minion is not running. 
""" ret = {"name": name, "result": True, "changes": {}, "comment": []} - current_schedule = __salt__["schedule.list"](show_all=True, return_yaml=False) + current_schedule = __salt__["schedule.list"]( + show_all=True, return_yaml=False, offline=kwargs.get("offline") + ) if name in current_schedule: if "test" in __opts__ and __opts__["test"]: kwargs["test"] = True diff --git a/salt/states/service.py b/salt/states/service.py index 93c7c4fb0707..0bb04c34cc14 100644 --- a/salt/states/service.py +++ b/salt/states/service.py @@ -19,6 +19,13 @@ please see the :ref:`Overriding Virtual Module Providers ` section of Salt's module documentation to work around possible errors. +For services managed by systemd, the systemd_service module includes a built-in +feature to reload the daemon when unit files are changed or extended. This +feature is used automatically by the service state and the systemd_service +module when running on a systemd minion, so there is no need to set up your own +methods of reloading the daemon. If you need to manually reload the daemon for +some reason, you can use the :func:`systemd_service.systemctl_reload ` function provided by Salt. + .. note:: The current status of a service is determined by the return code of the init/rc script status command. A status return code of 0 it is considered running. 
Any diff --git a/salt/states/smartos.py b/salt/states/smartos.py index 9ae2c722036e..7420d6ba33ff 100644 --- a/salt/states/smartos.py +++ b/salt/states/smartos.py @@ -1311,6 +1311,3 @@ def vm_stopped(name): ret["comment"] = "vm {} stopped".format(name) return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/states/ssh_auth.py b/salt/states/ssh_auth.py index 5e80e71c8d48..1806a34709aa 100644 --- a/salt/states/ssh_auth.py +++ b/salt/states/ssh_auth.py @@ -84,7 +84,7 @@ def _present_test( for key, status in keys.items(): if status == "exists": continue - comment += "Set to {}: {}\n".format(status, key) + comment += f"Set to {status}: {key}\n" if comment: return result, comment err = sys.modules[__salt__["test.ping"].__module__].__context__.pop( @@ -95,7 +95,7 @@ def _present_test( else: return ( True, - "All host keys in file {} are already present".format(source), + f"All host keys in file {source} are already present", ) else: # check if this is of form {options} {enc} {key} {comment} @@ -128,9 +128,9 @@ def _present_test( fingerprint_hash_type=fingerprint_hash_type, ) if check == "update": - comment = "Key {} for user {} is set to be updated".format(name, user) + comment = f"Key {name} for user {user} is set to be updated" elif check == "add": - comment = "Key {} for user {} is set to be added".format(name, user) + comment = f"Key {name} for user {user} is set to be added" elif check == "exists": result = True comment = "The authorized host key {} is already present for user {}".format( @@ -160,7 +160,7 @@ def _absent_test( for key, status in list(keys.items()): if status == "add": continue - comment += "Set to remove: {}\n".format(key) + comment += f"Set to remove: {key}\n" if comment: return result, comment err = sys.modules[__salt__["test.ping"].__module__].__context__.pop( @@ -169,7 +169,7 @@ def _absent_test( if err: return False, err else: - return (True, "All host keys in file {} are already absent".format(source)) + return 
(True, f"All host keys in file {source} are already absent") else: # check if this is of form {options} {enc} {key} {comment} sshre = re.compile(r"^(.*?)\s?((?:sk-)?(?:ssh\-|ecds)[\w-]+\s.+)$") @@ -201,7 +201,7 @@ def _absent_test( fingerprint_hash_type=fingerprint_hash_type, ) if check == "update" or check == "exists": - comment = "Key {} for user {} is set for removal".format(name, user) + comment = f"Key {name} for user {user} is set for removal" else: comment = "Key is already absent" result = True @@ -218,7 +218,7 @@ def present( options=None, config=".ssh/authorized_keys", fingerprint_hash_type=None, - **kwargs + **kwargs, ): """ Verifies that the specified SSH key is present for the specified user @@ -524,7 +524,7 @@ def manage( options=None, config=".ssh/authorized_keys", fingerprint_hash_type=None, - **kwargs + **kwargs, ): """ .. versionadded:: 3000 @@ -580,15 +580,29 @@ def manage( # gather list potential ssh keys for removal comparison # options, enc, and comments could be in the mix all_potential_keys.extend(ssh_key.split(" ")) - existing_keys = __salt__["ssh.auth_keys"](user=user).keys() + + existing_keys = __salt__["ssh.auth_keys"]( + user=user, config=config, fingerprint_hash_type=fingerprint_hash_type + ).keys() + remove_keys = set(existing_keys).difference(all_potential_keys) for remove_key in remove_keys: if __opts__["test"]: - remove_comment = "{} Key set for removal".format(remove_key) + remove_comment = f"{remove_key} Key set for removal" ret["comment"] = remove_comment ret["result"] = None else: - remove_comment = absent(remove_key, user)["comment"] + remove_comment = absent( + remove_key, + user, + enc=enc, + comment=comment, + source=source, + options=options, + config=config, + fingerprint_hash_type=fingerprint_hash_type, + **kwargs, + )["comment"] ret["changes"][remove_key] = remove_comment for ssh_key in ssh_keys: @@ -601,7 +615,7 @@ def manage( options, config, fingerprint_hash_type, - **kwargs + **kwargs, ) if run_return["changes"]: 
ret["changes"].update(run_return["changes"]) diff --git a/salt/states/user.py b/salt/states/user.py index ed2d5a05f486..d575655cf345 100644 --- a/salt/states/user.py +++ b/salt/states/user.py @@ -40,11 +40,29 @@ def _group_changes(cur, wanted, remove=False): """ Determine if the groups need to be changed """ - old = set(cur) - new = set(wanted) - if (remove and old != new) or (not remove and not new.issubset(old)): - return True - return False + cur = set(cur) + wanted = set(wanted) + + if cur == wanted or (not remove and wanted.issubset(cur)): + return False + + all_grps = {name: __salt__["group.info"](name) for name in cur.union(wanted)} + + if remove: + diff = wanted.symmetric_difference(cur) + else: + diff = wanted.difference(cur) + + remain = list(diff) + for diff_grp in diff: + for grp, info in all_grps.items(): + if grp == diff_grp: + continue + if all_grps[diff_grp]["gid"] == info["gid"]: + # dupe detected + remain.remove(diff_grp) + + return bool(remain) def _changes( @@ -100,7 +118,16 @@ def _changes( change = {} wanted_groups = sorted(set((groups or []) + (optional_groups or []))) - if not remove_groups: + lusr_groups_gids = [ + __salt__["file.group_to_gid"](gname) for gname in lusr["groups"] + ] + dupe_groups = {} + for idx, _gid in enumerate(lusr_groups_gids): + if lusr_groups_gids.count(_gid) > 1: + if _gid not in dupe_groups: + dupe_groups[_gid] = [] + dupe_groups[_gid].append(lusr["groups"][idx]) + if not remove_groups or groups is None and not optional_groups: wanted_groups = sorted(set(wanted_groups + lusr["groups"])) if uid and lusr["uid"] != uid: change["uid"] = uid @@ -109,24 +136,44 @@ def _changes( default_grp = __salt__["file.gid_to_group"](gid if gid is not None else lusr["gid"]) old_default_grp = __salt__["file.gid_to_group"](lusr["gid"]) # Remove the default group from the list for comparison purposes. 
- if default_grp in lusr["groups"]: - lusr["groups"].remove(default_grp) + # Remove default group from wanted_groups, as this requirement is + # already met + if default_grp in lusr["groups"] or default_grp in wanted_groups: + if default_grp in salt.utils.data.flatten(dupe_groups.values()): + dupe_gid = __salt__["file.group_to_gid"](default_grp) + for gname in dupe_groups[dupe_gid]: + if gname in lusr["groups"]: + lusr["groups"].remove(gname) + if gname in wanted_groups: + wanted_groups.remove(gname) + else: + if default_grp in lusr["groups"]: + lusr["groups"].remove(default_grp) + if default_grp in wanted_groups: + wanted_groups.remove(default_grp) # If the group is being changed, make sure that the old primary group is # also removed from the list. Otherwise, if a user's gid is being changed # and their old primary group is reassigned as an additional group, Salt # will not properly detect the need for the change. if old_default_grp != default_grp and old_default_grp in lusr["groups"]: - lusr["groups"].remove(old_default_grp) + if old_default_grp in salt.utils.data.flatten(dupe_groups.values()): + dupe_gid = __salt__["file.group_to_gid"](old_default_grp) + for gname in dupe_groups[dupe_gid]: + lusr["groups"].remove(gname) + else: + lusr["groups"].remove(old_default_grp) # If there's a group by the same name as the user, remove it from the list # for comparison purposes. 
if name in lusr["groups"] and name not in wanted_groups: - lusr["groups"].remove(name) - # Remove default group from wanted_groups, as this requirement is - # already met - if default_grp in wanted_groups: - wanted_groups.remove(default_grp) + if name in salt.utils.data.flatten(dupe_groups.values()): + dupe_gid = __salt__["file.group_to_gid"](name) + for gname in dupe_groups[dupe_gid]: + lusr["groups"].remove(gname) + else: + lusr["groups"].remove(name) if _group_changes(lusr["groups"], wanted_groups, remove_groups): - change["groups"] = wanted_groups + if wanted_groups or remove_groups: + change["groups"] = wanted_groups if home and lusr["home"] != home: change["home"] = home if createhome: diff --git a/salt/states/win_lgpo_reg.py b/salt/states/win_lgpo_reg.py index 01b10e4e6101..8377817a1985 100644 --- a/salt/states/win_lgpo_reg.py +++ b/salt/states/win_lgpo_reg.py @@ -72,21 +72,25 @@ def __virtual__(): return __virtualname__ -def _format_changes(changes, key, v_name): +def _get_current(key, name, policy_class): """ - Reformat the changes dictionary to group new and old together. 
+ Helper function to get the current state of the policy """ - new_changes = {"new": {}, "old": {}} - for item in changes: - if changes[item]["new"]: - new_changes["new"][item] = changes[item]["new"] - new_changes["new"]["key"] = key - new_changes["new"]["name"] = v_name - if changes[item]["old"]: - new_changes["old"][item] = changes[item]["old"] - new_changes["old"]["key"] = key - new_changes["old"]["name"] = v_name - return new_changes + hive = "HKLM" + if policy_class == "User": + hive = "HKCU" + pol = __salt__["lgpo_reg.get_value"]( + key=key, v_name=name, policy_class=policy_class + ) + reg_raw = __utils__["reg.read_value"](hive=hive, key=key, vname=name) + + reg = {} + if reg_raw["vdata"] is not None: + reg["data"] = reg_raw["vdata"] + if reg_raw["vtype"] is not None: + reg["type"] = reg_raw["vtype"] + + return {"pol": pol, "reg": reg} def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine"): @@ -145,16 +149,29 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine") """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} - old = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class + old = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = ( + str(old["pol"].get("data", "")) == str(v_data) + and old["pol"].get("type", "") == v_type + ) + reg_correct = ( + str(old["reg"].get("data", "")) == str(v_data) + and old["reg"].get("type", "") == v_type ) - if old.get("data", "") == v_data and old.get("type", "") == v_type: - ret["comment"] = "Registry.pol value already present" + + if pol_correct and reg_correct: + ret["comment"] = "Policy value already present\nRegistry value already present" ret["result"] = True return ret if __opts__["test"]: - ret["comment"] = "Registry.pol value will be set" + if not pol_correct: + ret["comment"] = "Policy value will be set" + if not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] 
+= "Registry value will be set" ret["result"] = None return ret @@ -166,16 +183,29 @@ def value_present(name, key, v_data, v_type="REG_DWORD", policy_class="Machine") policy_class=policy_class, ) - new = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class + new = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = ( + str(new["pol"]["data"]) == str(v_data) and new["pol"]["type"] == v_type + ) + reg_correct = ( + str(new["reg"]["data"]) == str(v_data) and new["reg"]["type"] == v_type ) - changes = salt.utils.data.compare_dicts(old, new) + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value has been set" + ret["result"] = True + elif not pol_correct: + ret["comment"] = "Failed to set policy value" + elif not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Failed to set registry value" + + changes = salt.utils.data.recursive_diff(old, new) if changes: - ret["comment"] = "Registry.pol value has been set" - ret["changes"] = _format_changes(changes, key, name) - ret["result"] = True + ret["changes"] = changes return ret @@ -219,31 +249,47 @@ def value_disabled(name, key, policy_class="Machine"): """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} - old = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) - if old.get("data", "") == "**del.{}".format(name): - ret["comment"] = "Registry.pol value already disabled" + old = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = old["pol"].get("data", "") == "**del.{}".format(name) + reg_correct = old["reg"] == {} + + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value already disabled" ret["result"] = True return ret if __opts__["test"]: - ret["comment"] = "Registry.pol value will be disabled" + if not pol_correct: + ret["comment"] = "Policy value will be disabled" + if not reg_correct: + if ret["comment"]: + 
ret["comment"] += "\n" + ret["comment"] += "Registry value will be removed" ret["result"] = None return ret __salt__["lgpo_reg.disable_value"](key=key, v_name=name, policy_class=policy_class) - new = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) + new = _get_current(key=key, name=name, policy_class=policy_class) - changes = salt.utils.data.compare_dicts(old, new) + pol_correct = new["pol"].get("data", "") == "**del.{}".format(name) + reg_correct = new["reg"] == {} - if changes: - ret["comment"] = "Registry.pol value enabled" - ret["changes"] = _format_changes(changes, key, name) + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value disabled" ret["result"] = True + elif not pol_correct: + ret["comment"] = "Failed to disable policy value" + elif not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Failed to remove registry value" + + changes = salt.utils.data.recursive_diff(old, new) + + if changes: + ret["changes"] = changes return ret @@ -287,33 +333,46 @@ def value_absent(name, key, policy_class="Machine"): """ ret = {"name": name, "changes": {}, "result": False, "comment": ""} - old = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) - if not old: - ret["comment"] = "Registry.pol value already absent" + old = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = old["pol"] == {} + reg_correct = old["reg"] == {} + + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value already deleted" ret["result"] = True return ret if __opts__["test"]: - ret["comment"] = "Registry.pol value will be deleted" + if not pol_correct: + ret["comment"] = "Policy value will be deleted" + if not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Registry value will be deleted" ret["result"] = None return ret __salt__["lgpo_reg.delete_value"](key=key, v_name=name, policy_class=policy_class) 
- new = __salt__["lgpo_reg.get_value"]( - key=key, v_name=name, policy_class=policy_class - ) + new = _get_current(key=key, name=name, policy_class=policy_class) + + pol_correct = new["pol"] == {} + reg_correct = new["reg"] == {} - if new is None: - new = {} + if pol_correct and reg_correct: + ret["comment"] = "Registry policy value deleted" + ret["result"] = True + elif not pol_correct: + ret["comment"] = "Failed to delete policy value" + elif not reg_correct: + if ret["comment"]: + ret["comment"] += "\n" + ret["comment"] += "Failed to delete registry value" - changes = salt.utils.data.compare_dicts(old, new) + changes = salt.utils.data.recursive_diff(old, new) if changes: - ret["comment"] = "Registry.pol value deleted" - ret["changes"] = _format_changes(changes, key, name) - ret["result"] = True + ret["changes"] = changes return ret diff --git a/salt/states/x509_v2.py b/salt/states/x509_v2.py index 35790fff24ae..ee04eba610a4 100644 --- a/salt/states/x509_v2.py +++ b/salt/states/x509_v2.py @@ -82,7 +82,7 @@ - keysize: 4096 - backup: true - require: - - file: /etc/pki + - file: /etc/pki/issued_certs Create self-signed CA certificate: x509.certificate_managed: @@ -1274,7 +1274,7 @@ def private_key_managed( keysize For ``rsa``, specifies the bitlength of the private key (2048, 3072, 4096). For ``ec``, specifies the NIST curve to use (256, 384, 521). - Irrelevant for Edwards-curve schemes (`ed25519``, ``ed448``). + Irrelevant for Edwards-curve schemes (``ed25519``, ``ed448``). Defaults to 2048 for RSA and 256 for EC. 
passphrase @@ -1450,7 +1450,7 @@ def private_key_managed( and algo in ("rsa", "ec") and current.key_size != check_keysize ): - changes["keysize"] = keysize + changes["keysize"] = check_keysize if encoding != current_encoding: changes["encoding"] = encoding elif file_exists and new: diff --git a/salt/states/zabbix_action.py b/salt/states/zabbix_action.py index ba5e2992731f..01877edcacd3 100644 --- a/salt/states/zabbix_action.py +++ b/salt/states/zabbix_action.py @@ -1,7 +1,7 @@ """ Management of Zabbix Action object over Zabbix API. -.. versionadded:: 2017.7 +.. versionadded:: 2017.7.0 :codeauthor: Jakub Sliva """ diff --git a/salt/states/zabbix_template.py b/salt/states/zabbix_template.py index 70e87ea2e4f0..53efe618cac0 100644 --- a/salt/states/zabbix_template.py +++ b/salt/states/zabbix_template.py @@ -1,5 +1,5 @@ """ -.. versionadded:: 2017.7 +.. versionadded:: 2017.7.0 Management of Zabbix Template object over Zabbix API. diff --git a/salt/states/zabbix_valuemap.py b/salt/states/zabbix_valuemap.py index 006e4a7b35c4..23c14e16869a 100644 --- a/salt/states/zabbix_valuemap.py +++ b/salt/states/zabbix_valuemap.py @@ -1,7 +1,7 @@ """ Management of Zabbix Valuemap object over Zabbix API. -.. versionadded:: 2017.7 +.. 
versionadded:: 2017.7.0 :codeauthor: Jakub Sliva """ diff --git a/salt/states/zfs.py b/salt/states/zfs.py index 855ec6383410..b28517ed5a12 100644 --- a/salt/states/zfs.py +++ b/salt/states/zfs.py @@ -1090,6 +1090,3 @@ def scheduled_snapshot(name, prefix, recursive=True, schedule=None): ret["comment"] = "scheduled snapshots are up to date" return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/states/zone.py b/salt/states/zone.py index 3d8d07bb6f87..d411ad33a0aa 100644 --- a/salt/states/zone.py +++ b/salt/states/zone.py @@ -1274,6 +1274,3 @@ def uninstalled(name): ret["comment"] = "zone {} is not configured!".format(name) return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/states/zpool.py b/salt/states/zpool.py index 9900dde9ea1f..4c8c7b4f3af3 100644 --- a/salt/states/zpool.py +++ b/salt/states/zpool.py @@ -442,6 +442,3 @@ def absent(name, export=False, force=False): ret["comment"] = "storage pool {} is absent".format(name) return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/template.py b/salt/template.py index b56df9725f05..7de759dc4fba 100644 --- a/salt/template.py +++ b/salt/template.py @@ -149,7 +149,9 @@ def compile_template_str(template, renderers, default, blacklist, whitelist): fn_ = salt.utils.files.mkstemp() with salt.utils.files.fopen(fn_, "wb") as ofile: ofile.write(SLS_ENCODER(template)[0]) - return compile_template(fn_, renderers, default, blacklist, whitelist) + ret = compile_template(fn_, renderers, default, blacklist, whitelist) + os.unlink(fn_) + return ret def template_shebang(template, renderers, default, blacklist, whitelist, input_data): diff --git a/salt/tokens/localfs.py b/salt/tokens/localfs.py index 99a239d62f15..61c2d945ad35 100644 --- a/salt/tokens/localfs.py +++ b/salt/tokens/localfs.py @@ -11,6 +11,7 @@ import salt.utils.files import salt.utils.path import salt.utils.verify +from salt.config import DEFAULT_HASH_TYPE log = 
logging.getLogger(__name__) @@ -27,7 +28,7 @@ def mk_token(opts, tdata): :param tdata: Token data to be stored with 'token' attribute of this dict set to the token. :returns: tdata with token if successful. Empty dict if failed. """ - hash_type = getattr(hashlib, opts.get("hash_type", "md5")) + hash_type = getattr(hashlib, opts.get("hash_type", DEFAULT_HASH_TYPE)) tok = str(hash_type(os.urandom(512)).hexdigest()) t_path = os.path.join(opts["token_dir"], tok) temp_t_path = "{}.tmp".format(t_path) diff --git a/salt/tokens/rediscluster.py b/salt/tokens/rediscluster.py index 241fe64b869a..dc9bb44d3eac 100644 --- a/salt/tokens/rediscluster.py +++ b/salt/tokens/rediscluster.py @@ -13,12 +13,12 @@ :depends: - redis-py-cluster Python package """ - import hashlib import logging import os import salt.payload +from salt.config import DEFAULT_HASH_TYPE try: import rediscluster @@ -74,7 +74,7 @@ def mk_token(opts, tdata): redis_client = _redis_client(opts) if not redis_client: return {} - hash_type = getattr(hashlib, opts.get("hash_type", "md5")) + hash_type = getattr(hashlib, opts.get("hash_type", DEFAULT_HASH_TYPE)) tok = str(hash_type(os.urandom(512)).hexdigest()) try: while redis_client.get(tok) is not None: diff --git a/salt/transport/base.py b/salt/transport/base.py index 014a9731d59e..2e4f68e4cc02 100644 --- a/salt/transport/base.py +++ b/salt/transport/base.py @@ -1,3 +1,6 @@ +import traceback +import warnings + import salt.ext.tornado.gen TRANSPORTS = ( @@ -94,14 +97,52 @@ def publish_client(opts, io_loop): raise Exception("Transport type not found: {}".format(ttype)) -class RequestClient: +class TransportWarning(Warning): + """ + Transport warning. 
+ """ + + +class Transport: + def __init__(self, *args, **kwargs): + self._trace = "\n".join(traceback.format_stack()[:-1]) + if not hasattr(self, "_closing"): + self._closing = False + if not hasattr(self, "_connect_called"): + self._connect_called = False + + def connect(self, *args, **kwargs): + self._connect_called = True + + # pylint: disable=W1701 + def __del__(self): + """ + Warn the user if the transport's close method was never called. + + If the _closing attribute is missing we won't raise a warning. This + prevents issues when class's dunder init method is called with improper + arguments, and is later getting garbage collected. Users of this class + should take care to call super() and validate the functionality with a + test. + """ + if getattr(self, "_connect_called") and not getattr(self, "_closing", True): + warnings.warn( + f"Unclosed transport! {self!r} \n{self._trace}", + TransportWarning, + source=self, + ) + + # pylint: enable=W1701 + + +class RequestClient(Transport): """ The RequestClient transport is used to make requests and get corresponding replies from the RequestServer. """ def __init__(self, opts, io_loop, **kwargs): - pass + super().__init__() @salt.ext.tornado.gen.coroutine def send(self, load, timeout=60): @@ -116,7 +157,7 @@ def close(self): """ raise NotImplementedError - def connect(self): + def connect(self): # pylint: disable=W0221 """ Connect to the server / broker. """ @@ -197,13 +238,13 @@ def publish_daemon( raise NotImplementedError -class PublishClient: +class PublishClient(Transport): """ The PublishClient receives messages from the PublishServer and runs a callback. 
""" def __init__(self, opts, io_loop, **kwargs): - pass + super().__init__() def on_recv(self, callback): """ @@ -212,7 +253,9 @@ def on_recv(self, callback): raise NotImplementedError @salt.ext.tornado.gen.coroutine - def connect(self, publish_port, connect_callback=None, disconnect_callback=None): + def connect( # pylint: disable=arguments-differ + self, publish_port, connect_callback=None, disconnect_callback=None + ): """ Create a network connection to the the PublishServer or broker. """ diff --git a/salt/transport/client.py b/salt/transport/client.py index 7ffc97fe8e76..bd79ac357b4c 100644 --- a/salt/transport/client.py +++ b/salt/transport/client.py @@ -13,8 +13,6 @@ log = logging.getLogger(__name__) -# XXX: Add depreication warnings to start using salt.channel.client - class ReqChannel: """ diff --git a/salt/transport/ipc.py b/salt/transport/ipc.py index ca13a498e3e9..453afaaad78e 100644 --- a/salt/transport/ipc.py +++ b/salt/transport/ipc.py @@ -13,7 +13,6 @@ import salt.ext.tornado.gen import salt.ext.tornado.ioloop import salt.ext.tornado.netutil -import salt.transport.client import salt.transport.frame import salt.utils.msgpack from salt.ext.tornado.ioloop import IOLoop diff --git a/salt/transport/local.py b/salt/transport/local.py index 49fb1e0b588a..e0a22b78cb1a 100644 --- a/salt/transport/local.py +++ b/salt/transport/local.py @@ -1,7 +1,7 @@ import logging import salt.utils.files -from salt.transport.client import ReqChannel +from salt.channel.client import ReqChannel log = logging.getLogger(__name__) diff --git a/salt/transport/tcp.py b/salt/transport/tcp.py index 6a9e11389403..2c3b5644fe65 100644 --- a/salt/transport/tcp.py +++ b/salt/transport/tcp.py @@ -15,6 +15,7 @@ import socket import threading import urllib +import uuid import salt.ext.tornado import salt.ext.tornado.concurrent @@ -25,10 +26,8 @@ import salt.ext.tornado.tcpserver import salt.master import salt.payload -import salt.transport.client import salt.transport.frame import 
salt.transport.ipc -import salt.transport.server import salt.utils.asynchronous import salt.utils.files import salt.utils.msgpack @@ -36,6 +35,7 @@ import salt.utils.versions from salt.exceptions import SaltClientError, SaltReqTimeoutError from salt.utils.network import ip_bracket +from salt.utils.process import SignalHandlingProcess if salt.utils.platform.is_windows(): USE_LOAD_BALANCER = True @@ -44,7 +44,6 @@ if USE_LOAD_BALANCER: import salt.ext.tornado.util - from salt.utils.process import SignalHandlingProcess log = logging.getLogger(__name__) @@ -129,69 +128,64 @@ def _set_tcp_keepalive(sock, opts): sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 0) -if USE_LOAD_BALANCER: +class LoadBalancerServer(SignalHandlingProcess): + """ + Raw TCP server which runs in its own process and will listen + for incoming connections. Each incoming connection will be + sent via multiprocessing queue to the workers. + Since the queue is shared amongst workers, only one worker will + handle a given connection. + """ - class LoadBalancerServer(SignalHandlingProcess): - """ - Raw TCP server which runs in its own process and will listen - for incoming connections. Each incoming connection will be - sent via multiprocessing queue to the workers. - Since the queue is shared amongst workers, only one worker will - handle a given connection. - """ + # TODO: opts! + # Based on default used in salt.ext.tornado.netutil.bind_sockets() + backlog = 128 - # TODO: opts! 
- # Based on default used in salt.ext.tornado.netutil.bind_sockets() - backlog = 128 + def __init__(self, opts, socket_queue, **kwargs): + super().__init__(**kwargs) + self.opts = opts + self.socket_queue = socket_queue + self._socket = None - def __init__(self, opts, socket_queue, **kwargs): - super().__init__(**kwargs) - self.opts = opts - self.socket_queue = socket_queue + def close(self): + if self._socket is not None: + self._socket.shutdown(socket.SHUT_RDWR) + self._socket.close() self._socket = None - def close(self): - if self._socket is not None: - self._socket.shutdown(socket.SHUT_RDWR) - self._socket.close() - self._socket = None - - # pylint: disable=W1701 - def __del__(self): - self.close() - - # pylint: enable=W1701 + # pylint: disable=W1701 + def __del__(self): + self.close() - def run(self): - """ - Start the load balancer - """ - self._socket = _get_socket(self.opts) - self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) - _set_tcp_keepalive(self._socket, self.opts) - self._socket.setblocking(1) - self._socket.bind(_get_bind_addr(self.opts, "ret_port")) - self._socket.listen(self.backlog) + # pylint: enable=W1701 - while True: - try: - # Wait for a connection to occur since the socket is - # blocking. - connection, address = self._socket.accept() - # Wait for a free slot to be available to put - # the connection into. - # Sockets are picklable on Windows in Python 3. - self.socket_queue.put((connection, address), True, None) - except OSError as e: - # ECONNABORTED indicates that there was a connection - # but it was closed while still in the accept queue. - # (observed on FreeBSD). 
- if ( - salt.ext.tornado.util.errno_from_exception(e) - == errno.ECONNABORTED - ): - continue - raise + def run(self): + """ + Start the load balancer + """ + self._socket = _get_socket(self.opts) + self._socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + _set_tcp_keepalive(self._socket, self.opts) + self._socket.setblocking(1) + self._socket.bind(_get_bind_addr(self.opts, "ret_port")) + self._socket.listen(self.backlog) + + while True: + try: + # Wait for a connection to occur since the socket is + # blocking. + connection, address = self._socket.accept() + # Wait for a free slot to be available to put + # the connection into. + # Sockets are picklable on Windows in Python 3. + self.socket_queue.put((connection, address), True, None) + except OSError as e: + # ECONNABORTED indicates that there was a connection + # but it was closed while still in the accept queue. + # (observed on FreeBSD). + if salt.ext.tornado.util.errno_from_exception(e) == errno.ECONNABORTED: + continue + raise class Resolver: @@ -219,6 +213,7 @@ class TCPPubClient(salt.transport.base.PublishClient): ttype = "tcp" def __init__(self, opts, io_loop, **kwargs): # pylint: disable=W0231 + super().__init__(opts, io_loop, **kwargs) self.opts = opts self.io_loop = io_loop self.message_client = None @@ -234,14 +229,9 @@ def close(self): self.message_client.close() self.message_client = None - # pylint: disable=W1701 - def __del__(self): - self.close() - - # pylint: enable=W1701 - @salt.ext.tornado.gen.coroutine def connect(self, publish_port, connect_callback=None, disconnect_callback=None): + self._connect_called = True self.publish_port = publish_port self.message_client = MessageClient( self.opts, @@ -469,45 +459,43 @@ def close(self): raise -if USE_LOAD_BALANCER: - - class LoadBalancerWorker(SaltMessageServer): - """ - This will receive TCP connections from 'LoadBalancerServer' via - a multiprocessing queue. 
- Since the queue is shared amongst workers, only one worker will handle - a given connection. - """ +class LoadBalancerWorker(SaltMessageServer): + """ + This will receive TCP connections from 'LoadBalancerServer' via + a multiprocessing queue. + Since the queue is shared amongst workers, only one worker will handle + a given connection. + """ - def __init__(self, socket_queue, message_handler, *args, **kwargs): - super().__init__(message_handler, *args, **kwargs) - self.socket_queue = socket_queue - self._stop = threading.Event() - self.thread = threading.Thread(target=self.socket_queue_thread) - self.thread.start() + def __init__(self, socket_queue, message_handler, *args, **kwargs): + super().__init__(message_handler, *args, **kwargs) + self.socket_queue = socket_queue + self._stop = threading.Event() + self.thread = threading.Thread(target=self.socket_queue_thread) + self.thread.start() - def close(self): - self._stop.set() - self.thread.join() - super().close() + def close(self): + self._stop.set() + self.thread.join() + super().close() - def socket_queue_thread(self): - try: - while True: - try: - client_socket, address = self.socket_queue.get(True, 1) - except queue.Empty: - if self._stop.is_set(): - break - continue - # 'self.io_loop' initialized in super class - # 'salt.ext.tornado.tcpserver.TCPServer'. - # 'self._handle_connection' defined in same super class. - self.io_loop.spawn_callback( - self._handle_connection, client_socket, address - ) - except (KeyboardInterrupt, SystemExit): - pass + def socket_queue_thread(self): + try: + while True: + try: + client_socket, address = self.socket_queue.get(True, 1) + except queue.Empty: + if self._stop.is_set(): + break + continue + # 'self.io_loop' initialized in super class + # 'salt.ext.tornado.tcpserver.TCPServer'. + # 'self._handle_connection' defined in same super class. 
+ self.io_loop.spawn_callback( + self._handle_connection, client_socket, address + ) + except (KeyboardInterrupt, SystemExit): + pass class TCPClientKeepAlive(salt.ext.tornado.tcpclient.TCPClient): @@ -571,10 +559,7 @@ def __init__( self.io_loop = io_loop or salt.ext.tornado.ioloop.IOLoop.current() with salt.utils.asynchronous.current_ioloop(self.io_loop): self._tcp_client = TCPClientKeepAlive(opts, resolver=resolver) - self._mid = 1 - self._max_messages = int((1 << 31) - 2) # number of IDs before we wrap # TODO: max queue size - self.send_queue = [] # queue of messages to be sent self.send_future_map = {} # mapping of request_id -> Future self._read_until_future = None @@ -587,10 +572,6 @@ def __init__( self.backoff = opts.get("tcp_reconnect_backoff", 1) - def _stop_io_loop(self): - if self.io_loop is not None: - self.io_loop.stop() - # TODO: timeout inflight sessions def close(self): if self._closing: @@ -724,18 +705,7 @@ def _stream_return(self): self._stream_return_running = False def _message_id(self): - wrap = False - while self._mid in self.send_future_map: - if self._mid >= self._max_messages: - if wrap: - # this shouldn't ever happen, but just in case - raise Exception("Unable to find available messageid") - self._mid = 1 - wrap = True - else: - self._mid += 1 - - return self._mid + return str(uuid.uuid4()) # TODO: return a message object which takes care of multiplexing? 
def on_recv(self, callback): @@ -977,6 +947,7 @@ def publish_daemon( """ io_loop = salt.ext.tornado.ioloop.IOLoop() io_loop.make_current() + self.io_loop = io_loop # Spin up the publisher self.pub_server = pub_server = PubServer( @@ -1063,6 +1034,7 @@ class TCPReqClient(salt.transport.base.RequestClient): ttype = "tcp" def __init__(self, opts, io_loop, **kwargs): # pylint: disable=W0231 + super().__init__(opts, io_loop, **kwargs) self.opts = opts self.io_loop = io_loop parse = urllib.parse.urlparse(self.opts["master_uri"]) @@ -1079,9 +1051,11 @@ def __init__(self, opts, io_loop, **kwargs): # pylint: disable=W0231 source_ip=opts.get("source_ip"), source_port=opts.get("source_ret_port"), ) + self._closing = False @salt.ext.tornado.gen.coroutine def connect(self): + self._connect_called = True yield self.message_client.connect() @salt.ext.tornado.gen.coroutine @@ -1090,4 +1064,7 @@ def send(self, load, timeout=60): raise salt.ext.tornado.gen.Return(ret) def close(self): + if self._closing: + return + self._closing = True self.message_client.close() diff --git a/salt/transport/zeromq.py b/salt/transport/zeromq.py index f83cb1a390cc..4f54430cefd2 100644 --- a/salt/transport/zeromq.py +++ b/salt/transport/zeromq.py @@ -11,12 +11,14 @@ from random import randint import zmq.error +import zmq.eventloop.future import zmq.eventloop.zmqstream import salt.ext.tornado import salt.ext.tornado.concurrent import salt.ext.tornado.gen import salt.ext.tornado.ioloop +import salt.ext.tornado.locks import salt.payload import salt.transport.base import salt.utils.files @@ -205,6 +207,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): # TODO: this is the time to see if we are connected, maybe use the req channel to guess? 
@salt.ext.tornado.gen.coroutine def connect(self, publish_port, connect_callback=None, disconnect_callback=None): + self._connect_called = True self.publish_port = publish_port log.debug( "Connecting the Minion to the Master publish port, using the URI: %s", @@ -212,7 +215,8 @@ def connect(self, publish_port, connect_callback=None, disconnect_callback=None) ) log.debug("%r connecting to %s", self, self.master_pub) self._socket.connect(self.master_pub) - connect_callback(True) + if connect_callback is not None: + connect_callback(True) @property def master_pub(self): @@ -330,6 +334,8 @@ def zmq_device(self): self.clients.bind(self.uri) log.info("ReqServer workers %s", self.w_uri) self.workers.bind(self.w_uri) + if self.opts.get("ipc_mode", "") != "tcp": + os.chmod(os.path.join(self.opts["sock_dir"], "workers.ipc"), 0o600) while True: if self.clients.closed or self.workers.closed: @@ -416,13 +422,21 @@ def post_fork(self, message_handler, io_loop): ) log.info("Worker binding to socket %s", self.w_uri) self._socket.connect(self.w_uri) + if self.opts.get("ipc_mode", "") != "tcp" and os.path.isfile( + os.path.join(self.opts["sock_dir"], "workers.ipc") + ): + os.chmod(os.path.join(self.opts["sock_dir"], "workers.ipc"), 0o600) self.stream = zmq.eventloop.zmqstream.ZMQStream(self._socket, io_loop=io_loop) self.message_handler = message_handler self.stream.on_recv_stream(self.handle_message) @salt.ext.tornado.gen.coroutine def handle_message(self, stream, payload): - payload = self.decode_payload(payload) + try: + payload = self.decode_payload(payload) + except salt.exceptions.SaltDeserializationError: + self.stream.send(self.encode_payload({"msg": "bad load"})) + return # XXX: Is header really needed? 
reply = yield self.message_handler(payload) self.stream.send(self.encode_payload(reply)) @@ -502,58 +516,33 @@ def __init__(self, opts, addr, linger=0, io_loop=None): else: self.io_loop = io_loop - self.context = zmq.Context() + self.context = zmq.eventloop.future.Context() self.send_queue = [] - # mapping of message -> future - self.send_future_map = {} self._closing = False + self._send_future_map = {} + self.lock = salt.ext.tornado.locks.Lock() + self.ident = threading.get_ident() def connect(self): + if hasattr(self, "socket") and self.socket: + return # wire up sockets self._init_socket() - # TODO: timeout all in-flight sessions, or error def close(self): - try: - if self._closing: - return - except AttributeError: - # We must have been called from __del__ - # The python interpreter has nuked most attributes already + if self._closing: return else: self._closing = True - if hasattr(self, "stream") and self.stream is not None: - if ZMQ_VERSION_INFO < (14, 3, 0): - # stream.close() doesn't work properly on pyzmq < 14.3.0 - if self.stream.socket: - self.stream.socket.close() - self.stream.io_loop.remove_handler(self.stream.socket) - # set this to None, more hacks for messed up pyzmq - self.stream.socket = None - self.socket.close() - else: - self.stream.close(1) - self.socket = None - self.stream = None + if hasattr(self, "socket") and self.socket is not None: + self.socket.close(0) + self.socket = None if self.context.closed is False: self.context.term() - # pylint: disable=W1701 - def __del__(self): - self.close() - - # pylint: enable=W1701 - def _init_socket(self): - if hasattr(self, "stream"): - self.stream.close() # pylint: disable=E0203 - self.socket.close() # pylint: disable=E0203 - del self.stream - del self.socket - self.socket = self.context.socket(zmq.REQ) # socket options @@ -567,24 +556,8 @@ def _init_socket(self): self.socket.setsockopt(zmq.IPV6, 1) elif hasattr(zmq, "IPV4ONLY"): self.socket.setsockopt(zmq.IPV4ONLY, 0) - self.socket.linger = 
self.linger + self.socket.setsockopt(zmq.LINGER, self.linger) self.socket.connect(self.addr) - self.stream = zmq.eventloop.zmqstream.ZMQStream( - self.socket, io_loop=self.io_loop - ) - - def timeout_message(self, message): - """ - Handle a message timeout by removing it from the sending queue - and informing the caller - - :raises: SaltReqTimeoutError - """ - future = self.send_future_map.pop(message, None) - # In a race condition the message might have been sent by the time - # we're timing it out. Make sure the future is not None - if future is not None: - future.set_exception(SaltReqTimeoutError("Message timed out")) @salt.ext.tornado.gen.coroutine def send(self, message, timeout=None, callback=None): @@ -603,28 +576,41 @@ def handle_future(future): future.add_done_callback(handle_future) - # Add this future to the mapping - self.send_future_map[message] = future - if self.opts.get("detect_mode") is True: timeout = 1 if timeout is not None: send_timeout = self.io_loop.call_later( - timeout, self.timeout_message, message + timeout, self._timeout_message, future ) - def mark_future(msg): - if not future.done(): - data = salt.payload.loads(msg[0]) - future.set_result(data) - self.send_future_map.pop(message) + self.io_loop.spawn_callback(self._send_recv, message, future) - self.stream.on_recv(mark_future) - yield self.stream.send(message) recv = yield future + raise salt.ext.tornado.gen.Return(recv) + def _timeout_message(self, future): + if not future.done(): + future.set_exception(SaltReqTimeoutError("Message timed out")) + + @salt.ext.tornado.gen.coroutine + def _send_recv(self, message, future): + try: + with (yield self.lock.acquire()): + yield self.socket.send(message) + try: + recv = yield self.socket.recv() + except zmq.eventloop.future.CancelledError as exc: + future.set_exception(exc) + return + + if not future.done(): + data = salt.payload.loads(recv) + future.set_result(data) + except Exception as exc: # pylint: disable=broad-except + 
future.set_exception(exc) + class ZeroMQSocketMonitor: __EVENT_MAP = None @@ -679,7 +665,10 @@ def monitor_callback(self, msg): def stop(self): if self._socket is None: return - self._socket.disable_monitor() + try: + self._socket.disable_monitor() + except zmq.Error: + pass self._socket = None self._monitor_socket = None if self._monitor_stream is not None: @@ -898,6 +887,7 @@ class RequestClient(salt.transport.base.RequestClient): ttype = "zeromq" def __init__(self, opts, io_loop): # pylint: disable=W0231 + super().__init__(opts, io_loop) self.opts = opts master_uri = self.get_master_uri(opts) self.message_client = AsyncReqMessageClient( @@ -905,17 +895,24 @@ def __init__(self, opts, io_loop): # pylint: disable=W0231 master_uri, io_loop=io_loop, ) + self._closing = False + self._connect_called = False + @salt.ext.tornado.gen.coroutine def connect(self): + self._connect_called = True self.message_client.connect() @salt.ext.tornado.gen.coroutine def send(self, load, timeout=60): - self.connect() + yield self.connect() ret = yield self.message_client.send(load, timeout=timeout) raise salt.ext.tornado.gen.Return(ret) def close(self): + if self._closing: + return + self._closing = True self.message_client.close() @staticmethod diff --git a/salt/utils/asynchronous.py b/salt/utils/asynchronous.py index 2a858feee980..0c645bbc3bb5 100644 --- a/salt/utils/asynchronous.py +++ b/salt/utils/asynchronous.py @@ -131,7 +131,7 @@ def _target(self, key, args, kwargs, results, io_loop): result = io_loop.run_sync(lambda: getattr(self.obj, key)(*args, **kwargs)) results.append(True) results.append(result) - except Exception as exc: # pylint: disable=broad-except + except Exception: # pylint: disable=broad-except results.append(False) results.append(sys.exc_info()) diff --git a/salt/utils/aws.py b/salt/utils/aws.py index 3ee8734caba6..c74104c08db5 100644 --- a/salt/utils/aws.py +++ b/salt/utils/aws.py @@ -8,6 +8,7 @@ :depends: requests """ +import copy import hashlib import hmac 
import logging @@ -144,30 +145,28 @@ def creds(provider): ## if needed if provider["id"] == IROLE_CODE or provider["key"] == IROLE_CODE: # Check to see if we have cache credentials that are still good - if __Expiration__ != "": - timenow = datetime.utcnow() - timestamp = timenow.strftime("%Y-%m-%dT%H:%M:%SZ") - if timestamp < __Expiration__: - # Current timestamp less than expiration fo cached credentials - return __AccessKeyId__, __SecretAccessKey__, __Token__ - # We don't have any cached credentials, or they are expired, get them - - try: - result = get_metadata("meta-data/iam/security-credentials/") - role = result.text - except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError): - return provider["id"], provider["key"], "" - - try: - result = get_metadata("meta-data/iam/security-credentials/{}".format(role)) - except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError): - return provider["id"], provider["key"], "" + if not __Expiration__ or __Expiration__ < datetime.utcnow().strftime( + "%Y-%m-%dT%H:%M:%SZ" + ): + # We don't have any cached credentials, or they are expired, get them + try: + result = get_metadata("meta-data/iam/security-credentials/") + role = result.text + except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError): + return provider["id"], provider["key"], "" + + try: + result = get_metadata( + "meta-data/iam/security-credentials/{}".format(role) + ) + except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError): + return provider["id"], provider["key"], "" - data = result.json() - __AccessKeyId__ = data["AccessKeyId"] - __SecretAccessKey__ = data["SecretAccessKey"] - __Token__ = data["Token"] - __Expiration__ = data["Expiration"] + data = result.json() + __AccessKeyId__ = data["AccessKeyId"] + __SecretAccessKey__ = data["SecretAccessKey"] + __Token__ = data["Token"] + __Expiration__ = data["Expiration"] ret_credentials = __AccessKeyId__, __SecretAccessKey__, __Token__ else: @@ 
-230,9 +229,9 @@ def assumed_creds(prov_dict, role_arn, location=None): # current time in epoch seconds now = time.mktime(datetime.utcnow().timetuple()) - for key, creds in __AssumeCache__.items(): + for key, creds in copy.deepcopy(__AssumeCache__).items(): if (creds["Expiration"] - now) <= 120: - __AssumeCache__[key].delete() + del __AssumeCache__[key] if role_arn in __AssumeCache__: c = __AssumeCache__[role_arn] diff --git a/salt/utils/cache.py b/salt/utils/cache.py index a78a1f70fc91..88e7fa24000e 100644 --- a/salt/utils/cache.py +++ b/salt/utils/cache.py @@ -6,6 +6,7 @@ import logging import os import re +import shutil import time import salt.config @@ -15,6 +16,8 @@ import salt.utils.dictupdate import salt.utils.files import salt.utils.msgpack +import salt.utils.path +import salt.version from salt.utils.zeromq import zmq log = logging.getLogger(__name__) @@ -345,3 +348,32 @@ def context_cache_wrap(*args, **kwargs): return func(*args, **kwargs) return context_cache_wrap + + +def verify_cache_version(cache_path): + """ + Check that the cached version matches the Salt version. + If the cached version does not match the Salt version, wipe the cache. 
+ + :return: ``True`` if cache version matches, otherwise ``False`` + """ + if not os.path.isdir(cache_path): + os.makedirs(cache_path) + with salt.utils.files.fopen( + salt.utils.path.join(cache_path, "cache_version"), "a+" + ) as file: + file.seek(0) + data = "\n".join(file.readlines()) + if data != salt.version.__version__: + log.warning(f"Cache version mismatch clearing: {repr(cache_path)}") + file.truncate(0) + file.write(salt.version.__version__) + for item in os.listdir(cache_path): + if item != "cache_version": + item_path = salt.utils.path.join(cache_path, item) + if os.path.isfile(item_path): + os.remove(item_path) + else: + shutil.rmtree(item_path) + return False + return True diff --git a/salt/utils/cloud.py b/salt/utils/cloud.py index 9edf006c299c..3e026a0bb573 100644 --- a/salt/utils/cloud.py +++ b/salt/utils/cloud.py @@ -10,8 +10,8 @@ import logging import multiprocessing import os -import pipes import re +import shlex import shutil import socket import stat @@ -63,7 +63,7 @@ from pypsexec.client import Client as PsExecClient from pypsexec.exceptions import SCMRException from pypsexec.scmr import Service as ScmrService - from smbprotocol.exceptions import SMBResponseException + from smbprotocol.exceptions import CannotDelete, SMBResponseException from smbprotocol.tree import TreeConnect logging.getLogger("smbprotocol").setLevel(logging.WARNING) @@ -199,7 +199,7 @@ def __ssh_gateway_arguments(kwargs): "-oUserKnownHostsFile=/dev/null", "-oControlPath=none", str(ssh_gateway_key), - "{}@{}".format(ssh_gateway_user, ssh_gateway), + f"{ssh_gateway_user}@{ssh_gateway}", "-p", str(ssh_gateway_port), str(ssh_gateway_command), @@ -228,18 +228,18 @@ def os_script(os_, vm_=None, opts=None, minion=""): # The user provided an absolute path to the deploy script, let's use it return __render_script(os_, vm_, opts, minion) - if os.path.isabs("{}.sh".format(os_)): + if os.path.isabs(f"{os_}.sh"): # The user provided an absolute path to the deploy script, although no # 
extension was provided. Let's use it anyway. - return __render_script("{}.sh".format(os_), vm_, opts, minion) + return __render_script(f"{os_}.sh", vm_, opts, minion) for search_path in opts["deploy_scripts_search_path"]: if os.path.isfile(os.path.join(search_path, os_)): return __render_script(os.path.join(search_path, os_), vm_, opts, minion) - if os.path.isfile(os.path.join(search_path, "{}.sh".format(os_))): + if os.path.isfile(os.path.join(search_path, f"{os_}.sh")): return __render_script( - os.path.join(search_path, "{}.sh".format(os_)), vm_, opts, minion + os.path.join(search_path, f"{os_}.sh"), vm_, opts, minion ) # No deploy script was found, return an empty string return "" @@ -416,7 +416,7 @@ def bootstrap(vm_, opts=None): ) if key_filename is not None and not os.path.isfile(key_filename): raise SaltCloudConfigError( - "The defined ssh_keyfile '{}' does not exist".format(key_filename) + f"The defined ssh_keyfile '{key_filename}' does not exist" ) has_ssh_agent = False if ( @@ -782,8 +782,8 @@ def wait_for_port( # Don't add new hosts to the host key database "-oStrictHostKeyChecking=no", # make sure ssh can time out on connection lose - "-oServerAliveInterval={}".format(server_alive_interval), - "-oServerAliveCountMax={}".format(server_alive_count_max), + f"-oServerAliveInterval={server_alive_interval}", + f"-oServerAliveCountMax={server_alive_count_max}", # Set hosts key database path to /dev/null, i.e., non-existing "-oUserKnownHostsFile=/dev/null", # Don't re-use the SSH connection. Less failures. 
@@ -808,21 +808,21 @@ def wait_for_port( ] ) # Netcat command testing remote port - command = "nc -z -w5 -q0 {} {}".format(host, port) + command = f"nc -z -w5 -q0 {host} {port}" # SSH command pcmd = "ssh {} {}@{} -p {} {}".format( " ".join(ssh_args), gateway["ssh_gateway_user"], ssh_gateway, ssh_gateway_port, - pipes.quote("date"), + shlex.quote("date"), ) cmd = "ssh {} {}@{} -p {} {}".format( " ".join(ssh_args), gateway["ssh_gateway_user"], ssh_gateway, ssh_gateway_port, - pipes.quote(command), + shlex.quote(command), ) log.debug("SSH command: '%s'", cmd) @@ -893,7 +893,7 @@ def __init__( service_name=None, ): self.service_name = service_name - self._exe_file = "{}.exe".format(self.service_name) + self._exe_file = f"{self.service_name}.exe" self._client = PsExecClient(server, username, password, port, encrypt) self._client._service = ScmrService(self.service_name, self._client.session) @@ -910,7 +910,12 @@ def connect(self): return self._client.connect() def disconnect(self): - self._client.cleanup() # This removes the lingering PAExec binary + try: + # This removes any lingering PAExec binaries + self._client.cleanup() + except CannotDelete as exc: + # We shouldn't hard crash here, so just log the error + log.debug("Exception cleaning up PAexec: %r", exc) return self._client.disconnect() def create_service(self): @@ -943,7 +948,7 @@ def remove_service(self, wait_timeout=10, sleep_wait=1): # delete the PAExec executable smb_tree = TreeConnect( self._client.session, - r"\\{}\ADMIN$".format(self._client.connection.server_name), + rf"\\{self._client.connection.server_name}\ADMIN$", ) log.info("Connecting to SMB Tree %s", smb_tree.share_name) smb_tree.connect() @@ -968,10 +973,10 @@ def run_winexe_command(cmd, args, host, username, password, port=445): """ Run a command remotely via the winexe executable """ - creds = "-U '{}%{}' //{}".format(username, password, host) - logging_creds = "-U '{}%XXX-REDACTED-XXX' //{}".format(username, host) - cmd = "winexe {} {} 
{}".format(creds, cmd, args) - logging_cmd = "winexe {} {} {}".format(logging_creds, cmd, args) + creds = f"-U '{username}%{password}' //{host}" + logging_creds = f"-U '{username}%XXX-REDACTED-XXX' //{host}" + cmd = f"winexe {creds} {cmd} {args}" + logging_cmd = f"winexe {logging_creds} {cmd} {args}" return win_cmd(cmd, logging_command=logging_cmd) @@ -979,7 +984,7 @@ def run_psexec_command(cmd, args, host, username, password, port=445): """ Run a command remotely using the psexec protocol """ - service_name = "PS-Exec-{}".format(uuid.uuid4()) + service_name = f"PS-Exec-{uuid.uuid4()}" with Client( host, username, password, port=port, encrypt=False, service_name=service_name ) as client: @@ -1098,7 +1103,7 @@ def validate_windows_cred_winexe( """ Check if the windows credentials are valid """ - cmd = "winexe -U '{}%{}' //{} \"hostname\"".format(username, password, host) + cmd = f"winexe -U '{username}%{password}' //{host} \"hostname\"" logging_cmd = "winexe -U '{}%XXX-REDACTED-XXX' //{} \"hostname\"".format( username, host ) @@ -1202,6 +1207,16 @@ def wait_for_passwd( time.sleep(trysleep) +def _format_master_param(master): + """ + If the master is a list, we need to convert it to a comma delimited string + Otherwise, we just return master + """ + if isinstance(master, list): + return ",".join(master) + return master + + def deploy_windows( host, port=445, @@ -1230,7 +1245,7 @@ def deploy_windows( winrm_port=5986, winrm_use_ssl=True, winrm_verify_ssl=True, - **kwargs + **kwargs, ): """ Copy the install files to a remote Windows box, and execute them @@ -1289,20 +1304,20 @@ def deploy_windows( salt.utils.smb.mkdirs("salttemp", conn=smb_conn) root_dir = "ProgramData/Salt Project/Salt" - salt.utils.smb.mkdirs("{}/conf/pki/minion".format(root_dir), conn=smb_conn) + salt.utils.smb.mkdirs(f"{root_dir}/conf/pki/minion", conn=smb_conn) root_dir = "ProgramData\\Salt Project\\Salt" if minion_pub: salt.utils.smb.put_str( minion_pub, - 
"{}\\conf\\pki\\minion\\minion.pub".format(root_dir), + f"{root_dir}\\conf\\pki\\minion\\minion.pub", conn=smb_conn, ) if minion_pem: salt.utils.smb.put_str( minion_pem, - "{}\\conf\\pki\\minion\\minion.pem".format(root_dir), + f"{root_dir}\\conf\\pki\\minion\\minion.pem", conn=smb_conn, ) @@ -1314,7 +1329,7 @@ def deploy_windows( try: salt.utils.smb.put_file( master_sign_pub_file, - "{}\\conf\\pki\\minion\\master_sign.pub".format(root_dir), + f"{root_dir}\\conf\\pki\\minion\\master_sign.pub", conn=smb_conn, ) except Exception as e: # pylint: disable=broad-except @@ -1332,26 +1347,27 @@ def deploy_windows( installer = comps[-1] salt.utils.smb.put_file( win_installer, - "salttemp\\{}".format(installer), + f"salttemp\\{installer}", "C$", conn=smb_conn, ) + cmd = f"c:\\salttemp\\{installer}" + args = [ + "/S", + f"/master={_format_master_param(master)}", + f"/minion-name={name}", + ] + if use_winrm: - winrm_cmd( - winrm_session, - "c:\\salttemp\\{}".format(installer), - ["/S", "/master={}".format(master), "/minion-name={}".format(name)], - ) + winrm_cmd(winrm_session, cmd, args) else: - cmd = "c:\\salttemp\\{}".format(installer) - args = "/S /master={} /minion-name={}".format(master, name) stdout, stderr, ret_code = run_psexec_command( - cmd, args, host, username, password + cmd, " ".join(args), host, username, password ) if ret_code != 0: - raise Exception("Fail installer {}".format(ret_code)) + raise Exception(f"Fail installer {ret_code}") # Copy over minion_conf if minion_conf: @@ -1367,7 +1383,7 @@ def deploy_windows( if minion_grains: salt.utils.smb.put_str( salt_config_to_yaml(minion_grains, line_break="\r\n"), - "{}\\conf\\grains".format(root_dir), + f"{root_dir}\\conf\\grains", conn=smb_conn, ) # Add special windows minion configuration @@ -1384,7 +1400,7 @@ def deploy_windows( minion_conf = dict(minion_conf, **windows_minion_conf) salt.utils.smb.put_str( salt_config_to_yaml(minion_conf, line_break="\r\n"), - "{}\\conf\\minion".format(root_dir), + 
f"{root_dir}\\conf\\minion", conn=smb_conn, ) # Delete C:\salttmp\ and installer file @@ -1394,7 +1410,7 @@ def deploy_windows( winrm_cmd(winrm_session, "rmdir", ["/Q", "/S", "C:\\salttemp\\"]) else: salt.utils.smb.delete_file( - "salttemp\\{}".format(installer), "C$", conn=smb_conn + f"salttemp\\{installer}", "C$", conn=smb_conn ) salt.utils.smb.delete_directory("salttemp", "C$", conn=smb_conn) # Shell out to psexec to ensure salt-minion service started @@ -1418,8 +1434,8 @@ def deploy_windows( # Fire deploy action fire_event( "event", - "{} has been deployed at {}".format(name, host), - "salt/cloud/{}/deploy_windows".format(name), + f"{name} has been deployed at {host}", + f"salt/cloud/{name}/deploy_windows", args={"name": name}, sock_dir=opts.get("sock_dir", os.path.join(__opts__["sock_dir"], "master")), transport=opts.get("transport", "zeromq"), @@ -1469,7 +1485,7 @@ def deploy_script( master_sign_pub_file=None, cloud_grains=None, force_minion_config=False, - **kwargs + **kwargs, ): """ Copy a deploy script to a remote server, execute it, and remove it @@ -1485,7 +1501,7 @@ def deploy_script( ) if key_filename is not None and not os.path.isfile(key_filename): raise SaltCloudConfigError( - "The defined key_filename '{}' does not exist".format(key_filename) + f"The defined key_filename '{key_filename}' does not exist" ) gateway = None @@ -1532,35 +1548,28 @@ def deploy_script( ssh_kwargs["password"] = password if root_cmd( - "test -e '{}'".format(tmp_dir), - tty, - sudo, - allow_failure=True, - **ssh_kwargs + f"test -e '{tmp_dir}'", tty, sudo, allow_failure=True, **ssh_kwargs ): ret = root_cmd( - "sh -c \"( mkdir -p -m 700 '{}' )\"".format(tmp_dir), + f"sh -c \"( mkdir -p -m 700 '{tmp_dir}' )\"", tty, sudo, - **ssh_kwargs + **ssh_kwargs, ) if ret: raise SaltCloudSystemExit( - "Can't create temporary directory in {} !".format(tmp_dir) + f"Can't create temporary directory in {tmp_dir} !" 
) if sudo: comps = tmp_dir.lstrip("/").rstrip("/").split("/") if comps: if len(comps) > 1 or comps[0] != "tmp": ret = root_cmd( - 'chown {} "{}"'.format(username, tmp_dir), - tty, - sudo, - **ssh_kwargs + f'chown {username} "{tmp_dir}"', tty, sudo, **ssh_kwargs ) if ret: raise SaltCloudSystemExit( - "Cant set {} ownership on {}".format(username, tmp_dir) + f"Cant set {username} ownership on {tmp_dir}" ) if not isinstance(file_map, dict): @@ -1590,15 +1599,13 @@ def deploy_script( remote_dir = os.path.dirname(remote_file) if remote_dir not in remote_dirs: - root_cmd( - "mkdir -p '{}'".format(remote_dir), tty, sudo, **ssh_kwargs - ) + root_cmd(f"mkdir -p '{remote_dir}'", tty, sudo, **ssh_kwargs) if ssh_kwargs["username"] != "root": root_cmd( "chown {} '{}'".format(ssh_kwargs["username"], remote_dir), tty, sudo, - **ssh_kwargs + **ssh_kwargs, ) remote_dirs.append(remote_dir) ssh_file(opts, remote_file, kwargs=ssh_kwargs, local_file=local_file) @@ -1606,21 +1613,21 @@ def deploy_script( # Minion configuration if minion_pem: - ssh_file(opts, "{}/minion.pem".format(tmp_dir), minion_pem, ssh_kwargs) + ssh_file(opts, f"{tmp_dir}/minion.pem", minion_pem, ssh_kwargs) ret = root_cmd( - "chmod 600 '{}/minion.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs + f"chmod 600 '{tmp_dir}/minion.pem'", tty, sudo, **ssh_kwargs ) if ret: raise SaltCloudSystemExit( - "Can't set perms on {}/minion.pem".format(tmp_dir) + f"Can't set perms on {tmp_dir}/minion.pem" ) if minion_pub: - ssh_file(opts, "{}/minion.pub".format(tmp_dir), minion_pub, ssh_kwargs) + ssh_file(opts, f"{tmp_dir}/minion.pub", minion_pub, ssh_kwargs) if master_sign_pub_file: ssh_file( opts, - "{}/master_sign.pub".format(tmp_dir), + f"{tmp_dir}/master_sign.pub", kwargs=ssh_kwargs, local_file=master_sign_pub_file, ) @@ -1638,7 +1645,7 @@ def deploy_script( if minion_grains: ssh_file( opts, - "{}/grains".format(tmp_dir), + f"{tmp_dir}/grains", salt_config_to_yaml(minion_grains), ssh_kwargs, ) @@ -1646,24 +1653,22 @@ def 
deploy_script( minion_conf["grains"] = {"salt-cloud": cloud_grains} ssh_file( opts, - "{}/minion".format(tmp_dir), + f"{tmp_dir}/minion", salt_config_to_yaml(minion_conf), ssh_kwargs, ) # Master configuration if master_pem: - ssh_file(opts, "{}/master.pem".format(tmp_dir), master_pem, ssh_kwargs) + ssh_file(opts, f"{tmp_dir}/master.pem", master_pem, ssh_kwargs) ret = root_cmd( - "chmod 600 '{}/master.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs + f"chmod 600 '{tmp_dir}/master.pem'", tty, sudo, **ssh_kwargs ) if ret: - raise SaltCloudSystemExit( - "Cant set perms on {}/master.pem".format(tmp_dir) - ) + raise SaltCloudSystemExit(f"Cant set perms on {tmp_dir}/master.pem") if master_pub: - ssh_file(opts, "{}/master.pub".format(tmp_dir), master_pub, ssh_kwargs) + ssh_file(opts, f"{tmp_dir}/master.pub", master_pub, ssh_kwargs) if master_conf: if not isinstance(master_conf, dict): @@ -1677,34 +1682,31 @@ def deploy_script( ssh_file( opts, - "{}/master".format(tmp_dir), + f"{tmp_dir}/master", salt_config_to_yaml(master_conf), ssh_kwargs, ) # XXX: We need to make these paths configurable - preseed_minion_keys_tempdir = "{}/preseed-minion-keys".format(tmp_dir) + preseed_minion_keys_tempdir = f"{tmp_dir}/preseed-minion-keys" if preseed_minion_keys is not None: # Create remote temp dir ret = root_cmd( - "mkdir '{}'".format(preseed_minion_keys_tempdir), - tty, - sudo, - **ssh_kwargs + f"mkdir '{preseed_minion_keys_tempdir}'", tty, sudo, **ssh_kwargs ) if ret: raise SaltCloudSystemExit( - "Cant create {}".format(preseed_minion_keys_tempdir) + f"Cant create {preseed_minion_keys_tempdir}" ) ret = root_cmd( - "chmod 700 '{}'".format(preseed_minion_keys_tempdir), + f"chmod 700 '{preseed_minion_keys_tempdir}'", tty, sudo, - **ssh_kwargs + **ssh_kwargs, ) if ret: raise SaltCloudSystemExit( - "Can't set perms on {}".format(preseed_minion_keys_tempdir) + f"Can't set perms on {preseed_minion_keys_tempdir}" ) if ssh_kwargs["username"] != "root": root_cmd( @@ -1713,7 +1715,7 @@ def 
deploy_script( ), tty, sudo, - **ssh_kwargs + **ssh_kwargs, ) # Copy pre-seed minion keys @@ -1723,10 +1725,10 @@ def deploy_script( if ssh_kwargs["username"] != "root": root_cmd( - "chown -R root '{}'".format(preseed_minion_keys_tempdir), + f"chown -R root '{preseed_minion_keys_tempdir}'", tty, sudo, - **ssh_kwargs + **ssh_kwargs, ) if ret: raise SaltCloudSystemExit( @@ -1740,25 +1742,21 @@ def deploy_script( for command in preflight_cmds: cmd_ret = root_cmd(command, tty, sudo, **ssh_kwargs) if cmd_ret: - raise SaltCloudSystemExit( - "Pre-flight command failed: '{}'".format(command) - ) + raise SaltCloudSystemExit(f"Pre-flight command failed: '{command}'") # The actual deploy script if script: # got strange escaping issues with sudoer, going onto a # subshell fixes that - ssh_file(opts, "{}/deploy.sh".format(tmp_dir), script, ssh_kwargs) + ssh_file(opts, f"{tmp_dir}/deploy.sh", script, ssh_kwargs) ret = root_cmd( - "sh -c \"( chmod +x '{}/deploy.sh' )\";exit $?".format(tmp_dir), + f"sh -c \"( chmod +x '{tmp_dir}/deploy.sh' )\";exit $?", tty, sudo, - **ssh_kwargs + **ssh_kwargs, ) if ret: - raise SaltCloudSystemExit( - "Can't set perms on {}/deploy.sh".format(tmp_dir) - ) + raise SaltCloudSystemExit(f"Can't set perms on {tmp_dir}/deploy.sh") time_used = time.mktime(time.localtime()) - time.mktime(starttime) newtimeout = timeout - time_used @@ -1774,7 +1772,7 @@ def deploy_script( kwargs=dict( name=name, sock_dir=sock_dir, timeout=newtimeout, queue=queue ), - name="DeployScriptCheckAuth({})".format(name), + name=f"DeployScriptCheckAuth({name})", ) log.debug("Starting new process to wait for salt-minion") process.start() @@ -1782,7 +1780,7 @@ def deploy_script( # Run the deploy script if script: if "bootstrap-salt" in script: - deploy_command += " -c '{}'".format(tmp_dir) + deploy_command += f" -c '{tmp_dir}'" if force_minion_config: deploy_command += " -F" if make_syndic is True: @@ -1794,9 +1792,9 @@ def deploy_script( if keep_tmp is True: deploy_command += " -K" 
if preseed_minion_keys is not None: - deploy_command += " -k '{}'".format(preseed_minion_keys_tempdir) + deploy_command += f" -k '{preseed_minion_keys_tempdir}'" if script_args: - deploy_command += " {}".format(script_args) + deploy_command += f" {script_args}" if script_env: if not isinstance(script_env, dict): @@ -1815,15 +1813,15 @@ def deploy_script( # Upload our environ setter wrapper ssh_file( opts, - "{}/environ-deploy-wrapper.sh".format(tmp_dir), + f"{tmp_dir}/environ-deploy-wrapper.sh", "\n".join(environ_script_contents), ssh_kwargs, ) root_cmd( - "chmod +x '{}/environ-deploy-wrapper.sh'".format(tmp_dir), + f"chmod +x '{tmp_dir}/environ-deploy-wrapper.sh'", tty, sudo, - **ssh_kwargs + **ssh_kwargs, ) # The deploy command is now our wrapper deploy_command = "'{}/environ-deploy-wrapper.sh'".format( @@ -1831,22 +1829,20 @@ def deploy_script( ) if root_cmd(deploy_command, tty, sudo, **ssh_kwargs) != 0: raise SaltCloudSystemExit( - "Executing the command '{}' failed".format(deploy_command) + f"Executing the command '{deploy_command}' failed" ) log.debug("Executed command '%s'", deploy_command) # Remove the deploy script if not keep_tmp: - root_cmd( - "rm -f '{}/deploy.sh'".format(tmp_dir), tty, sudo, **ssh_kwargs - ) + root_cmd(f"rm -f '{tmp_dir}/deploy.sh'", tty, sudo, **ssh_kwargs) log.debug("Removed %s/deploy.sh", tmp_dir) if script_env: root_cmd( - "rm -f '{}/environ-deploy-wrapper.sh'".format(tmp_dir), + f"rm -f '{tmp_dir}/environ-deploy-wrapper.sh'", tty, sudo, - **ssh_kwargs + **ssh_kwargs, ) log.debug("Removed %s/environ-deploy-wrapper.sh", tmp_dir) @@ -1855,57 +1851,40 @@ def deploy_script( else: # Remove minion configuration if minion_pub: - root_cmd( - "rm -f '{}/minion.pub'".format(tmp_dir), tty, sudo, **ssh_kwargs - ) + root_cmd(f"rm -f '{tmp_dir}/minion.pub'", tty, sudo, **ssh_kwargs) log.debug("Removed %s/minion.pub", tmp_dir) if minion_pem: - root_cmd( - "rm -f '{}/minion.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs - ) + root_cmd(f"rm -f 
'{tmp_dir}/minion.pem'", tty, sudo, **ssh_kwargs) log.debug("Removed %s/minion.pem", tmp_dir) if minion_conf: - root_cmd( - "rm -f '{}/grains'".format(tmp_dir), tty, sudo, **ssh_kwargs - ) + root_cmd(f"rm -f '{tmp_dir}/grains'", tty, sudo, **ssh_kwargs) log.debug("Removed %s/grains", tmp_dir) - root_cmd( - "rm -f '{}/minion'".format(tmp_dir), tty, sudo, **ssh_kwargs - ) + root_cmd(f"rm -f '{tmp_dir}/minion'", tty, sudo, **ssh_kwargs) log.debug("Removed %s/minion", tmp_dir) if master_sign_pub_file: root_cmd( - "rm -f {}/master_sign.pub".format(tmp_dir), - tty, - sudo, - **ssh_kwargs + f"rm -f {tmp_dir}/master_sign.pub", tty, sudo, **ssh_kwargs ) log.debug("Removed %s/master_sign.pub", tmp_dir) # Remove master configuration if master_pub: - root_cmd( - "rm -f '{}/master.pub'".format(tmp_dir), tty, sudo, **ssh_kwargs - ) + root_cmd(f"rm -f '{tmp_dir}/master.pub'", tty, sudo, **ssh_kwargs) log.debug("Removed %s/master.pub", tmp_dir) if master_pem: - root_cmd( - "rm -f '{}/master.pem'".format(tmp_dir), tty, sudo, **ssh_kwargs - ) + root_cmd(f"rm -f '{tmp_dir}/master.pem'", tty, sudo, **ssh_kwargs) log.debug("Removed %s/master.pem", tmp_dir) if master_conf: - root_cmd( - "rm -f '{}/master'".format(tmp_dir), tty, sudo, **ssh_kwargs - ) + root_cmd(f"rm -f '{tmp_dir}/master'", tty, sudo, **ssh_kwargs) log.debug("Removed %s/master", tmp_dir) # Remove pre-seed keys directory if preseed_minion_keys is not None: root_cmd( - "rm -rf '{}'".format(preseed_minion_keys_tempdir), + f"rm -rf '{preseed_minion_keys_tempdir}'", tty, sudo, - **ssh_kwargs + **ssh_kwargs, ) log.debug("Removed %s", preseed_minion_keys_tempdir) @@ -1920,15 +1899,13 @@ def deploy_script( # for line in output: # print(line) log.info("Executing %s on the salt-minion", start_action) - root_cmd( - "salt-call {}".format(start_action), tty, sudo, **ssh_kwargs - ) + root_cmd(f"salt-call {start_action}", tty, sudo, **ssh_kwargs) log.info("Finished executing %s on the salt-minion", start_action) # Fire deploy action 
fire_event( "event", - "{} has been deployed at {}".format(name, host), - "salt/cloud/{}/deploy_script".format(name), + f"{name} has been deployed at {host}", + f"salt/cloud/{name}/deploy_script", args={"name": name, "host": host}, sock_dir=opts.get( "sock_dir", os.path.join(__opts__["sock_dir"], "master") @@ -1961,7 +1938,7 @@ def run_inline_script( tty=None, opts=None, tmp_dir="/tmp/.saltcloud-inline_script", - **kwargs + **kwargs, ): """ Run the inline script commands, one by one @@ -2018,11 +1995,11 @@ def run_inline_script( # TODO: check edge cases (e.g. ssh gateways, salt deploy disabled, etc.) if ( root_cmd( - 'test -e \\"{}\\"'.format(tmp_dir), + f'test -e \\"{tmp_dir}\\"', tty, sudo, allow_failure=True, - **ssh_kwargs + **ssh_kwargs, ) and inline_script ): @@ -2030,11 +2007,11 @@ def run_inline_script( for cmd_line in inline_script: log.info("Executing inline command: %s", cmd_line) ret = root_cmd( - 'sh -c "( {} )"'.format(cmd_line), + f'sh -c "( {cmd_line} )"', tty, sudo, allow_failure=True, - **ssh_kwargs + **ssh_kwargs, ) if ret: log.info("[%s] Output: %s", cmd_line, ret) @@ -2138,7 +2115,7 @@ def _exec_ssh_cmd(cmd, error_msg=None, allow_failure=False, **kwargs): time.sleep(0.5) if proc.exitstatus != 0 and allow_failure is False: raise SaltCloudSystemExit( - "Command '{}' failed. Exit code: {}".format(cmd, proc.exitstatus) + f"Command '{cmd}' failed. 
Exit code: {proc.exitstatus}" ) return proc.exitstatus except salt.utils.vt.TerminalException as err: @@ -2241,7 +2218,7 @@ def scp_file(dest_path, contents=None, kwargs=None, local_file=None): cmd, error_msg="Failed to upload file '{0}': {1}\n{2}", password_retries=3, - **kwargs + **kwargs, ) finally: if contents is not None: @@ -2359,7 +2336,7 @@ def sftp_file(dest_path, contents=None, kwargs=None, local_file=None): cmd, error_msg="Failed to upload file '{0}': {1}\n{2}", password_retries=3, - **kwargs + **kwargs, ) finally: if contents is not None: @@ -2419,11 +2396,11 @@ def root_cmd(command, tty, sudo, allow_failure=False, **kwargs): if sudo: if sudo_password is None: - command = "sudo {}".format(command) + command = f"sudo {command}" logging_command = command else: - logging_command = 'sudo -S "XXX-REDACTED-XXX" {}'.format(command) - command = "sudo -S {}".format(command) + logging_command = f'sudo -S "XXX-REDACTED-XXX" {command}' + command = f"sudo -S {command}" log.debug("Using sudo to run command %s", logging_command) @@ -2442,9 +2419,9 @@ def root_cmd(command, tty, sudo, allow_failure=False, **kwargs): ssh_args.extend( [ # Don't add new hosts to the host key database - "-oStrictHostKeyChecking={}".format(host_key_checking), + f"-oStrictHostKeyChecking={host_key_checking}", # Set hosts key database path to /dev/null, i.e., non-existing - "-oUserKnownHostsFile={}".format(known_hosts_file), + f"-oUserKnownHostsFile={known_hosts_file}", # Don't re-use the SSH connection. Less failures. 
"-oControlPath=none", ] @@ -2477,12 +2454,12 @@ def root_cmd(command, tty, sudo, allow_failure=False, **kwargs): cmd = "ssh {0} {1[username]}@{1[hostname]} ".format(" ".join(ssh_args), kwargs) logging_command = cmd + logging_command - cmd = cmd + pipes.quote(command) + cmd = cmd + shlex.quote(command) hard_timeout = kwargs.get("hard_timeout") if hard_timeout is not None: - logging_command = "timeout {} {}".format(hard_timeout, logging_command) - cmd = "timeout {} {}".format(hard_timeout, cmd) + logging_command = f"timeout {hard_timeout} {logging_command}" + cmd = f"timeout {hard_timeout} {cmd}" log.debug("SSH command: '%s'", logging_command) @@ -2504,7 +2481,7 @@ def check_auth(name, sock_dir=None, queue=None, timeout=300): ret = event.get_event(full=True) if ret is None: continue - if ret["tag"] == "salt/minion/{}/start".format(name): + if ret["tag"] == f"salt/minion/{name}/start": queue.put(name) newtimeout = 0 log.debug("Minion %s is ready to receive commands", name) @@ -2550,7 +2527,7 @@ def check_name(name, safe_chars): """ Check whether the specified name contains invalid characters """ - regexp = re.compile("[^{}]".format(safe_chars)) + regexp = re.compile(f"[^{safe_chars}]") if regexp.search(name): raise SaltCloudException( "{} contains characters not supported by this cloud provider. 
" @@ -2844,7 +2821,7 @@ def request_minion_cachedir( "provider": provider, } - fname = "{}.p".format(minion_id) + fname = f"{minion_id}.p" path = os.path.join(base, "requested", fname) with salt.utils.files.fopen(path, "wb") as fh_: salt.utils.msgpack.dump(data, fh_, encoding=MSGPACK_ENCODING) @@ -2875,7 +2852,7 @@ def change_minion_cachedir( if base is None: base = __opts__["cachedir"] - fname = "{}.p".format(minion_id) + fname = f"{minion_id}.p" path = os.path.join(base, cachedir, fname) with salt.utils.files.fopen(path, "r") as fh_: @@ -2898,7 +2875,7 @@ def activate_minion_cachedir(minion_id, base=None): if base is None: base = __opts__["cachedir"] - fname = "{}.p".format(minion_id) + fname = f"{minion_id}.p" src = os.path.join(base, "requested", fname) dst = os.path.join(base, "active") shutil.move(src, dst) @@ -2920,7 +2897,7 @@ def delete_minion_cachedir(minion_id, provider, opts, base=None): base = __opts__["cachedir"] driver = next(iter(__opts__["providers"][provider].keys())) - fname = "{}.p".format(minion_id) + fname = f"{minion_id}.p" for cachedir in "requested", "active": path = os.path.join(base, cachedir, driver, provider, fname) log.debug("path: %s", path) @@ -3013,7 +2990,7 @@ def update_bootstrap(config, url=None): # in last case, assuming we got a script content else: script_content = url - script_name = "{}.sh".format(hashlib.sha1(script_content).hexdigest()) + script_name = f"{hashlib.sha1(script_content).hexdigest()}.sh" if not script_content: raise ValueError("No content in bootstrap script !") @@ -3107,7 +3084,7 @@ def cache_node_list(nodes, provider, opts): for node in nodes: diff_node_cache(prov_dir, node, nodes[node], opts) - path = os.path.join(prov_dir, "{}.p".format(node)) + path = os.path.join(prov_dir, f"{node}.p") with salt.utils.files.fopen(path, "wb") as fh_: salt.utils.msgpack.dump(nodes[node], fh_, encoding=MSGPACK_ENCODING) @@ -3162,7 +3139,7 @@ def missing_node_cache(prov_dir, node_list, provider, opts): fire_event( "event", 
"cached node missing from provider", - "salt/cloud/{}/cache_node_missing".format(node), + f"salt/cloud/{node}/cache_node_missing", args={"missing node": node}, sock_dir=opts.get( "sock_dir", os.path.join(__opts__["sock_dir"], "master") @@ -3190,7 +3167,7 @@ def diff_node_cache(prov_dir, node, new_data, opts): if node is None: return - path = "{}.p".format(os.path.join(prov_dir, node)) + path = f"{os.path.join(prov_dir, node)}.p" if not os.path.exists(path): event_data = _strip_cache_events(new_data, opts) @@ -3198,7 +3175,7 @@ def diff_node_cache(prov_dir, node, new_data, opts): fire_event( "event", "new node found", - "salt/cloud/{}/cache_node_new".format(node), + f"salt/cloud/{node}/cache_node_new", args={"new_data": event_data}, sock_dir=opts.get("sock_dir", os.path.join(__opts__["sock_dir"], "master")), transport=opts.get("transport", "zeromq"), @@ -3222,7 +3199,7 @@ def diff_node_cache(prov_dir, node, new_data, opts): fire_event( "event", "node data differs", - "salt/cloud/{}/cache_node_diff".format(node), + f"salt/cloud/{node}/cache_node_diff", args={ "new_data": _strip_cache_events(new_data, opts), "cache_data": _strip_cache_events(cache_data, opts), @@ -3266,7 +3243,7 @@ def _salt_cloud_force_ascii(exc): errors. 
""" if not isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)): - raise TypeError("Can't handle {}".format(exc)) + raise TypeError(f"Can't handle {exc}") unicode_trans = { # Convert non-breaking space to space @@ -3326,7 +3303,7 @@ def store_password_in_keyring(credential_id, username, password=None): # pylint: enable=import-error if password is None: - prompt = "Please enter password for {}: ".format(credential_id) + prompt = f"Please enter password for {credential_id}: " try: password = getpass.getpass(prompt) except EOFError: diff --git a/salt/utils/crypt.py b/salt/utils/crypt.py index ac3c220cf312..b89a477a380c 100644 --- a/salt/utils/crypt.py +++ b/salt/utils/crypt.py @@ -143,6 +143,13 @@ def pem_finger(path=None, key=None, sum_type="sha256"): with salt.utils.files.fopen(path, "rb") as fp_: key = b"".join([x for x in fp_.readlines() if x.strip()][1:-1]) + # We should never have \r\n in a key file. This will cause the + # finger to be different even though the only difference is the line + # endings. 
+ key = key.replace(b"\r\n", b"\n") + + if not isinstance(key, bytes): + key = key.encode("utf-8") pre = getattr(hashlib, sum_type)(key).hexdigest() finger = "" diff --git a/salt/utils/event.py b/salt/utils/event.py index be3caaa0da3f..a1de7e5ab4fb 100644 --- a/salt/utils/event.py +++ b/salt/utils/event.py @@ -902,7 +902,8 @@ def _fire_ret_load_specific_fun(self, load, fun_index=0): data["success"] = False data["return"] = "Error: {}.{}".format(tags[0], tags[-1]) data["fun"] = fun - data["user"] = load["user"] + if "user" in load: + data["user"] = load["user"] self.fire_event( data, tagify([load["jid"], "sub", load["id"], "error", fun], "job"), @@ -1212,7 +1213,7 @@ def run(self): ): os.chmod( # nosec os.path.join(self.opts["sock_dir"], "master_event_pub.ipc"), - 0o666, + 0o660, ) atexit.register(self.close) diff --git a/salt/utils/extmods.py b/salt/utils/extmods.py index c3574436b6c9..6a4d5c14440c 100644 --- a/salt/utils/extmods.py +++ b/salt/utils/extmods.py @@ -11,6 +11,7 @@ import salt.utils.hashutils import salt.utils.path import salt.utils.url +from salt.config import DEFAULT_HASH_TYPE log = logging.getLogger(__name__) @@ -32,7 +33,14 @@ def _listdir_recursively(rootdir): return file_list -def sync(opts, form, saltenv=None, extmod_whitelist=None, extmod_blacklist=None): +def sync( + opts, + form, + saltenv=None, + extmod_whitelist=None, + extmod_blacklist=None, + force_local=False, +): """ Sync custom modules into the extension_modules directory """ @@ -75,58 +83,60 @@ def sync(opts, form, saltenv=None, extmod_whitelist=None, extmod_blacklist=None) "Cannot create cache module directory %s. 
Check permissions.", mod_dir, ) - fileclient = salt.fileclient.get_file_client(opts) - for sub_env in saltenv: - log.info("Syncing %s for environment '%s'", form, sub_env) - cache = [] - log.info("Loading cache from %s, for %s", source, sub_env) - # Grab only the desired files (.py, .pyx, .so) - cache.extend( - fileclient.cache_dir( - source, - sub_env, - include_empty=False, - include_pat=r"E@\.(pyx?|so|zip)$", - exclude_pat=None, + with salt.fileclient.get_file_client( + opts, pillar=False, force_local=force_local + ) as fileclient: + for sub_env in saltenv: + log.info("Syncing %s for environment '%s'", form, sub_env) + cache = [] + log.info("Loading cache from %s, for %s", source, sub_env) + # Grab only the desired files (.py, .pyx, .so) + cache.extend( + fileclient.cache_dir( + source, + sub_env, + include_empty=False, + include_pat=r"E@\.(pyx?|so|zip)$", + exclude_pat=None, + ) ) - ) - local_cache_dir = os.path.join( - opts["cachedir"], "files", sub_env, "_{}".format(form) - ) - log.debug("Local cache dir: '%s'", local_cache_dir) - for fn_ in cache: - relpath = os.path.relpath(fn_, local_cache_dir) - relname = os.path.splitext(relpath)[0].replace(os.sep, ".") - if ( - extmod_whitelist - and form in extmod_whitelist - and relname not in extmod_whitelist[form] - ): - continue - if ( - extmod_blacklist - and form in extmod_blacklist - and relname in extmod_blacklist[form] - ): - continue - remote.add(relpath) - dest = os.path.join(mod_dir, relpath) - log.info("Copying '%s' to '%s'", fn_, dest) - if os.path.isfile(dest): - # The file is present, if the sum differs replace it - hash_type = opts.get("hash_type", "md5") - src_digest = salt.utils.hashutils.get_hash(fn_, hash_type) - dst_digest = salt.utils.hashutils.get_hash(dest, hash_type) - if src_digest != dst_digest: - # The downloaded file differs, replace! 
+ local_cache_dir = os.path.join( + opts["cachedir"], "files", sub_env, "_{}".format(form) + ) + log.debug("Local cache dir: '%s'", local_cache_dir) + for fn_ in cache: + relpath = os.path.relpath(fn_, local_cache_dir) + relname = os.path.splitext(relpath)[0].replace(os.sep, ".") + if ( + extmod_whitelist + and form in extmod_whitelist + and relname not in extmod_whitelist[form] + ): + continue + if ( + extmod_blacklist + and form in extmod_blacklist + and relname in extmod_blacklist[form] + ): + continue + remote.add(relpath) + dest = os.path.join(mod_dir, relpath) + log.info("Copying '%s' to '%s'", fn_, dest) + if os.path.isfile(dest): + # The file is present, if the sum differs replace it + hash_type = opts.get("hash_type", DEFAULT_HASH_TYPE) + src_digest = salt.utils.hashutils.get_hash(fn_, hash_type) + dst_digest = salt.utils.hashutils.get_hash(dest, hash_type) + if src_digest != dst_digest: + # The downloaded file differs, replace! + shutil.copyfile(fn_, dest) + ret.append("{}.{}".format(form, relname)) + else: + dest_dir = os.path.dirname(dest) + if not os.path.isdir(dest_dir): + os.makedirs(dest_dir) shutil.copyfile(fn_, dest) ret.append("{}.{}".format(form, relname)) - else: - dest_dir = os.path.dirname(dest) - if not os.path.isdir(dest_dir): - os.makedirs(dest_dir) - shutil.copyfile(fn_, dest) - ret.append("{}.{}".format(form, relname)) touched = bool(ret) if opts["clean_dynamic_modules"] is True: diff --git a/salt/utils/gitfs.py b/salt/utils/gitfs.py index cc9895d8ab9c..a197921f6efb 100644 --- a/salt/utils/gitfs.py +++ b/salt/utils/gitfs.py @@ -2,7 +2,7 @@ Classes which provide the shared base for GitFS, git_pillar, and winrepo """ - +import base64 import contextlib import copy import errno @@ -11,6 +11,7 @@ import hashlib import io import logging +import multiprocessing import os import shlex import shutil @@ -22,6 +23,7 @@ import salt.ext.tornado.ioloop import salt.fileserver +import salt.utils.cache import salt.utils.configparser import 
salt.utils.data import salt.utils.files @@ -34,6 +36,7 @@ import salt.utils.url import salt.utils.user import salt.utils.versions +from salt.config import DEFAULT_HASH_TYPE from salt.config import DEFAULT_MASTER_OPTS as _DEFAULT_MASTER_OPTS from salt.exceptions import FileserverConfigError, GitLockError, get_error_message from salt.utils.event import tagify @@ -225,6 +228,10 @@ class GitProvider: invoking the parent class' __init__. """ + # master lock should only be locked for very short periods of times "seconds" + # the master lock should be used when ever git provider reads or writes to one if it locks + _master_lock = multiprocessing.Lock() + def __init__( self, opts, @@ -245,7 +252,6 @@ def __init__( val_cb=lambda x, y: str(y), ) self.conf = copy.deepcopy(per_remote_defaults) - # Remove the 'salt://' from the beginning of any globally-defined # per-saltenv mountpoints for saltenv, saltenv_conf in self.global_saltenv.items(): @@ -449,17 +455,38 @@ def __init__( self.id, ) failhard(self.role) - - hash_type = getattr(hashlib, self.opts.get("hash_type", "md5")) - # We loaded this data from yaml configuration files, so, its safe - # to use UTF-8 - self.hash = hash_type(self.id.encode("utf-8")).hexdigest() - self.cachedir_basename = getattr(self, "name", self.hash) - self.cachedir = salt.utils.path.join(cache_root, self.cachedir_basename) - self.linkdir = salt.utils.path.join(cache_root, "links", self.cachedir_basename) - - if not os.path.isdir(self.cachedir): - os.makedirs(self.cachedir) + if hasattr(self, "name"): + self._cache_basehash = self.name + else: + hash_type = getattr(hashlib, self.opts.get("hash_type", DEFAULT_HASH_TYPE)) + # We loaded this data from yaml configuration files, so, its safe + # to use UTF-8 + self._cache_basehash = str( + base64.b64encode(hash_type(self.id.encode("utf-8")).digest()), + encoding="ascii", # base64 only outputs ascii + ).replace( + "/", "_" + ) # replace "/" with "_" to not cause trouble with file system + self._cache_hash 
= salt.utils.path.join(cache_root, self._cache_basehash) + self._cache_basename = "_" + if self.id.startswith("__env__"): + try: + self._cache_basename = self.get_checkout_target() + except AttributeError: + log.critical(f"__env__ cant generate basename: {self.role} {self.id}") + failhard(self.role) + self._cache_full_basename = salt.utils.path.join( + self._cache_basehash, self._cache_basename + ) + self._cachedir = salt.utils.path.join(self._cache_hash, self._cache_basename) + self._salt_working_dir = salt.utils.path.join( + cache_root, "work", self._cache_full_basename + ) + self._linkdir = salt.utils.path.join( + cache_root, "links", self._cache_full_basename + ) + if not os.path.isdir(self._cachedir): + os.makedirs(self._cachedir) try: self.new = self.init_remote() @@ -471,6 +498,32 @@ def __init__( msg += " Perhaps git is not available." log.critical(msg, exc_info=True) failhard(self.role) + self.verify_auth() + self.setup_callbacks() + if not os.path.isdir(self._salt_working_dir): + os.makedirs(self._salt_working_dir) + self.fetch_request_check() + + def get_cache_basehash(self): + return self._cache_basehash + + def get_cache_hash(self): + return self._cache_hash + + def get_cache_basename(self): + return self._cache_basename + + def get_cache_full_basename(self): + return self._cache_full_basename + + def get_cachedir(self): + return self._cachedir + + def get_linkdir(self): + return self._linkdir + + def get_salt_working_dir(self): + return self._salt_working_dir def _get_envs_from_ref_paths(self, refs): """ @@ -512,7 +565,7 @@ def _check_ref(env_set, rname): return ret def _get_lock_file(self, lock_type="update"): - return salt.utils.path.join(self.gitdir, lock_type + ".lk") + return salt.utils.path.join(self._salt_working_dir, lock_type + ".lk") @classmethod def add_conf_overlay(cls, name): @@ -599,7 +652,7 @@ def check_root(self): # No need to pass an environment to self.root() here since per-saltenv # configuration is a gitfs-only feature and 
check_root() is not used # for gitfs. - root_dir = salt.utils.path.join(self.cachedir, self.root()).rstrip(os.sep) + root_dir = salt.utils.path.join(self._cachedir, self.root()).rstrip(os.sep) if os.path.isdir(root_dir): return root_dir log.error( @@ -662,6 +715,19 @@ def clear_lock(self, lock_type="update"): """ Clear update.lk """ + if self.__class__._master_lock.acquire(timeout=60) is False: + # if gitfs works right we should never see this timeout error. + log.error("gitfs master lock timeout!") + raise TimeoutError("gitfs master lock timeout!") + try: + return self._clear_lock(lock_type) + finally: + self.__class__._master_lock.release() + + def _clear_lock(self, lock_type="update"): + """ + Clear update.lk without MultiProcessing locks + """ lock_file = self._get_lock_file(lock_type=lock_type) def _add_error(errlist, exc): @@ -758,7 +824,7 @@ def enforce_git_config(self): desired_refspecs, ) if refspecs != desired_refspecs: - conf.set_multivar(remote_section, "fetch", self.refspecs) + conf.set_multivar(remote_section, "fetch", desired_refspecs) log.debug( "Refspecs for %s remote '%s' set to %s", self.role, @@ -837,6 +903,20 @@ def _lock(self, lock_type="update", failhard=False): """ Place a lock file if (and only if) it does not already exist. """ + if self.__class__._master_lock.acquire(timeout=60) is False: + # if gitfs works right we should never see this timeout error. + log.error("gitfs master lock timeout!") + raise TimeoutError("gitfs master lock timeout!") + try: + return self.__lock(lock_type, failhard) + finally: + self.__class__._master_lock.release() + + def __lock(self, lock_type="update", failhard=False): + """ + Place a lock file if (and only if) it does not already exist. + Without MultiProcessing locks. 
+ """ try: fh_ = os.open( self._get_lock_file(lock_type), os.O_CREAT | os.O_EXCL | os.O_WRONLY @@ -903,9 +983,9 @@ def _lock(self, lock_type="update", failhard=False): lock_type, lock_file, ) - success, fail = self.clear_lock() + success, fail = self._clear_lock() if success: - return self._lock(lock_type="update", failhard=failhard) + return self.__lock(lock_type="update", failhard=failhard) elif failhard: raise return @@ -997,7 +1077,7 @@ def init_remote(self): """ raise NotImplementedError() - def checkout(self): + def checkout(self, fetch_on_fail=True): """ This function must be overridden in a sub-class """ @@ -1120,6 +1200,21 @@ def get_url(self): else: self.url = self.id + def fetch_request_check(self): + fetch_request = salt.utils.path.join(self._salt_working_dir, "fetch_request") + if os.path.isfile(fetch_request): + log.debug(f"Fetch request: {self._salt_working_dir}") + try: + os.remove(fetch_request) + except OSError as exc: + log.error( + f"Failed to remove Fetch request: {self._salt_working_dir} {exc}", + exc_info=True, + ) + self.fetch() + return True + return False + @property def linkdir_walk(self): """ @@ -1146,14 +1241,14 @@ def linkdir_walk(self): dirs = [] self._linkdir_walk.append( ( - salt.utils.path.join(self.linkdir, *parts[: idx + 1]), + salt.utils.path.join(self._linkdir, *parts[: idx + 1]), dirs, [], ) ) try: # The linkdir itself goes at the beginning - self._linkdir_walk.insert(0, (self.linkdir, [parts[0]], [])) + self._linkdir_walk.insert(0, (self._linkdir, [parts[0]], [])) except IndexError: pass return self._linkdir_walk @@ -1203,13 +1298,17 @@ def __init__( role, ) - def checkout(self): + def checkout(self, fetch_on_fail=True): """ Checkout the configured branch/tag. We catch an "Exception" class here instead of a specific exception class because the exceptions raised by GitPython when running these functions vary in different versions of GitPython. + + fetch_on_fail + If checkout fails perform a fetch then try to checkout again. 
""" + self.fetch_request_check() tgt_ref = self.get_checkout_target() try: head_sha = self.repo.rev_parse("HEAD").hexsha @@ -1273,6 +1372,15 @@ def checkout(self): except Exception: # pylint: disable=broad-except continue return self.check_root() + if fetch_on_fail: + log.debug( + "Failed to checkout %s from %s remote '%s': fetch and try again", + tgt_ref, + self.role, + self.id, + ) + self.fetch() + return self.checkout(fetch_on_fail=False) log.error( "Failed to checkout %s from %s remote '%s': remote ref does not exist", tgt_ref, @@ -1288,16 +1396,16 @@ def init_remote(self): initialized by this function. """ new = False - if not os.listdir(self.cachedir): + if not os.listdir(self._cachedir): # Repo cachedir is empty, initialize a new repo there - self.repo = git.Repo.init(self.cachedir) + self.repo = git.Repo.init(self._cachedir) new = True else: # Repo cachedir exists, try to attach try: - self.repo = git.Repo(self.cachedir) + self.repo = git.Repo(self._cachedir) except git.exc.InvalidGitRepositoryError: - log.error(_INVALID_REPO, self.cachedir, self.url, self.role) + log.error(_INVALID_REPO, self._cachedir, self.url, self.role) return new self.gitdir = salt.utils.path.join(self.repo.working_dir, ".git") @@ -1531,10 +1639,14 @@ def peel(self, obj): except AttributeError: return obj.get_object() - def checkout(self): + def checkout(self, fetch_on_fail=True): """ Checkout the configured branch/tag + + fetch_on_fail + If checkout fails perform a fetch then try to checkout again. 
""" + self.fetch_request_check() tgt_ref = self.get_checkout_target() local_ref = "refs/heads/" + tgt_ref remote_ref = "refs/remotes/origin/" + tgt_ref @@ -1724,6 +1836,15 @@ def _perform_checkout(checkout_ref, branch=True): exc_info=True, ) return None + if fetch_on_fail: + log.debug( + "Failed to checkout %s from %s remote '%s': fetch and try again", + tgt_ref, + self.role, + self.id, + ) + self.fetch() + return self.checkout(fetch_on_fail=False) log.error( "Failed to checkout %s from %s remote '%s': remote ref does not exist", tgt_ref, @@ -1765,16 +1886,16 @@ def init_remote(self): home = os.path.expanduser("~") pygit2.settings.search_path[pygit2.GIT_CONFIG_LEVEL_GLOBAL] = home new = False - if not os.listdir(self.cachedir): + if not os.listdir(self._cachedir): # Repo cachedir is empty, initialize a new repo there - self.repo = pygit2.init_repository(self.cachedir) + self.repo = pygit2.init_repository(self._cachedir) new = True else: # Repo cachedir exists, try to attach try: - self.repo = pygit2.Repository(self.cachedir) + self.repo = pygit2.Repository(self._cachedir) except KeyError: - log.error(_INVALID_REPO, self.cachedir, self.url, self.role) + log.error(_INVALID_REPO, self._cachedir, self.url, self.role) return new self.gitdir = salt.utils.path.join(self.repo.workdir, ".git") @@ -2294,6 +2415,7 @@ def fetch_remotes(self): self.file_list_cachedir = salt.utils.path.join( self.opts["cachedir"], "file_lists", self.role ) + salt.utils.cache.verify_cache_version(self.cache_root) if init_remotes: self.init_remotes( remotes if remotes is not None else [], @@ -2366,8 +2488,6 @@ def init_remotes( ) if hasattr(repo_obj, "repo"): # Sanity check and assign the credential parameter - repo_obj.verify_auth() - repo_obj.setup_callbacks() if self.opts["__role"] == "minion" and repo_obj.new: # Perform initial fetch on masterless minion repo_obj.fetch() @@ -2416,7 +2536,7 @@ def init_remotes( # Don't allow collisions in cachedir naming cachedir_map = {} for repo in 
self.remotes: - cachedir_map.setdefault(repo.cachedir, []).append(repo.id) + cachedir_map.setdefault(repo.get_cachedir(), []).append(repo.id) collisions = [x for x in cachedir_map if len(cachedir_map[x]) > 1] if collisions: @@ -2433,48 +2553,42 @@ def init_remotes( if any(x.new for x in self.remotes): self.write_remote_map() + def _remove_cache_dir(self, cache_dir): + try: + shutil.rmtree(cache_dir) + except OSError as exc: + log.error( + "Unable to remove old %s remote cachedir %s: %s", + self.role, + cache_dir, + exc, + ) + return False + log.debug("%s removed old cachedir %s", self.role, cache_dir) + return True + + def _iter_remote_hashes(self): + for item in os.listdir(self.cache_root): + if item in ("hash", "refs", "links", "work"): + continue + if os.path.isdir(salt.utils.path.join(self.cache_root, item)): + yield item + def clear_old_remotes(self): """ Remove cache directories for remotes no longer configured """ - try: - cachedir_ls = os.listdir(self.cache_root) - except OSError: - cachedir_ls = [] - # Remove actively-used remotes from list - for repo in self.remotes: - try: - cachedir_ls.remove(repo.cachedir_basename) - except ValueError: - pass - to_remove = [] - for item in cachedir_ls: - if item in ("hash", "refs"): - continue - path = salt.utils.path.join(self.cache_root, item) - if os.path.isdir(path): - to_remove.append(path) - failed = [] - if to_remove: - for rdir in to_remove: - try: - shutil.rmtree(rdir) - except OSError as exc: - log.error( - "Unable to remove old %s remote cachedir %s: %s", - self.role, - rdir, - exc, - ) - failed.append(rdir) - else: - log.debug("%s removed old cachedir %s", self.role, rdir) - for fdir in failed: - to_remove.remove(fdir) - ret = bool(to_remove) - if ret: + change = False + # Remove all hash dirs not part of this group + remote_set = {r.get_cache_basehash() for r in self.remotes} + for item in self._iter_remote_hashes(): + if item not in remote_set: + change = self._remove_cache_dir( + 
salt.utils.path.join(self.cache_root, item) or change + ) + if not change: self.write_remote_map() - return ret + return change def clear_cache(self): """ @@ -2533,6 +2647,27 @@ def fetch_remotes(self, remotes=None): name = getattr(repo, "name", None) if not remotes or (repo.id, name) in remotes or name in remotes: try: + # Find and place fetch_request file for all the other branches for this repo + repo_work_hash = os.path.split(repo.get_salt_working_dir())[0] + for branch in os.listdir(repo_work_hash): + # Don't place fetch request in current branch being updated + if branch == repo.get_cache_basename(): + continue + branch_salt_dir = salt.utils.path.join(repo_work_hash, branch) + fetch_path = salt.utils.path.join( + branch_salt_dir, "fetch_request" + ) + if os.path.isdir(branch_salt_dir): + try: + with salt.utils.files.fopen(fetch_path, "w"): + pass + except OSError as exc: # pylint: disable=broad-except + log.error( + f"Failed to make fetch request: {fetch_path} {exc}", + exc_info=True, + ) + else: + log.error(f"Failed to make fetch request: {fetch_path}") if repo.fetch(): # We can't just use the return value from repo.fetch() # because the data could still have changed if old @@ -2787,7 +2922,7 @@ def write_remote_map(self): for repo in self.remotes: fp_.write( salt.utils.stringutils.to_str( - "{} = {}\n".format(repo.cachedir_basename, repo.id) + "{} = {}\n".format(repo.get_cache_basehash(), repo.id) ) ) except OSError: @@ -2795,15 +2930,18 @@ def write_remote_map(self): else: log.info("Wrote new %s remote map to %s", self.role, remote_map) - def do_checkout(self, repo): + def do_checkout(self, repo, fetch_on_fail=True): """ Common code for git_pillar/winrepo to handle locking and checking out of a repo. + + fetch_on_fail + If checkout fails perform a fetch then try to checkout again. 
""" time_start = time.time() while time.time() - time_start <= 5: try: - return repo.checkout() + return repo.checkout(fetch_on_fail=fetch_on_fail) except GitLockError as exc: if exc.errno == errno.EEXIST: time.sleep(0.1) @@ -3198,14 +3336,17 @@ class GitPillar(GitBase): role = "git_pillar" - def checkout(self): + def checkout(self, fetch_on_fail=True): """ Checkout the targeted branches/tags from the git_pillar remotes + + fetch_on_fail + If checkout fails perform a fetch then try to checkout again. """ self.pillar_dirs = OrderedDict() self.pillar_linked_dirs = [] for repo in self.remotes: - cachedir = self.do_checkout(repo) + cachedir = self.do_checkout(repo, fetch_on_fail=fetch_on_fail) if cachedir is not None: # Figure out which environment this remote should be assigned if repo.branch == "__env__" and hasattr(repo, "all_saltenvs"): @@ -3222,8 +3363,8 @@ def checkout(self): env = "base" if tgt == repo.base else tgt if repo._mountpoint: if self.link_mountpoint(repo): - self.pillar_dirs[repo.linkdir] = env - self.pillar_linked_dirs.append(repo.linkdir) + self.pillar_dirs[repo.get_linkdir()] = env + self.pillar_linked_dirs.append(repo.get_linkdir()) else: self.pillar_dirs[cachedir] = env @@ -3232,17 +3373,19 @@ def link_mountpoint(self, repo): Ensure that the mountpoint is present in the correct location and points at the correct path """ - lcachelink = salt.utils.path.join(repo.linkdir, repo._mountpoint) - lcachedest = salt.utils.path.join(repo.cachedir, repo.root()).rstrip(os.sep) + lcachelink = salt.utils.path.join(repo.get_linkdir(), repo._mountpoint) + lcachedest = salt.utils.path.join(repo.get_cachedir(), repo.root()).rstrip( + os.sep + ) wipe_linkdir = False create_link = False try: with repo.gen_lock(lock_type="mountpoint", timeout=10): - walk_results = list(os.walk(repo.linkdir, followlinks=False)) + walk_results = list(os.walk(repo.get_linkdir(), followlinks=False)) if walk_results != repo.linkdir_walk: log.debug( "Results of walking %s differ from 
expected results", - repo.linkdir, + repo.get_linkdir(), ) log.debug("Walk results: %s", walk_results) log.debug("Expected results: %s", repo.linkdir_walk) @@ -3303,7 +3446,7 @@ def link_mountpoint(self, repo): # Wiping implies that we need to create the link create_link = True try: - shutil.rmtree(repo.linkdir) + shutil.rmtree(repo.get_linkdir()) except OSError: pass try: @@ -3355,6 +3498,9 @@ def link_mountpoint(self, repo): class WinRepo(GitBase): """ Functionality specific to the winrepo runner + + fetch_on_fail + If checkout fails perform a fetch then try to checkout again. """ role = "winrepo" @@ -3362,12 +3508,12 @@ class WinRepo(GitBase): # out the repos. winrepo_dirs = {} - def checkout(self): + def checkout(self, fetch_on_fail=True): """ Checkout the targeted branches/tags from the winrepo remotes """ self.winrepo_dirs = {} for repo in self.remotes: - cachedir = self.do_checkout(repo) + cachedir = self.do_checkout(repo, fetch_on_fail=fetch_on_fail) if cachedir is not None: self.winrepo_dirs[repo.id] = cachedir diff --git a/salt/utils/http.py b/salt/utils/http.py index 91c5cbf08edf..5fae89efc8c5 100644 --- a/salt/utils/http.py +++ b/salt/utils/http.py @@ -5,7 +5,7 @@ .. 
versionadded:: 2015.5.0 """ -import cgi +import email.message import gzip import http.client import http.cookiejar @@ -35,6 +35,7 @@ import salt.utils.network import salt.utils.platform import salt.utils.stringutils +import salt.utils.url import salt.utils.xmlutil as xml import salt.utils.yaml import salt.version @@ -61,14 +62,6 @@ HAS_MATCHHOSTNAME = False # pylint: enable=no-name-in-module - -try: - import salt.ext.tornado.curl_httpclient - - HAS_CURL_HTTPCLIENT = True -except ImportError: - HAS_CURL_HTTPCLIENT = False - try: import requests @@ -84,7 +77,7 @@ HAS_CERTIFI = False log = logging.getLogger(__name__) -USERAGENT = "Salt/{}".format(salt.version.__version__) +USERAGENT = f"Salt/{salt.version.__version__}" def __decompressContent(coding, pgctnt): @@ -119,6 +112,37 @@ def __decompressContent(coding, pgctnt): return pgctnt +def _decode_result_text(result_text, backend, decode_body=None, result=None): + """ + Decode only the result_text + """ + if backend == "requests": + if not isinstance(result_text, str) and decode_body: + result_text = result_text.decode(result.encoding or "utf-8") + else: + if isinstance(result_text, bytes) and decode_body: + result_text = result_text.decode("utf-8") + return result_text + + +def _decode_result(result_text, result_headers, backend, decode_body=None, result=None): + """ + Decode the result_text and headers. 
+ """ + if "Content-Type" in result_headers: + msg = email.message.EmailMessage() + msg.add_header("Content-Type", result_headers["Content-Type"]) + if msg.get_content_type().startswith("text/"): + content_charset = msg.get_content_charset() + if content_charset and not isinstance(result_text, str): + result_text = result_text.decode(content_charset) + result_text = _decode_result_text( + result_text, backend, decode_body=decode_body, result=result + ) + + return result_text, result_headers + + @jinja_filter("http_query") def query( url, @@ -170,7 +194,7 @@ def query( formdata_fieldname=None, formdata_filename=None, decode_body=True, - **kwargs + **kwargs, ): """ Query a resource, and decode the return data @@ -192,6 +216,37 @@ def query( if not backend: backend = opts.get("backend", "tornado") + proxy_host = opts.get("proxy_host", None) + if proxy_host: + proxy_host = salt.utils.stringutils.to_str(proxy_host) + proxy_port = opts.get("proxy_port", None) + proxy_username = opts.get("proxy_username", None) + if proxy_username: + proxy_username = salt.utils.stringutils.to_str(proxy_username) + proxy_password = opts.get("proxy_password", None) + if proxy_password: + proxy_password = salt.utils.stringutils.to_str(proxy_password) + no_proxy = opts.get("no_proxy", []) + + if urllib.parse.urlparse(url).hostname in no_proxy: + proxy_host = None + proxy_port = None + proxy_username = None + proxy_password = None + + http_proxy_url = None + if proxy_host and proxy_port: + if backend != "requests": + log.debug("Switching to request backend due to the use of proxies.") + backend = "requests" + + if proxy_username and proxy_password: + http_proxy_url = ( + f"http://{proxy_username}:{proxy_password}@{proxy_host}:{proxy_port}" + ) + else: + http_proxy_url = f"http://{proxy_host}:{proxy_port}" + match = re.match( r"https?://((25[0-5]|2[0-4]\d|[01]?\d\d?)\.){3}(25[0-5]|2[0-4]\d|[01]?\d\d?)($|/)", url, @@ -295,7 +350,7 @@ def query( auth = (username, password) if agent == USERAGENT: 
- agent = "{} http.query()".format(agent) + agent = f"{agent} http.query()" header_dict["User-agent"] = agent if backend == "requests": @@ -305,6 +360,8 @@ def query( log.trace("Request Headers: %s", sess.headers) sess_cookies = sess.cookies sess.verify = verify_ssl + if http_proxy_url is not None: + sess.proxies = {"http": http_proxy_url} elif backend == "urllib2": sess_cookies = None else: @@ -360,14 +417,14 @@ def query( url, params=params, files={formdata_fieldname: (formdata_filename, io.StringIO(data))}, - **req_kwargs + **req_kwargs, ) else: result = sess.request(method, url, params=params, data=data, **req_kwargs) result.raise_for_status() if stream is True: # fake a HTTP response header - header_callback("HTTP/1.0 {} MESSAGE".format(result.status_code)) + header_callback(f"HTTP/1.0 {result.status_code} MESSAGE") # fake streaming the content streaming_callback(result.content) return { @@ -388,10 +445,10 @@ def query( result_headers = result.headers result_text = result.content result_cookies = result.cookies - body = result.content - if not isinstance(body, str) and decode_body: - body = body.decode(result.encoding or "utf-8") - ret["body"] = body + result_text = _decode_result_text( + result_text, backend, decode_body=decode_body, result=result + ) + ret["body"] = result_text elif backend == "urllib2": request = urllib.request.Request(url_full, data) handlers = [ @@ -482,18 +539,9 @@ def query( result_status_code = result.code result_headers = dict(result.info()) result_text = result.read() - if "Content-Type" in result_headers: - res_content_type, res_params = cgi.parse_header( - result_headers["Content-Type"] - ) - if ( - res_content_type.startswith("text/") - and "charset" in res_params - and not isinstance(result_text, str) - ): - result_text = result_text.decode(res_params["charset"]) - if isinstance(result_text, bytes) and decode_body: - result_text = result_text.decode("utf-8") + result_text, result_headers = _decode_result( + result_text, 
result_headers, backend, decode_body=decode_body, result=result + ) ret["body"] = result_text else: # Tornado @@ -532,52 +580,10 @@ def query( salt.config.DEFAULT_MINION_OPTS["http_request_timeout"], ) - client_argspec = None - - proxy_host = opts.get("proxy_host", None) - if proxy_host: - # tornado requires a str for proxy_host, cannot be a unicode str in py2 - proxy_host = salt.utils.stringutils.to_str(proxy_host) - proxy_port = opts.get("proxy_port", None) - proxy_username = opts.get("proxy_username", None) - if proxy_username: - # tornado requires a str, cannot be unicode str in py2 - proxy_username = salt.utils.stringutils.to_str(proxy_username) - proxy_password = opts.get("proxy_password", None) - if proxy_password: - # tornado requires a str, cannot be unicode str in py2 - proxy_password = salt.utils.stringutils.to_str(proxy_password) - no_proxy = opts.get("no_proxy", []) - - # Since tornado doesnt support no_proxy, we'll always hand it empty proxies or valid ones - # except we remove the valid ones if a url has a no_proxy hostname in it - if urllib.parse.urlparse(url_full).hostname in no_proxy: - proxy_host = None - proxy_port = None - proxy_username = None - proxy_password = None - - # We want to use curl_http if we have a proxy defined - if proxy_host and proxy_port: - if HAS_CURL_HTTPCLIENT is False: - ret["error"] = ( - "proxy_host and proxy_port has been set. 
This requires pycurl and" - " tornado, but the libraries does not seem to be installed" - ) - log.error(ret["error"]) - return ret - - salt.ext.tornado.httpclient.AsyncHTTPClient.configure( - "tornado.curl_httpclient.CurlAsyncHTTPClient" - ) - client_argspec = salt.utils.args.get_function_argspec( - salt.ext.tornado.curl_httpclient.CurlAsyncHTTPClient.initialize - ) - else: - salt.ext.tornado.httpclient.AsyncHTTPClient.configure(None) - client_argspec = salt.utils.args.get_function_argspec( - salt.ext.tornado.simple_httpclient.SimpleAsyncHTTPClient.initialize - ) + salt.ext.tornado.httpclient.AsyncHTTPClient.configure(None) + client_argspec = salt.utils.args.get_function_argspec( + salt.ext.tornado.simple_httpclient.SimpleAsyncHTTPClient.initialize + ) supports_max_body_size = "max_body_size" in client_argspec.args @@ -594,10 +600,6 @@ def query( "header_callback": header_callback, "connect_timeout": connect_timeout, "request_timeout": timeout, - "proxy_host": proxy_host, - "proxy_port": proxy_port, - "proxy_username": proxy_username, - "proxy_password": proxy_password, "raise_error": raise_error, "decompress_response": False, } @@ -618,6 +620,12 @@ def query( except salt.ext.tornado.httpclient.HTTPError as exc: ret["status"] = exc.code ret["error"] = str(exc) + ret["body"], _ = _decode_result( + exc.response.body, + exc.response.headers, + backend, + decode_body=decode_body, + ) return ret except (socket.herror, OSError, socket.timeout, socket.gaierror) as exc: if status is True: @@ -635,18 +643,9 @@ def query( result_status_code = result.code result_headers = result.headers result_text = result.body - if "Content-Type" in result_headers: - res_content_type, res_params = cgi.parse_header( - result_headers["Content-Type"] - ) - if ( - res_content_type.startswith("text/") - and "charset" in res_params - and not isinstance(result_text, str) - ): - result_text = result_text.decode(res_params["charset"]) - if isinstance(result_text, bytes) and decode_body: - 
result_text = result_text.decode("utf-8") + result_text, result_headers = _decode_result( + result_text, result_headers, backend, decode_body=decode_body, result=result + ) ret["body"] = result_text if "Set-Cookie" in result_headers and cookies is not None: result_cookies = parse_cookie_header(result_headers["Set-Cookie"]) @@ -1038,12 +1037,12 @@ def _sanitize_url_components(comp_list, field): """ if not comp_list: return "" - elif comp_list[0].startswith("{}=".format(field)): - ret = "{}=XXXXXXXXXX&".format(field) + elif comp_list[0].startswith(f"{field}="): + ret = f"{field}=XXXXXXXXXX&" comp_list.remove(comp_list[0]) return ret + _sanitize_url_components(comp_list, field) else: - ret = "{}&".format(comp_list[0]) + ret = f"{comp_list[0]}&" comp_list.remove(comp_list[0]) return ret + _sanitize_url_components(comp_list, field) diff --git a/salt/utils/jinja.py b/salt/utils/jinja.py index fcc5aec497e3..898c8d3fc0d1 100644 --- a/salt/utils/jinja.py +++ b/salt/utils/jinja.py @@ -2,13 +2,12 @@ Jinja loading utils to enable a more powerful backend for jinja templates """ - import itertools import logging import os.path -import pipes import pprint import re +import shlex import time import uuid import warnings @@ -58,19 +57,6 @@ class SaltCacheLoader(BaseLoader): and only loaded once per loader instance. 
""" - _cached_pillar_client = None - _cached_client = None - - @classmethod - def shutdown(cls): - for attr in ("_cached_client", "_cached_pillar_client"): - client = getattr(cls, attr, None) - if client is not None: - # PillarClient and LocalClient objects do not have a destroy method - if hasattr(client, "destroy"): - client.destroy() - setattr(cls, attr, None) - def __init__( self, opts, @@ -93,8 +79,7 @@ def __init__( log.debug("Jinja search path: %s", self.searchpath) self.cached = [] self._file_client = _file_client - # Instantiate the fileclient - self.file_client() + self._close_file_client = _file_client is None def file_client(self): """ @@ -108,18 +93,10 @@ def file_client(self): or not hasattr(self._file_client, "opts") or self._file_client.opts["file_roots"] != self.opts["file_roots"] ): - attr = "_cached_pillar_client" if self.pillar_rend else "_cached_client" - cached_client = getattr(self, attr, None) - if ( - cached_client is None - or not hasattr(cached_client, "opts") - or cached_client.opts["file_roots"] != self.opts["file_roots"] - ): - cached_client = salt.fileclient.get_file_client( - self.opts, self.pillar_rend - ) - setattr(SaltCacheLoader, attr, cached_client) - self._file_client = cached_client + self._file_client = salt.fileclient.get_file_client( + self.opts, self.pillar_rend + ) + self._close_file_client = True return self._file_client def cache_file(self, template): @@ -150,7 +127,7 @@ def get_source(self, environment, template): the importing file. """ - # FIXME: somewhere do seprataor replacement: '\\' => '/' + # FIXME: somewhere do separator replacement: '\\' => '/' _template = template if template.split("/", 1)[0] in ("..", "."): is_relative = True @@ -159,7 +136,6 @@ def get_source(self, environment, template): # checks for relative '..' 
paths that step-out of file_roots if is_relative: # Starts with a relative path indicator - if not environment or "tpldir" not in environment.globals: log.warning( 'Relative path "%s" cannot be resolved without an environment', @@ -221,6 +197,27 @@ def uptodate(): # there is no template file within searchpaths raise TemplateNotFound(template) + def destroy(self): + if self._close_file_client is False: + return + if self._file_client is None: + return + file_client = self._file_client + self._file_client = None + + try: + file_client.destroy() + except AttributeError: + # PillarClient and LocalClient objects do not have a destroy method + pass + + def __enter__(self): + self.file_client() + return self + + def __exit__(self, *args): + self.destroy() + class PrintableDict(OrderedDict): """ @@ -243,11 +240,11 @@ def __str__(self): if isinstance(value, str): # keeps quotes around strings # pylint: disable=repr-flag-used-in-string - output.append("{!r}: {!r}".format(key, value)) + output.append(f"{key!r}: {value!r}") # pylint: enable=repr-flag-used-in-string else: # let default output - output.append("{!r}: {!s}".format(key, value)) + output.append(f"{key!r}: {value!s}") return "{" + ", ".join(output) + "}" def __repr__(self): # pylint: disable=W0221 @@ -256,7 +253,7 @@ def __repr__(self): # pylint: disable=W0221 # Raw string formatter required here because this is a repr # function. # pylint: disable=repr-flag-used-in-string - output.append("{!r}: {!r}".format(key, value)) + output.append(f"{key!r}: {value!r}") # pylint: enable=repr-flag-used-in-string return "{" + ", ".join(output) + "}" @@ -442,7 +439,7 @@ def quote(txt): 'my_text' """ - return pipes.quote(txt) + return shlex.quote(txt) @jinja_filter() @@ -1096,13 +1093,13 @@ def load_yaml(self, value): # to the stringified version of the exception. 
msg += str(exc) else: - msg += "{}\n".format(problem) + msg += f"{problem}\n" msg += salt.utils.stringutils.get_context( buf, line, marker=" <======================" ) raise TemplateRuntimeError(msg) except AttributeError: - raise TemplateRuntimeError("Unable to load yaml from {}".format(value)) + raise TemplateRuntimeError(f"Unable to load yaml from {value}") def load_json(self, value): if isinstance(value, TemplateModule): @@ -1110,7 +1107,7 @@ def load_json(self, value): try: return salt.utils.json.loads(value) except (ValueError, TypeError, AttributeError): - raise TemplateRuntimeError("Unable to load json from {}".format(value)) + raise TemplateRuntimeError(f"Unable to load json from {value}") def load_text(self, value): if isinstance(value, TemplateModule): @@ -1145,7 +1142,7 @@ def parse_profile(self, parser): return self._parse_profile_block(parser, label, "profile block", body, lineno) def _create_profile_id(self, parser): - return "_salt_profile_{}".format(parser.free_identifier().name) + return f"_salt_profile_{parser.free_identifier().name}" def _profile_start(self, label, source): return (label, source, time.time()) @@ -1187,7 +1184,7 @@ def parse_load(self, parser): filter_name = parser.stream.current.value lineno = next(parser.stream).lineno if filter_name not in self.environment.filters: - parser.fail("Unable to parse {}".format(filter_name), lineno) + parser.fail(f"Unable to parse {filter_name}", lineno) parser.stream.expect("name:as") target = parser.parse_assign_target() @@ -1226,7 +1223,7 @@ def parse_import(self, parser, converter): nodes.Name(target, "store").set_lineno(lineno), nodes.Filter( nodes.Name(target, "load").set_lineno(lineno), - "load_{}".format(converter), + f"load_{converter}", [], [], None, @@ -1235,7 +1232,7 @@ def parse_import(self, parser, converter): ).set_lineno(lineno), ] return self._parse_profile_block( - parser, import_node.template, "import_{}".format(converter), body, lineno + parser, import_node.template, 
f"import_{converter}", body, lineno ) def dict_to_sls_yaml_params(self, value, flow_style=False): diff --git a/salt/utils/job.py b/salt/utils/job.py index a734b015a7c5..140f315f6c97 100644 --- a/salt/utils/job.py +++ b/salt/utils/job.py @@ -125,7 +125,12 @@ def store_job(opts, load, event=None, mminion=None): log.error(emsg) raise KeyError(emsg) - if job_cache != "local_cache": + save_load = True + if job_cache == "local_cache" and mminion.returners[getfstr](load.get("jid", "")): + # The job was saved previously. + save_load = False + + if save_load: try: mminion.returners[savefstr](load["jid"], load) except KeyError as e: @@ -198,6 +203,3 @@ def get_keep_jobs_seconds(opts): ) keep_jobs_seconds = keep_jobs * 3600 return keep_jobs_seconds - - -# vim:set et sts=4 ts=4 tw=80: diff --git a/salt/utils/mako.py b/salt/utils/mako.py index 69618de98374..4397ae8cc7d9 100644 --- a/salt/utils/mako.py +++ b/salt/utils/mako.py @@ -97,3 +97,12 @@ def cache_file(self, fpath): self.cache[fpath] = self.file_client().get_file( fpath, "", True, self.saltenv ) + + def destroy(self): + if self._file_client: + file_client = self._file_client + self._file_client = None + try: + file_client.destroy() + except AttributeError: + pass diff --git a/salt/utils/nacl.py b/salt/utils/nacl.py index d86292a2c41e..b04904c2e14e 100644 --- a/salt/utils/nacl.py +++ b/salt/utils/nacl.py @@ -2,9 +2,7 @@ Common code shared between the nacl module and runner. """ - import base64 -import logging import os import salt.syspaths @@ -15,15 +13,13 @@ import salt.utils.win_dacl import salt.utils.win_functions -log = logging.getLogger(__name__) - REQ_ERROR = None try: - import libnacl.sealed - import libnacl.secret + import nacl.public + import nacl.secret except (ImportError, OSError) as e: REQ_ERROR = ( - "libnacl import error, perhaps missing python libnacl package or should update." + "PyNaCl import error, perhaps missing python PyNaCl package or should update." 
) __virtualname__ = "nacl" @@ -113,7 +109,7 @@ def _get_pk(**kwargs): def keygen(sk_file=None, pk_file=None, **kwargs): """ - Use libnacl to generate a keypair. + Use PyNaCl to generate a keypair. If no `sk_file` is defined return a keypair. @@ -143,17 +139,20 @@ def keygen(sk_file=None, pk_file=None, **kwargs): sk_file = kwargs["keyfile"] if sk_file is None: - kp = libnacl.public.SecretKey() - return {"sk": base64.b64encode(kp.sk), "pk": base64.b64encode(kp.pk)} + kp = nacl.public.PrivateKey.generate() + return { + "sk": base64.b64encode(kp.encode()), + "pk": base64.b64encode(kp.public_key.encode()), + } if pk_file is None: pk_file = "{}.pub".format(sk_file) if sk_file and pk_file is None: if not os.path.isfile(sk_file): - kp = libnacl.public.SecretKey() + kp = nacl.public.PrivateKey.generate() with salt.utils.files.fopen(sk_file, "wb") as keyf: - keyf.write(base64.b64encode(kp.sk)) + keyf.write(base64.b64encode(kp.encode())) if salt.utils.platform.is_windows(): cur_user = salt.utils.win_functions.get_current_user() salt.utils.win_dacl.set_owner(sk_file, cur_user) @@ -185,14 +184,14 @@ def keygen(sk_file=None, pk_file=None, **kwargs): with salt.utils.files.fopen(sk_file, "rb") as keyf: sk = salt.utils.stringutils.to_unicode(keyf.read()).rstrip("\n") sk = base64.b64decode(sk) - kp = libnacl.public.SecretKey(sk) + kp = nacl.public.PublicKey(sk) with salt.utils.files.fopen(pk_file, "wb") as keyf: - keyf.write(base64.b64encode(kp.pk)) + keyf.write(base64.b64encode(kp.encode())) return "saved pk_file: {}".format(pk_file) - kp = libnacl.public.SecretKey() + kp = nacl.public.PublicKey.generate() with salt.utils.files.fopen(sk_file, "wb") as keyf: - keyf.write(base64.b64encode(kp.sk)) + keyf.write(base64.b64encode(kp.encode())) if salt.utils.platform.is_windows(): cur_user = salt.utils.win_functions.get_current_user() salt.utils.win_dacl.set_owner(sk_file, cur_user) @@ -203,7 +202,7 @@ def keygen(sk_file=None, pk_file=None, **kwargs): # chmod 0600 file os.chmod(sk_file, 
1536) with salt.utils.files.fopen(pk_file, "wb") as keyf: - keyf.write(base64.b64encode(kp.pk)) + keyf.write(base64.b64encode(kp.encode())) return "saved sk_file:{} pk_file: {}".format(sk_file, pk_file) @@ -313,6 +312,7 @@ def dec(data, **kwargs): box_type = _get_config(**kwargs)["box_type"] if box_type == "secretbox": return secretbox_decrypt(data, **kwargs) + return sealedbox_decrypt(data, **kwargs) @@ -366,7 +366,8 @@ def sealedbox_encrypt(data, **kwargs): data = salt.utils.stringutils.to_bytes(data) pk = _get_pk(**kwargs) - b = libnacl.sealed.SealedBox(pk) + keypair = nacl.public.PublicKey(pk) + b = nacl.public.SealedBox(keypair) return base64.b64encode(b.encrypt(data)) @@ -389,8 +390,8 @@ def sealedbox_decrypt(data, **kwargs): data = salt.utils.stringutils.to_bytes(data) sk = _get_sk(**kwargs) - keypair = libnacl.public.SecretKey(sk) - b = libnacl.sealed.SealedBox(keypair) + keypair = nacl.public.PrivateKey(sk) + b = nacl.public.SealedBox(keypair) return b.decrypt(base64.b64decode(data)) @@ -411,7 +412,7 @@ def secretbox_encrypt(data, **kwargs): data = salt.utils.stringutils.to_bytes(data) sk = _get_sk(**kwargs) - b = libnacl.secret.SecretBox(sk) + b = nacl.secret.SecretBox(sk) return base64.b64encode(b.encrypt(data)) @@ -435,6 +436,5 @@ def secretbox_decrypt(data, **kwargs): data = salt.utils.stringutils.to_bytes(data) key = _get_sk(**kwargs) - b = libnacl.secret.SecretBox(key=key) - + b = nacl.secret.SecretBox(key=key) return b.decrypt(base64.b64decode(data)) diff --git a/salt/utils/network.py b/salt/utils/network.py index 2bea2cf1293a..9566f433444b 100644 --- a/salt/utils/network.py +++ b/salt/utils/network.py @@ -674,6 +674,7 @@ def cidr_to_ipv4_netmask(cidr_bits): else: netmask += "{:d}".format(256 - (2 ** (8 - cidr_bits))) cidr_bits = 0 + return netmask @@ -682,8 +683,14 @@ def _number_of_set_bits_to_ipv4_netmask(set_bits): Returns an IPv4 netmask from the integer representation of that mask. Ex. 
0xffffff00 -> '255.255.255.0' + 0xffff6400 -> '255.255.100.0' """ - return cidr_to_ipv4_netmask(_number_of_set_bits(set_bits)) + # Note: previously used cidr but that is counting number of bits in set_bits + # and can lead to wrong netmaks values, for example: + # 0xFFFF6400 is 255.255.100.0, 0x64 is 100 decimal + # but if convert to cidr first, it gives 19 bits, get 255.255.224.0 - WRONG + # leveraging Python ip_address library for different method of conversion + return str(ipaddress.ip_address(set_bits)) def _number_of_set_bits(x): @@ -1004,7 +1011,7 @@ def _netbsd_interfaces_ifconfig(out): return ret -def _junos_interfaces_ifconfig(out): +def _junos_interfaces_ifconfig(out): # pragma: no cover """ Uses ifconfig to return a dictionary of interfaces with various information about each (up/down state, ip address, netmask, and hwaddr) @@ -1074,7 +1081,7 @@ def _junos_interfaces_ifconfig(out): return ret -def junos_interfaces(): +def junos_interfaces(): # pragma: no cover """ Obtain interface information for Junos; ifconfig output diverged from other BSD variants (Netmask is now part of the @@ -1239,7 +1246,7 @@ def _get_iface_info(iface): return None, error_msg -def _hw_addr_aix(iface): +def _hw_addr_aix(iface): # pragma: no cover """ Return the hardware address (a.k.a. MAC address) for a given interface on AIX MAC address not available in through interfaces @@ -1277,7 +1284,7 @@ def hw_addr(iface): """ if salt.utils.platform.is_aix(): - return _hw_addr_aix + return _hw_addr_aix(iface) iface_info, error = _get_iface_info(iface) @@ -1746,7 +1753,7 @@ def _netlink_tool_remote_on(port, which_end): return remotes -def _sunos_remotes_on(port, which_end): +def _sunos_remotes_on(port, which_end): # pragma: no cover """ SunOS specific helper function. 
Returns set of ipv4 host addresses of remote established connections @@ -1786,7 +1793,7 @@ def _sunos_remotes_on(port, which_end): return remotes -def _freebsd_remotes_on(port, which_end): +def _freebsd_remotes_on(port, which_end): # pragma: no cover """ Returns set of ipv4 host addresses of remote established connections on local tcp port port. @@ -1848,7 +1855,7 @@ def _freebsd_remotes_on(port, which_end): return remotes -def _netbsd_remotes_on(port, which_end): +def _netbsd_remotes_on(port, which_end): # pragma: no cover """ Returns set of ipv4 host addresses of remote established connections on local tcp port port. @@ -1909,7 +1916,7 @@ def _netbsd_remotes_on(port, which_end): return remotes -def _openbsd_remotes_on(port, which_end): +def _openbsd_remotes_on(port, which_end): # pragma: no cover """ OpenBSD specific helper function. Returns set of ipv4 host addresses of remote established connections @@ -2053,7 +2060,7 @@ def _linux_remotes_on(port, which_end): return remotes -def _aix_remotes_on(port, which_end): +def _aix_remotes_on(port, which_end): # pragma: no cover """ AIX specific helper function. 
Returns set of ipv4 host addresses of remote established connections diff --git a/salt/utils/odict.py b/salt/utils/odict.py index 147fa7980a80..76f313fe5be4 100644 --- a/salt/utils/odict.py +++ b/salt/utils/odict.py @@ -19,6 +19,8 @@ http://stackoverflow.com/questions/6190331/ """ +# pragma: no cover # essentially using Python's OrderDict + from collections.abc import Callable diff --git a/salt/utils/parsers.py b/salt/utils/parsers.py index 911b2cbb043c..f3ba1948d890 100644 --- a/salt/utils/parsers.py +++ b/salt/utils/parsers.py @@ -41,7 +41,7 @@ import salt.version as version from salt.defaults import DEFAULT_TARGET_DELIM from salt.utils.validate.path import is_writeable -from salt.utils.verify import verify_log, verify_log_files +from salt.utils.verify import insecure_log, verify_log, verify_log_files log = logging.getLogger(__name__) @@ -454,6 +454,7 @@ def process_saltfile(self): if value != default: # The user passed an argument, we won't override it with the # one from Saltfile, if any + cli_config.pop(option.dest) continue # We reached this far! Set the Saltfile value on the option @@ -477,6 +478,7 @@ def process_saltfile(self): if value != default: # The user passed an argument, we won't override it with # the one from Saltfile, if any + cli_config.pop(option.dest) continue setattr(self.options, option.dest, cli_config[option.dest]) @@ -610,9 +612,11 @@ def _mixin_setup(self): *self._console_log_level_cli_flags, dest=self._loglevel_config_setting_name_, choices=list(salt._logging.LOG_LEVELS), - help="Console logging log level. One of {}. Default: '{}'.".format( + help="Console logging log level. One of {}. Default: '{}'. 
\n " + "The following log levels are INSECURE and may log sensitive data: {}".format( ", ".join(["'{}'".format(n) for n in salt._logging.SORTED_LEVEL_NAMES]), self._default_logging_level_, + ", ".join(insecure_log()), ), ) @@ -636,9 +640,11 @@ def _logfile_callback(option, opt, value, parser, *args, **kwargs): "--log-file-level", dest=self._logfile_loglevel_config_setting_name_, choices=list(salt._logging.SORTED_LEVEL_NAMES), - help="Logfile logging log level. One of {}. Default: '{}'.".format( + help="Logfile logging log level. One of {}. Default: '{}'. \n " + "The following log levels are INSECURE and may log sensitive data: {}".format( ", ".join(["'{}'".format(n) for n in salt._logging.SORTED_LEVEL_NAMES]), self._default_logging_level_, + ", ".join(insecure_log()), ), ) self._mixin_after_parsed_funcs.append(self.__setup_logging_routines) @@ -2649,7 +2655,7 @@ def _mixin_setup(self): default=".", help=( "Set the directory to save the generated keypair, only " - "works with \"gen_keys_dir\" option. Default: '%default'." + "works with \"--gen-keys\" option. Default: '%default'." 
), ) @@ -2767,10 +2773,11 @@ def process_keysize(self): def process_gen_keys_dir(self): # Schedule __create_keys_dir() to run if there's a value for - # --create-keys-dir - self._mixin_after_parsed_funcs.append( - self.__create_keys_dir - ) # pylint: disable=no-member + # --gen-keys-dir + if self.options.gen_keys: + self._mixin_after_parsed_funcs.append( + self.__create_keys_dir + ) # pylint: disable=no-member def __create_keys_dir(self): if not os.path.isdir(self.config["gen_keys_dir"]): diff --git a/salt/utils/platform.py b/salt/utils/platform.py index 6b66da629207..35bb7ff3f818 100644 --- a/salt/utils/platform.py +++ b/salt/utils/platform.py @@ -1,18 +1,35 @@ """ Functions for identifying which platform a machine is """ - +import contextlib import multiprocessing import os import platform import subprocess import sys -from distro import linux_distribution +import distro from salt.utils.decorators import memoize as real_memoize +def linux_distribution(full_distribution_name=True): + """ + Simple function to return information about the OS distribution (id_name, version, codename). 
+ """ + if full_distribution_name: + distro_name = distro.name() + else: + distro_name = distro.id() + # Empty string fallbacks + distro_version = distro_codename = "" + with contextlib.suppress(subprocess.CalledProcessError): + distro_version = distro.version(best=True) + with contextlib.suppress(subprocess.CalledProcessError): + distro_codename = distro.codename() + return distro_name, distro_version, distro_codename + + @real_memoize def is_windows(): """ diff --git a/salt/utils/pyinstaller/__init__.py b/salt/utils/pyinstaller/__init__.py deleted file mode 100644 index eb8a6a85fb44..000000000000 --- a/salt/utils/pyinstaller/__init__.py +++ /dev/null @@ -1,21 +0,0 @@ -""" -This module exists to help PyInstaller bundle Salt -""" -import pathlib - -PYINSTALLER_UTILS_DIR_PATH = pathlib.Path(__file__).resolve().parent - - -def get_hook_dirs(): - """ - Return a list of paths that PyInstaller can search for hooks. - """ - hook_dirs = {PYINSTALLER_UTILS_DIR_PATH} - for path in PYINSTALLER_UTILS_DIR_PATH.iterdir(): - if not path.is_dir(): - continue - if "__pycache__" in path.parts: - continue - hook_dirs.add(path) - - return sorted(str(p) for p in hook_dirs) diff --git a/salt/utils/pyinstaller/hook-salt.py b/salt/utils/pyinstaller/hook-salt.py deleted file mode 100644 index cad74ffd98c0..000000000000 --- a/salt/utils/pyinstaller/hook-salt.py +++ /dev/null @@ -1,146 +0,0 @@ -# pylint: disable=3rd-party-module-not-gated - -import logging -import pathlib -import sys - -from PyInstaller.utils import hooks - -log = logging.getLogger(__name__) - - -def _filter_stdlib_tests(name): - """ - Filter out non useful modules from the stdlib - """ - if ".test." in name: - return False - if ".tests." 
in name: - return False - if ".idle_test" in name: - return False - return True - - -def _python_stdlib_path(): - """ - Return the path to the standard library folder - """ - base_exec_prefix = pathlib.Path(sys.base_exec_prefix) - log.info("Grabbing 'base_exec_prefix' for platform: %s", sys.platform) - if not sys.platform.lower().startswith("win"): - return base_exec_prefix / "lib" / "python{}.{}".format(*sys.version_info) - return base_exec_prefix / "Lib" - - -def _collect_python_stdlib_hidden_imports(): - """ - Collect all of the standard library(most of it) as hidden imports. - """ - _hidden_imports = set() - - stdlib = _python_stdlib_path() - if not stdlib.exists(): - log.error("The path '%s' does not exist", stdlib) - return list(_hidden_imports) - - log.info( - "Collecting hidden imports from the python standard library at: %s", - stdlib, - ) - for path in stdlib.glob("*"): - if path.is_dir(): - if path.name in ( - "__pycache__", - "site-packages", - "test", - "turtledemo", - "ensurepip", - ): - continue - if path.joinpath("__init__.py").is_file(): - log.info("Collecting: %s", path.name) - try: - _module_hidden_imports = hooks.collect_submodules( - path.name, filter=_filter_stdlib_tests - ) - log.debug("Collected(%s): %s", path.name, _module_hidden_imports) - _hidden_imports.update(set(_module_hidden_imports)) - except Exception as exc: # pylint: disable=broad-except - log.error("Failed to collect %r: %s", path.name, exc) - continue - if path.suffix not in (".py", ".pyc", ".pyo"): - continue - _hidden_imports.add(path.stem) - log.info("Collected stdlib hidden imports: %s", sorted(_hidden_imports)) - return sorted(_hidden_imports) - - -def _collect_python_stdlib_dynamic_libraries(): - """ - Collect all of the standard library(most of it) dynamic libraries. 
- """ - _dynamic_libs = set() - - stdlib = _python_stdlib_path() - if not stdlib.exists(): - log.error("The path '%s' does not exist", stdlib) - return list(_dynamic_libs) - - log.info( - "Collecting dynamic libraries from the python standard library at: %s", - stdlib, - ) - for path in stdlib.glob("*"): - if not path.is_dir(): - continue - if path.name in ( - "__pycache__", - "site-packages", - "test", - "turtledemo", - "ensurepip", - ): - continue - if path.joinpath("__init__.py").is_file(): - log.info("Collecting: %s", path.name) - try: - _module_dynamic_libs = hooks.collect_dynamic_libs(path.name, path.name) - log.debug("Collected(%s): %s", path.name, _module_dynamic_libs) - _dynamic_libs.update(set(_module_dynamic_libs)) - except Exception as exc: # pylint: disable=broad-except - log.error("Failed to collect %r: %s", path.name, exc) - log.info("Collected stdlib dynamic libs: %s", sorted(_dynamic_libs)) - return sorted(_dynamic_libs) - - -def _filter_submodules(name): - # this should never happen, but serves as a place-holder for when/if we have to filter - if not name.startswith("salt"): - return False - return True - - -# Collect Salt datas, binaries(should be None) and hidden imports -SALT_DATAS, SALT_BINARIES, SALT_HIDDENIMPORTS = hooks.collect_all( - "salt", - include_py_files=True, - filter_submodules=_filter_submodules, -) - -# In case there's salt-extensions installed, collect their datas and hidden imports -SALT_EXTENSIONS_DATAS, SALT_EXTENSIONS_HIDDENIMPORTS = hooks.collect_entry_point( - "salt.loader" -) - - -# PyInstaller attributes -datas = sorted(set(SALT_DATAS + SALT_EXTENSIONS_DATAS)) -binaries = sorted(set(SALT_BINARIES)) -hiddenimports = sorted( - set( - SALT_HIDDENIMPORTS - + SALT_EXTENSIONS_HIDDENIMPORTS - + _collect_python_stdlib_hidden_imports() - ) -) diff --git a/salt/utils/pyinstaller/rthooks.dat b/salt/utils/pyinstaller/rthooks.dat deleted file mode 100644 index b54f09a1df40..000000000000 --- a/salt/utils/pyinstaller/rthooks.dat +++ 
/dev/null @@ -1,4 +0,0 @@ -{ - "subprocess": ["pyi_rth_subprocess.py"], - "salt.utils.vt": ["pyi_rth_salt.utils.vt.py"], -} diff --git a/salt/utils/pyinstaller/rthooks/__init__.py b/salt/utils/pyinstaller/rthooks/__init__.py deleted file mode 100644 index 00c319dfa303..000000000000 --- a/salt/utils/pyinstaller/rthooks/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -""" -This package contains support code to package Salt with PyInstaller. -""" diff --git a/salt/utils/pyinstaller/rthooks/_overrides.py b/salt/utils/pyinstaller/rthooks/_overrides.py deleted file mode 100644 index ad422aeb7ed3..000000000000 --- a/salt/utils/pyinstaller/rthooks/_overrides.py +++ /dev/null @@ -1,84 +0,0 @@ -""" -This package contains the runtime hooks support code for when Salt is pacakged with PyInstaller. -""" -import io -import logging -import os -import subprocess -import sys - -import salt.utils.vt - -log = logging.getLogger(__name__) - - -def clean_pyinstaller_vars(environ): - """ - Restore or cleanup PyInstaller specific environent variable behavior. - """ - if environ is None: - environ = dict(os.environ) - # When Salt is bundled with tiamat, it MUST NOT contain LD_LIBRARY_PATH - # when shelling out, or, at least the value of LD_LIBRARY_PATH set by - # pyinstaller. - # See: - # https://pyinstaller.readthedocs.io/en/stable/runtime-information.html#ld-library-path-libpath-considerations - for varname in ("LD_LIBRARY_PATH", "LIBPATH"): - original_varname = "{}_ORIG".format(varname) - if varname in environ and environ[varname] == sys._MEIPASS: - # If we find the varname on the user provided environment we need to at least - # check if it's not the value set by PyInstaller, if it is, remove it. - log.debug( - "User provided environment variable %r with value %r which is " - "the value that PyInstaller set's. 
Removing it", - varname, - environ[varname], - ) - environ.pop(varname) - - if original_varname in environ and varname not in environ: - # We found the original variable set by PyInstaller, and we didn't find - # any user provided variable, let's rename it. - log.debug( - "The %r variable was found in the passed environment, renaming it to %r", - original_varname, - varname, - ) - environ[varname] = environ.pop(original_varname) - - if varname not in environ: - if original_varname in os.environ: - log.debug( - "Renaming environment variable %r to %r", original_varname, varname - ) - environ[varname] = os.environ[original_varname] - elif varname in os.environ: - # Override the system environ variable with an empty one - log.debug("Setting environment variable %r to an empty string", varname) - environ[varname] = "" - return environ - - -class PyinstallerPopen(subprocess.Popen): - def __init__(self, *args, **kwargs): - kwargs["env"] = clean_pyinstaller_vars(kwargs.pop("env", None)) - super().__init__(*args, **kwargs) - - # From https://github.com/pyinstaller/pyinstaller/blob/v5.1/PyInstaller/hooks/rthooks/pyi_rth_subprocess.py - # - # In windowed mode, force any unused pipes (stdin, stdout and stderr) to be DEVNULL instead of inheriting the - # invalid corresponding handles from this parent process. 
- if sys.platform == "win32" and not isinstance(sys.stdout, io.IOBase): - - def _get_handles(self, stdin, stdout, stderr): - stdin, stdout, stderr = ( - subprocess.DEVNULL if pipe is None else pipe - for pipe in (stdin, stdout, stderr) - ) - return super()._get_handles(stdin, stdout, stderr) - - -class PyinstallerTerminal(salt.utils.vt.Terminal): # pylint: disable=abstract-method - def __init__(self, *args, **kwargs): - kwargs["env"] = clean_pyinstaller_vars(kwargs.pop("env", None)) - super().__init__(*args, **kwargs) diff --git a/salt/utils/pyinstaller/rthooks/pyi_rth_salt.utils.vt.py b/salt/utils/pyinstaller/rthooks/pyi_rth_salt.utils.vt.py deleted file mode 100644 index f16a9d954e0f..000000000000 --- a/salt/utils/pyinstaller/rthooks/pyi_rth_salt.utils.vt.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -PyInstaller runtime hook to patch salt.utils.vt.Terminal -""" -import logging - -import salt.utils.vt -from salt.utils.pyinstaller.rthooks._overrides import PyinstallerTerminal - -log = logging.getLogger(__name__) -# Patch salt.utils.vt.Terminal when running within a pyinstalled bundled package -salt.utils.vt.Terminal = PyinstallerTerminal - -log.debug("Replaced 'salt.utils.vt.Terminal' with 'PyinstallerTerminal'") diff --git a/salt/utils/pyinstaller/rthooks/pyi_rth_subprocess.py b/salt/utils/pyinstaller/rthooks/pyi_rth_subprocess.py deleted file mode 100644 index a00ad7fc33b6..000000000000 --- a/salt/utils/pyinstaller/rthooks/pyi_rth_subprocess.py +++ /dev/null @@ -1,13 +0,0 @@ -""" -PyInstaller runtime hook to patch subprocess.Popen -""" -import logging -import subprocess - -from salt.utils.pyinstaller.rthooks._overrides import PyinstallerPopen - -log = logging.getLogger(__name__) -# Patch subprocess.Popen when running within a pyinstalled bundled package -subprocess.Popen = PyinstallerPopen - -log.debug("Replaced 'subprocess.Popen' with 'PyinstallerTerminal'") diff --git a/salt/utils/rsax931.py b/salt/utils/rsax931.py index 9b9d43cc1ee5..fb8a1cbbd74e 100644 --- 
a/salt/utils/rsax931.py +++ b/salt/utils/rsax931.py @@ -24,13 +24,25 @@ def _find_libcrypto(): Find the path (or return the short name) of libcrypto. """ if sys.platform.startswith("win"): - lib = "libeay32" + lib = None + for path in sys.path: + lib = glob.glob(os.path.join(path, "libcrypto*.dll")) + lib = lib[0] if lib else None + if lib: + break + elif salt.utils.platform.is_darwin(): # will look for several different location on the system, # Search in the following order. salts pkg, homebrew, macports, finnally # system. # look in salts pkg install location. lib = glob.glob("/opt/salt/lib/libcrypto.dylib") + + # look in location salt is running from + # this accounts for running from an unpacked + # onedir file + lib = lib or glob.glob("lib/libcrypto.dylib") + # Find library symlinks in Homebrew locations. brew_prefix = os.getenv("HOMEBREW_PREFIX", "/usr/local") lib = lib or glob.glob( diff --git a/salt/utils/schedule.py b/salt/utils/schedule.py index 814c2980d4af..6565dda59e6e 100644 --- a/salt/utils/schedule.py +++ b/salt/utils/schedule.py @@ -315,7 +315,7 @@ def persist(self): exc_info_on_loglevel=logging.DEBUG, ) - def delete_job(self, name, persist=True): + def delete_job(self, name, persist=True, fire_event=True): """ Deletes a job from the scheduler. 
Ignore jobs from pillar """ @@ -325,12 +325,15 @@ def delete_job(self, name, persist=True): elif name in self._get_schedule(include_opts=False): log.warning("Cannot delete job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_delete_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_delete_complete", + ) # remove from self.intervals if name in self.intervals: @@ -349,7 +352,7 @@ def reset(self): self.splay = None self.opts["schedule"] = {} - def delete_job_prefix(self, name, persist=True): + def delete_job_prefix(self, name, persist=True, fire_event=True): """ Deletes a job from the scheduler. 
Ignores jobs from pillar """ @@ -361,12 +364,15 @@ def delete_job_prefix(self, name, persist=True): if job.startswith(name): log.warning("Cannot delete job %s, it's in the pillar!", job) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_delete_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_delete_complete", + ) # remove from self.intervals for job in list(self.intervals.keys()): @@ -376,7 +382,7 @@ def delete_job_prefix(self, name, persist=True): if persist: self.persist() - def add_job(self, data, persist=True): + def add_job(self, data, persist=True, fire_event=True): """ Adds a new job to the scheduler. The format is the same as required in the configuration file. See the docs on how YAML is interpreted into @@ -410,16 +416,19 @@ def add_job(self, data, persist=True): self.opts["schedule"].update(data) # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_add_complete", - ) + if fire_event: + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_add_complete", + ) if persist: self.persist() - def enable_job(self, name, persist=True): + def enable_job(self, name, persist=True, fire_event=True): """ Enable a job in the scheduler. 
Ignores jobs from pillar """ @@ -430,17 +439,20 @@ def enable_job(self, name, persist=True): elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_enabled_job_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_enabled_job_complete", + ) if persist: self.persist() - def disable_job(self, name, persist=True): + def disable_job(self, name, persist=True, fire_event=True): """ Disable a job in the scheduler. Ignores jobs from pillar """ @@ -451,23 +463,26 @@ def disable_job(self, name, persist=True): elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - # Fire the complete event back along with updated list of schedule - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_disabled_job_complete", - ) + if fire_event: + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + # Fire the complete event back along with updated list of schedule + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_disabled_job_complete", + ) if persist: self.persist() - def modify_job(self, name, schedule, persist=True): + def modify_job(self, name, schedule, persist=True, fire_event=True): """ Modify a job in the scheduler. 
Ignores jobs from pillar """ # ensure job exists, then replace it if name in self.opts["schedule"]: - self.delete_job(name, persist) + self.delete_job(name, persist, fire_event) elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) return @@ -511,34 +526,40 @@ def run_job(self, name): log.info("Running Job: %s", name) self._run_job(func, data) - def enable_schedule(self, persist=True): + def enable_schedule(self, persist=True, fire_event=True): """ Enable the scheduler. """ self.opts["schedule"]["enabled"] = True - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_enabled_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_enabled_complete", + ) if persist: self.persist() - def disable_schedule(self, persist=True): + def disable_schedule(self, persist=True, fire_event=True): """ Disable the scheduler. 
""" self.opts["schedule"]["enabled"] = False - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_disabled_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_disabled_complete", + ) if persist: self.persist() @@ -554,7 +575,7 @@ def reload(self, schedule): schedule = schedule["schedule"] self.opts.setdefault("schedule", {}).update(schedule) - def list(self, where): + def list(self, where, fire_event=True): """ List the current schedule items """ @@ -565,24 +586,32 @@ def list(self, where): else: schedule = self._get_schedule() - # Fire the complete event back along with the list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": schedule}, - tag="/salt/minion/minion_schedule_list_complete", - ) + if fire_event: + # Fire the complete event back along with the list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": schedule}, + tag="/salt/minion/minion_schedule_list_complete", + ) - def save_schedule(self): + def save_schedule(self, fire_event=True): """ Save the current schedule """ self.persist() - # Fire the complete event back along with the list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event({"complete": True}, tag="/salt/minion/minion_schedule_saved") + if fire_event: + # Fire the complete event back along with the list of schedule + with 
salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True}, tag="/salt/minion/minion_schedule_saved" + ) - def postpone_job(self, name, data): + def postpone_job(self, name, data, fire_event=True): """ Postpone a job in the scheduler. Ignores jobs from pillar @@ -608,14 +637,17 @@ def postpone_job(self, name, data): elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_postpone_job_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_postpone_job_complete", + ) - def skip_job(self, name, data): + def skip_job(self, name, data, fire_event=True): """ Skip a job at a specific time in the scheduler. 
Ignores jobs from pillar @@ -634,14 +666,17 @@ def skip_job(self, name, data): elif name in self._get_schedule(include_opts=False): log.warning("Cannot modify job %s, it's in the pillar!", name) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "schedule": self._get_schedule()}, - tag="/salt/minion/minion_schedule_skip_job_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "schedule": self._get_schedule()}, + tag="/salt/minion/minion_schedule_skip_job_complete", + ) - def get_next_fire_time(self, name, fmt="%Y-%m-%dT%H:%M:%S"): + def get_next_fire_time(self, name, fmt="%Y-%m-%dT%H:%M:%S", fire_event=True): """ Return the next fire time for the specified job """ @@ -653,12 +688,15 @@ def get_next_fire_time(self, name, fmt="%Y-%m-%dT%H:%M:%S"): if _next_fire_time: _next_fire_time = _next_fire_time.strftime(fmt) - # Fire the complete event back along with updated list of schedule - with salt.utils.event.get_event("minion", opts=self.opts, listen=False) as evt: - evt.fire_event( - {"complete": True, "next_fire_time": _next_fire_time}, - tag="/salt/minion/minion_schedule_next_fire_time_complete", - ) + if fire_event: + # Fire the complete event back along with updated list of schedule + with salt.utils.event.get_event( + "minion", opts=self.opts, listen=False + ) as evt: + evt.fire_event( + {"complete": True, "next_fire_time": _next_fire_time}, + tag="/salt/minion/minion_schedule_next_fire_time_complete", + ) def job_status(self, name, fire_event=False): """ diff --git a/salt/utils/templates.py b/salt/utils/templates.py index 2c2f1aa58f0f..4a8adf2a14f2 100644 --- a/salt/utils/templates.py +++ b/salt/utils/templates.py @@ -362,159 +362,169 @@ def 
render_jinja_tmpl(tmplstr, context, tmplpath=None): elif tmplstr.endswith("\n"): newline = "\n" - if not saltenv: - if tmplpath: - loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath)) - else: - loader = salt.utils.jinja.SaltCacheLoader( - opts, - saltenv, - pillar_rend=context.get("_pillar_rend", False), - _file_client=file_client, - ) + try: + if not saltenv: + if tmplpath: + loader = jinja2.FileSystemLoader(os.path.dirname(tmplpath)) + else: + loader = salt.utils.jinja.SaltCacheLoader( + opts, + saltenv, + pillar_rend=context.get("_pillar_rend", False), + _file_client=file_client, + ) - env_args = {"extensions": [], "loader": loader} - - if hasattr(jinja2.ext, "with_"): - env_args["extensions"].append("jinja2.ext.with_") - if hasattr(jinja2.ext, "do"): - env_args["extensions"].append("jinja2.ext.do") - if hasattr(jinja2.ext, "loopcontrols"): - env_args["extensions"].append("jinja2.ext.loopcontrols") - env_args["extensions"].append(salt.utils.jinja.SerializerExtension) - - opt_jinja_env = opts.get("jinja_env", {}) - opt_jinja_sls_env = opts.get("jinja_sls_env", {}) - - opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {} - opt_jinja_sls_env = opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {} - - # Pass through trim_blocks and lstrip_blocks Jinja parameters - # trim_blocks removes newlines around Jinja blocks - # lstrip_blocks strips tabs and spaces from the beginning of - # line to the start of a block. 
- if opts.get("jinja_trim_blocks", False): - log.debug("Jinja2 trim_blocks is enabled") - log.warning( - "jinja_trim_blocks is deprecated and will be removed in a future release," - " please use jinja_env and/or jinja_sls_env instead" - ) - opt_jinja_env["trim_blocks"] = True - opt_jinja_sls_env["trim_blocks"] = True - if opts.get("jinja_lstrip_blocks", False): - log.debug("Jinja2 lstrip_blocks is enabled") - log.warning( - "jinja_lstrip_blocks is deprecated and will be removed in a future release," - " please use jinja_env and/or jinja_sls_env instead" - ) - opt_jinja_env["lstrip_blocks"] = True - opt_jinja_sls_env["lstrip_blocks"] = True - - def opt_jinja_env_helper(opts, optname): - for k, v in opts.items(): - k = k.lower() - if hasattr(jinja2.defaults, k.upper()): - log.debug("Jinja2 environment %s was set to %s by %s", k, v, optname) - env_args[k] = v - else: - log.warning("Jinja2 environment %s is not recognized", k) + env_args = {"extensions": [], "loader": loader} - if "sls" in context and context["sls"] != "": - opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env") - else: - opt_jinja_env_helper(opt_jinja_env, "jinja_env") + if hasattr(jinja2.ext, "with_"): + env_args["extensions"].append("jinja2.ext.with_") + if hasattr(jinja2.ext, "do"): + env_args["extensions"].append("jinja2.ext.do") + if hasattr(jinja2.ext, "loopcontrols"): + env_args["extensions"].append("jinja2.ext.loopcontrols") + env_args["extensions"].append(salt.utils.jinja.SerializerExtension) - if opts.get("allow_undefined", False): - jinja_env = jinja2.sandbox.SandboxedEnvironment(**env_args) - else: - jinja_env = jinja2.sandbox.SandboxedEnvironment( - undefined=jinja2.StrictUndefined, **env_args - ) + opt_jinja_env = opts.get("jinja_env", {}) + opt_jinja_sls_env = opts.get("jinja_sls_env", {}) - indent_filter = jinja_env.filters.get("indent") - jinja_env.tests.update(JinjaTest.salt_jinja_tests) - jinja_env.filters.update(JinjaFilter.salt_jinja_filters) - if salt.utils.jinja.JINJA_VERSION 
>= Version("2.11"): - # Use the existing indent filter on Jinja versions where it's not broken - jinja_env.filters["indent"] = indent_filter - jinja_env.globals.update(JinjaGlobal.salt_jinja_globals) - - # globals - jinja_env.globals["odict"] = OrderedDict - jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context - - jinja_env.tests["list"] = salt.utils.data.is_list - - decoded_context = {} - for key, value in context.items(): - if not isinstance(value, str): - if isinstance(value, NamedLoaderContext): - decoded_context[key] = value.value() - else: - decoded_context[key] = value - continue + opt_jinja_env = opt_jinja_env if isinstance(opt_jinja_env, dict) else {} + opt_jinja_sls_env = ( + opt_jinja_sls_env if isinstance(opt_jinja_sls_env, dict) else {} + ) - try: - decoded_context[key] = salt.utils.stringutils.to_unicode( - value, encoding=SLS_ENCODING + # Pass through trim_blocks and lstrip_blocks Jinja parameters + # trim_blocks removes newlines around Jinja blocks + # lstrip_blocks strips tabs and spaces from the beginning of + # line to the start of a block. 
+ if opts.get("jinja_trim_blocks", False): + log.debug("Jinja2 trim_blocks is enabled") + log.warning( + "jinja_trim_blocks is deprecated and will be removed in a future release," + " please use jinja_env and/or jinja_sls_env instead" ) - except UnicodeDecodeError as ex: - log.debug( - "Failed to decode using default encoding (%s), trying system encoding", - SLS_ENCODING, + opt_jinja_env["trim_blocks"] = True + opt_jinja_sls_env["trim_blocks"] = True + if opts.get("jinja_lstrip_blocks", False): + log.debug("Jinja2 lstrip_blocks is enabled") + log.warning( + "jinja_lstrip_blocks is deprecated and will be removed in a future release," + " please use jinja_env and/or jinja_sls_env instead" ) - decoded_context[key] = salt.utils.data.decode(value) + opt_jinja_env["lstrip_blocks"] = True + opt_jinja_sls_env["lstrip_blocks"] = True + + def opt_jinja_env_helper(opts, optname): + for k, v in opts.items(): + k = k.lower() + if hasattr(jinja2.defaults, k.upper()): + log.debug( + "Jinja2 environment %s was set to %s by %s", k, v, optname + ) + env_args[k] = v + else: + log.warning("Jinja2 environment %s is not recognized", k) - jinja_env.globals.update(decoded_context) - try: - template = jinja_env.from_string(tmplstr) - output = template.render(**decoded_context) - except jinja2.exceptions.UndefinedError as exc: - trace = traceback.extract_tb(sys.exc_info()[2]) - line, out = _get_jinja_error(trace, context=decoded_context) - if not line: - tmplstr = "" - raise SaltRenderError("Jinja variable {}{}".format(exc, out), line, tmplstr) - except ( - jinja2.exceptions.TemplateRuntimeError, - jinja2.exceptions.TemplateSyntaxError, - jinja2.exceptions.SecurityError, - ) as exc: - trace = traceback.extract_tb(sys.exc_info()[2]) - line, out = _get_jinja_error(trace, context=decoded_context) - if not line: - tmplstr = "" - raise SaltRenderError( - "Jinja syntax error: {}{}".format(exc, out), line, tmplstr - ) - except (SaltInvocationError, CommandExecutionError) as exc: - trace = 
traceback.extract_tb(sys.exc_info()[2]) - line, out = _get_jinja_error(trace, context=decoded_context) - if not line: - tmplstr = "" - raise SaltRenderError( - "Problem running salt function in Jinja template: {}{}".format(exc, out), - line, - tmplstr, - ) - except Exception as exc: # pylint: disable=broad-except - tracestr = traceback.format_exc() - trace = traceback.extract_tb(sys.exc_info()[2]) - line, out = _get_jinja_error(trace, context=decoded_context) - if not line: - tmplstr = "" + if "sls" in context and context["sls"] != "": + opt_jinja_env_helper(opt_jinja_sls_env, "jinja_sls_env") else: - tmplstr += "\n{}".format(tracestr) - log.debug("Jinja Error") - log.debug("Exception:", exc_info=True) - log.debug("Out: %s", out) - log.debug("Line: %s", line) - log.debug("TmplStr: %s", tmplstr) - log.debug("TraceStr: %s", tracestr) + opt_jinja_env_helper(opt_jinja_env, "jinja_env") - raise SaltRenderError( - "Jinja error: {}{}".format(exc, out), line, tmplstr, trace=tracestr - ) + if opts.get("allow_undefined", False): + jinja_env = jinja2.sandbox.SandboxedEnvironment(**env_args) + else: + jinja_env = jinja2.sandbox.SandboxedEnvironment( + undefined=jinja2.StrictUndefined, **env_args + ) + + indent_filter = jinja_env.filters.get("indent") + jinja_env.tests.update(JinjaTest.salt_jinja_tests) + jinja_env.filters.update(JinjaFilter.salt_jinja_filters) + if salt.utils.jinja.JINJA_VERSION >= Version("2.11"): + # Use the existing indent filter on Jinja versions where it's not broken + jinja_env.filters["indent"] = indent_filter + jinja_env.globals.update(JinjaGlobal.salt_jinja_globals) + + # globals + jinja_env.globals["odict"] = OrderedDict + jinja_env.globals["show_full_context"] = salt.utils.jinja.show_full_context + + jinja_env.tests["list"] = salt.utils.data.is_list + + decoded_context = {} + for key, value in context.items(): + if not isinstance(value, str): + if isinstance(value, NamedLoaderContext): + decoded_context[key] = value.value() + else: + 
decoded_context[key] = value + continue + + try: + decoded_context[key] = salt.utils.stringutils.to_unicode( + value, encoding=SLS_ENCODING + ) + except UnicodeDecodeError: + log.debug( + "Failed to decode using default encoding (%s), trying system encoding", + SLS_ENCODING, + ) + decoded_context[key] = salt.utils.data.decode(value) + + jinja_env.globals.update(decoded_context) + try: + template = jinja_env.from_string(tmplstr) + output = template.render(**decoded_context) + except jinja2.exceptions.UndefinedError as exc: + trace = traceback.extract_tb(sys.exc_info()[2]) + line, out = _get_jinja_error(trace, context=decoded_context) + if not line: + tmplstr = "" + raise SaltRenderError("Jinja variable {}{}".format(exc, out), line, tmplstr) + except ( + jinja2.exceptions.TemplateRuntimeError, + jinja2.exceptions.TemplateSyntaxError, + jinja2.exceptions.SecurityError, + ) as exc: + trace = traceback.extract_tb(sys.exc_info()[2]) + line, out = _get_jinja_error(trace, context=decoded_context) + if not line: + tmplstr = "" + raise SaltRenderError( + "Jinja syntax error: {}{}".format(exc, out), line, tmplstr + ) + except (SaltInvocationError, CommandExecutionError) as exc: + trace = traceback.extract_tb(sys.exc_info()[2]) + line, out = _get_jinja_error(trace, context=decoded_context) + if not line: + tmplstr = "" + raise SaltRenderError( + "Problem running salt function in Jinja template: {}{}".format( + exc, out + ), + line, + tmplstr, + ) + except Exception as exc: # pylint: disable=broad-except + tracestr = traceback.format_exc() + trace = traceback.extract_tb(sys.exc_info()[2]) + line, out = _get_jinja_error(trace, context=decoded_context) + if not line: + tmplstr = "" + else: + tmplstr += "\n{}".format(tracestr) + log.debug("Jinja Error") + log.debug("Exception:", exc_info=True) + log.debug("Out: %s", out) + log.debug("Line: %s", line) + log.debug("TmplStr: %s", tmplstr) + log.debug("TraceStr: %s", tracestr) + + raise SaltRenderError( + "Jinja error: 
{}{}".format(exc, out), line, tmplstr, trace=tracestr + ) + finally: + if loader and isinstance(loader, salt.utils.jinja.SaltCacheLoader): + loader.destroy() # Workaround a bug in Jinja that removes the final newline # (https://github.com/mitsuhiko/jinja2/issues/75) @@ -564,6 +574,9 @@ def render_mako_tmpl(tmplstr, context, tmplpath=None): ).render(**context) except Exception: # pylint: disable=broad-except raise SaltRenderError(mako.exceptions.text_error_template().render()) + finally: + if lookup and isinstance(lookup, SaltMakoTemplateLookup): + lookup.destroy() def render_wempy_tmpl(tmplstr, context, tmplpath=None): diff --git a/salt/utils/user.py b/salt/utils/user.py index 97636674430a..c9c12f5ca648 100644 --- a/salt/utils/user.py +++ b/salt/utils/user.py @@ -31,13 +31,6 @@ except ImportError: HAS_GRP = False -try: - import pysss - - HAS_PYSSS = True -except ImportError: - HAS_PYSSS = False - try: import salt.utils.win_functions @@ -289,30 +282,35 @@ def get_group_list(user, include_default=True): return [] group_names = None ugroups = set() - if hasattr(os, "getgrouplist"): - # Try os.getgrouplist, available in python >= 3.3 - log.trace("Trying os.getgrouplist for '%s'", user) - try: - user_group_list = os.getgrouplist(user, pwd.getpwnam(user).pw_gid) - group_names = [ - _group.gr_name - for _group in grp.getgrall() - if _group.gr_gid in user_group_list - ] - except Exception: # pylint: disable=broad-except - pass - elif HAS_PYSSS: - # Try pysss.getgrouplist - log.trace("Trying pysss.getgrouplist for '%s'", user) - try: - group_names = list(pysss.getgrouplist(user)) - except Exception: # pylint: disable=broad-except - pass + # Try os.getgrouplist, available in python >= 3.3 + log.trace("Trying os.getgrouplist for '%s'", user) + try: + user_group_list = sorted(os.getgrouplist(user, pwd.getpwnam(user).pw_gid)) + local_grall = _getgrall() + local_gids = sorted(lgrp.gr_gid for lgrp in local_grall) + max_idx = -1 + local_max = local_gids[max_idx] + while local_max 
>= 65000: + max_idx -= 1 + local_max = local_gids[max_idx] + user_group_list_local = [lgrp for lgrp in user_group_list if lgrp <= local_max] + user_group_list_remote = [rgrp for rgrp in user_group_list if rgrp > local_max] + local_group_names = [ + _group.gr_name + for _group in local_grall + if _group.gr_gid in user_group_list_local + ] + remote_group_names = [ + grp.getgrgid(group_id).gr_name for group_id in user_group_list_remote + ] + group_names = local_group_names + remote_group_names + except Exception: # pylint: disable=broad-except + pass if group_names is None: # Fall back to generic code # Include the user's default group to match behavior of - # os.getgrouplist() and pysss.getgrouplist() + # os.getgrouplist() log.trace("Trying generic group list for '%s'", user) group_names = [g.gr_name for g in grp.getgrall() if user in g.gr_mem] try: @@ -385,3 +383,24 @@ def get_gid(group=None): return grp.getgrnam(group).gr_gid except KeyError: return None + + +def _getgrall(root=None): + """ + Alternative implemetantion for getgrall, that uses only /etc/group + """ + ret = [] + root = "/" if not root else root + etc_group = os.path.join(root, "etc/group") + with salt.utils.files.fopen(etc_group) as fp_: + for line in fp_: + line = salt.utils.stringutils.to_unicode(line) + comps = line.strip().split(":") + # Generate a getgrall compatible output + comps[2] = int(comps[2]) + if comps[3]: + comps[3] = [mem.strip() for mem in comps[3].split(",")] + else: + comps[3] = [] + ret.append(grp.struct_group(comps)) + return ret diff --git a/salt/utils/verify.py b/salt/utils/verify.py index 879128f23121..ab468dd04c14 100644 --- a/salt/utils/verify.py +++ b/salt/utils/verify.py @@ -557,6 +557,17 @@ def safe_py_code(code): return True +def insecure_log(): + """ + Return the insecure logs types + """ + insecure = [] + for level, value in LOG_LEVELS.items(): + if value < LOG_LEVELS.get("info", 20): + insecure.append(level) + return insecure + + def verify_log(opts): """ If an 
insecre logging configuration is found, show a warning diff --git a/salt/utils/win_dacl.py b/salt/utils/win_dacl.py index ec16f7a22c68..2e9fe87b8116 100644 --- a/salt/utils/win_dacl.py +++ b/salt/utils/win_dacl.py @@ -2349,7 +2349,7 @@ def check_perms( deny_perms=None, inheritance=True, reset=False, - test_mode=None, + test_mode=False, ): """ Check owner and permissions for the passed directory. This function checks @@ -2429,9 +2429,6 @@ def check_perms( } }) """ - if test_mode is None: - test_mode = __opts__["test"] - # Validate obj_type if obj_type.lower() not in flags().obj_type: raise SaltInvocationError('Invalid "obj_type" passed: {}'.format(obj_type)) diff --git a/salt/utils/win_lgpo_reg.py b/salt/utils/win_lgpo_reg.py index da4c4377631d..8144d87f5de2 100644 --- a/salt/utils/win_lgpo_reg.py +++ b/salt/utils/win_lgpo_reg.py @@ -67,13 +67,11 @@ def search_reg_pol(search_string, policy_data): gpt.ini Args: - search_string (str): The string to search for policy_data (str): The data to be searched Returns: - bool: ``True`` if the regex search_string is found, otherwise ``False`` """ if policy_data: @@ -91,7 +89,6 @@ def read_reg_pol_file(reg_pol_path): Helper function to read the content of the Registry.pol file Args: - reg_pol_path (str): The path to the Registry.pol file Returns: @@ -99,7 +96,7 @@ def read_reg_pol_file(reg_pol_path): """ return_data = None if os.path.exists(reg_pol_path): - log.debug("LGPO_REG Utils: Reading from %s", reg_pol_path) + log.debug("LGPO_REG Util: Reading from %s", reg_pol_path) with salt.utils.files.fopen(reg_pol_path, "rb") as pol_file: return_data = pol_file.read() return return_data @@ -120,7 +117,6 @@ def write_reg_pol_data( to be processed Args: - data_to_write (bytes): Data to write into the user/machine registry.pol file @@ -132,6 +128,12 @@ def write_reg_pol_data( gpt_extension_guid (str): ADMX registry extension guid for the class gpt_ini_path (str): The path to the gpt.ini file + + Returns: + bool: True if successful + + 
Raises: + CommandExecutionError: On failure """ # Write Registry.pol file if not os.path.exists(policy_file_path): @@ -254,6 +256,7 @@ def write_reg_pol_data( ) log.exception(msg) raise CommandExecutionError(msg) + return True def reg_pol_to_dict(policy_data): @@ -273,6 +276,12 @@ def reg_pol_to_dict(policy_data): # https://learn.microsoft.com/en-us/openspecs/windows_protocols/ms-gpreg/5c092c22-bf6b-4e7f-b180-b20743d368f5 reg_pol_header = REG_POL_HEADER.encode("utf-16-le") + + # If policy_data is None, that means the Registry.pol file is missing + # So, we'll create it + if policy_data is None: + policy_data = reg_pol_header + if not policy_data.startswith(reg_pol_header): msg = "LGPO_REG Util: Invalid Header. Registry.pol may be corrupt" raise CommandExecutionError(msg) diff --git a/salt/utils/win_network.py b/salt/utils/win_network.py index 702f29702f32..eeae8fc091b7 100644 --- a/salt/utils/win_network.py +++ b/salt/utils/win_network.py @@ -20,6 +20,7 @@ """ # https://docs.microsoft.com/en-us/dotnet/api/system.net.networkinformation.networkinterface.getallnetworkinterfaces?view=netframework-4.7.2 +import logging import platform import salt.utils.win_reg @@ -27,6 +28,8 @@ IS_WINDOWS = platform.system() == "Windows" +log = logging.getLogger(__name__) + __virtualname__ = "win_network" if IS_WINDOWS: @@ -53,8 +56,23 @@ import salt.utils.winapi else: # This uses .NET to get network settings and is faster than WMI - import clr - from System.Net import NetworkInformation + try: + # pylint: disable=unused-import + import clr + + # pylint: enable=unused-import + from System.Net import NetworkInformation + except RuntimeError: + # In some environments, using the Relenv OneDir package, we can't + # load pythonnet. Uninstalling and reinstalling pythonnet fixes the + # issue, but it is a manual step. Until we figure it out, we are + # just going to fall back to WMI. 
I was able to reproduce a failing + # system using Windows 10 Home Edition + log.debug("Failed to load pythonnet. Falling back to WMI") + USE_WMI = True + import wmi + + import salt.utils.winapi # TODO: Should we deprecate support for pythonnet 2.5.2, these enumerations can # TODO: be deleted @@ -312,7 +330,6 @@ def _get_ip_wins_info(i_face): def _get_network_interfaces(): - clr.AddReference("System.Net") return NetworkInformation.NetworkInterface.GetAllNetworkInterfaces() diff --git a/salt/utils/win_reg.py b/salt/utils/win_reg.py index cde01a9556a7..74aa17b5d811 100644 --- a/salt/utils/win_reg.py +++ b/salt/utils/win_reg.py @@ -527,6 +527,7 @@ def read_value(hive, key, vname=None, use_32bit_registry=False): "key": local_key, "vname": local_vname, "vdata": None, + "vtype": None, "success": True, } diff --git a/salt/utils/x509.py b/salt/utils/x509.py index 5b2ae15882b2..37ee5155f738 100644 --- a/salt/utils/x509.py +++ b/salt/utils/x509.py @@ -1683,19 +1683,40 @@ def _deserialize_openssl_confstring(conf, multiple=False): def _parse_general_names(val): - def idna_encode(val, allow_leading_dot=False): - if HAS_IDNA: - # A leading dot is allowed in some values. - # idna complains about it not being a valid domain name - has_dot = False - if allow_leading_dot: - has_dot = val.startswith(".") - val = val.lstrip(".") - ret = idna.encode(val).decode() + def idna_encode(val, allow_leading_dot=False, allow_wildcard=False): + # A leading dot is allowed in some values (nameConstraints). 
+ # idna complains about it not being a valid domain name + try: + has_dot = val.startswith(".") + except AttributeError: + raise SaltInvocationError( + f"Expected string value, got {type(val).__name__}: `{val}`" + ) + if has_dot: + if not allow_leading_dot: + raise CommandExecutionError( + "Leading dots are not allowed in this context" + ) + val = val.lstrip(".") + has_wildcard = val.startswith("*.") + if has_wildcard: + if not allow_wildcard: + raise CommandExecutionError("Wildcards are not allowed in this context") if has_dot: - return f".{ret}" - return ret + raise CommandExecutionError( + "Wildcards and leading dots cannot be present together" + ) + val = val[2:] + if val.startswith("."): + raise CommandExecutionError("Empty label") + if HAS_IDNA: + try: + ret = idna.encode(val).decode() + except idna.IDNAError as err: + raise CommandExecutionError(str(err)) from err else: + if not val: + raise CommandExecutionError("Empty domain") try: val.encode(encoding="ascii") except UnicodeEncodeError as err: @@ -1703,6 +1724,20 @@ def idna_encode(val, allow_leading_dot=False): "Cannot encode non-ASCII strings to internationalized domain " "name format, missing library: idna" ) from err + for elem in val.split("."): + if not elem: + raise CommandExecutionError("Empty Label") + invalid = re.search(r"[^A-Za-z\d\-\.]", elem) + if invalid is not None: + raise CommandExecutionError( + f"Codepoint U+00{hex(ord(invalid.group()))[2:]} at position {invalid.end()} of '{val}' not allowed" + ) + ret = val + if has_dot: + return f".{ret}" + if has_wildcard: + return f"*.{ret}" + return ret valid_types = { "email": cx509.general_name.RFC822Name, @@ -1738,6 +1773,7 @@ def idna_encode(val, allow_leading_dot=False): domain = idna_encode(domain) v = "@".join((user, domain)) else: + # nameConstraints v = idna_encode(splits[0], allow_leading_dot=True) elif typ == "uri": url = urlparse(v) @@ -1747,7 +1783,7 @@ def idna_encode(val, allow_leading_dot=False): (url.scheme, domain, url.path, 
url.params, url.query, url.fragment) ) elif typ == "dns": - v = idna_encode(v, allow_leading_dot=True) + v = idna_encode(v, allow_leading_dot=True, allow_wildcard=True) elif typ == "othername": raise SaltInvocationError("otherName is currently not implemented") if typ in valid_types: diff --git a/salt/utils/zfs.py b/salt/utils/zfs.py index 98546298e7b0..3c38cecc7aa6 100644 --- a/salt/utils/zfs.py +++ b/salt/utils/zfs.py @@ -755,6 +755,3 @@ def parse_command_result(res, label=None): del ret["error"] return ret - - -# vim: tabstop=4 expandtab shiftwidth=4 softtabstop=4 diff --git a/salt/version.py b/salt/version.py index 88f95febe0f3..ab852acca0b3 100644 --- a/salt/version.py +++ b/salt/version.py @@ -1,6 +1,7 @@ """ Set up the version of Salt """ +import argparse import operator import os import platform @@ -77,109 +78,109 @@ class SaltVersionsInfo(type): ALUMINIUM = SaltVersion("Aluminium" , info=3003, released=True) SILICON = SaltVersion("Silicon" , info=3004, released=True) PHOSPHORUS = SaltVersion("Phosphorus" , info=3005, released=True) - SULFUR = SaltVersion("Sulfur" , info=(3006, 0)) - CHLORINE = SaltVersion("Chlorine" , info=(3007, 0)) - ARGON = SaltVersion("Argon" , info=(3008, 0)) - POTASSIUM = SaltVersion("Potassium" , info=(3009, 0)) - CALCIUM = SaltVersion("Calcium" , info=(3010, 0)) - SCANDIUM = SaltVersion("Scandium" , info=(3011, 0)) - TITANIUM = SaltVersion("Titanium" , info=(3012, 0)) - VANADIUM = SaltVersion("Vanadium" , info=(3013, 0)) - CHROMIUM = SaltVersion("Chromium" , info=(3014, 0)) - MANGANESE = SaltVersion("Manganese" , info=(3015, 0)) - IRON = SaltVersion("Iron" , info=(3016, 0)) - COBALT = SaltVersion("Cobalt" , info=(3017, 0)) - NICKEL = SaltVersion("Nickel" , info=(3018, 0)) - COPPER = SaltVersion("Copper" , info=(3019, 0)) - ZINC = SaltVersion("Zinc" , info=(3020, 0)) - GALLIUM = SaltVersion("Gallium" , info=(3021, 0)) - GERMANIUM = SaltVersion("Germanium" , info=(3022, 0)) - ARSENIC = SaltVersion("Arsenic" , info=(3023, 0)) - 
SELENIUM = SaltVersion("Selenium" , info=(3024, 0)) - BROMINE = SaltVersion("Bromine" , info=(3025, 0)) - KRYPTON = SaltVersion("Krypton" , info=(3026, 0)) - RUBIDIUM = SaltVersion("Rubidium" , info=(3027, 0)) - STRONTIUM = SaltVersion("Strontium" , info=(3028, 0)) - YTTRIUM = SaltVersion("Yttrium" , info=(3029, 0)) - ZIRCONIUM = SaltVersion("Zirconium" , info=(3030, 0)) - NIOBIUM = SaltVersion("Niobium" , info=(3031, 0)) - MOLYBDENUM = SaltVersion("Molybdenum" , info=(3032, 0)) - TECHNETIUM = SaltVersion("Technetium" , info=(3033, 0)) - RUTHENIUM = SaltVersion("Ruthenium" , info=(3034, 0)) - RHODIUM = SaltVersion("Rhodium" , info=(3035, 0)) - PALLADIUM = SaltVersion("Palladium" , info=(3036, 0)) - SILVER = SaltVersion("Silver" , info=(3037, 0)) - CADMIUM = SaltVersion("Cadmium" , info=(3038, 0)) - INDIUM = SaltVersion("Indium" , info=(3039, 0)) - TIN = SaltVersion("Tin" , info=(3040, 0)) - ANTIMONY = SaltVersion("Antimony" , info=(3041, 0)) - TELLURIUM = SaltVersion("Tellurium" , info=(3042, 0)) - IODINE = SaltVersion("Iodine" , info=(3043, 0)) - XENON = SaltVersion("Xenon" , info=(3044, 0)) - CESIUM = SaltVersion("Cesium" , info=(3045, 0)) - BARIUM = SaltVersion("Barium" , info=(3046, 0)) - LANTHANUM = SaltVersion("Lanthanum" , info=(3047, 0)) - CERIUM = SaltVersion("Cerium" , info=(3048, 0)) - PRASEODYMIUM = SaltVersion("Praseodymium" , info=(3049, 0)) - NEODYMIUM = SaltVersion("Neodymium" , info=(3050, 0)) - PROMETHIUM = SaltVersion("Promethium" , info=(3051, 0)) - SAMARIUM = SaltVersion("Samarium" , info=(3052, 0)) - EUROPIUM = SaltVersion("Europium" , info=(3053, 0)) - GADOLINIUM = SaltVersion("Gadolinium" , info=(3054, 0)) - TERBIUM = SaltVersion("Terbium" , info=(3055, 0)) - DYSPROSIUM = SaltVersion("Dysprosium" , info=(3056, 0)) - HOLMIUM = SaltVersion("Holmium" , info=(3057, 0)) - ERBIUM = SaltVersion("Erbium" , info=(3058, 0)) - THULIUM = SaltVersion("Thulium" , info=(3059, 0)) - YTTERBIUM = SaltVersion("Ytterbium" , info=(3060, 0)) - LUTETIUM = 
SaltVersion("Lutetium" , info=(3061, 0)) - HAFNIUM = SaltVersion("Hafnium" , info=(3062, 0)) - TANTALUM = SaltVersion("Tantalum" , info=(3063, 0)) - TUNGSTEN = SaltVersion("Tungsten" , info=(3064, 0)) - RHENIUM = SaltVersion("Rhenium" , info=(3065, 0)) - OSMIUM = SaltVersion("Osmium" , info=(3066, 0)) - IRIDIUM = SaltVersion("Iridium" , info=(3067, 0)) - PLATINUM = SaltVersion("Platinum" , info=(3068, 0)) - GOLD = SaltVersion("Gold" , info=(3069, 0)) - MERCURY = SaltVersion("Mercury" , info=(3070, 0)) - THALLIUM = SaltVersion("Thallium" , info=(3071, 0)) - LEAD = SaltVersion("Lead" , info=(3072, 0)) - BISMUTH = SaltVersion("Bismuth" , info=(3073, 0)) - POLONIUM = SaltVersion("Polonium" , info=(3074, 0)) - ASTATINE = SaltVersion("Astatine" , info=(3075, 0)) - RADON = SaltVersion("Radon" , info=(3076, 0)) - FRANCIUM = SaltVersion("Francium" , info=(3077, 0)) - RADIUM = SaltVersion("Radium" , info=(3078, 0)) - ACTINIUM = SaltVersion("Actinium" , info=(3079, 0)) - THORIUM = SaltVersion("Thorium" , info=(3080, 0)) - PROTACTINIUM = SaltVersion("Protactinium" , info=(3081, 0)) - URANIUM = SaltVersion("Uranium" , info=(3082, 0)) - NEPTUNIUM = SaltVersion("Neptunium" , info=(3083, 0)) - PLUTONIUM = SaltVersion("Plutonium" , info=(3084, 0)) - AMERICIUM = SaltVersion("Americium" , info=(3085, 0)) - CURIUM = SaltVersion("Curium" , info=(3086, 0)) - BERKELIUM = SaltVersion("Berkelium" , info=(3087, 0)) - CALIFORNIUM = SaltVersion("Californium" , info=(3088, 0)) - EINSTEINIUM = SaltVersion("Einsteinium" , info=(3089, 0)) - FERMIUM = SaltVersion("Fermium" , info=(3090, 0)) - MENDELEVIUM = SaltVersion("Mendelevium" , info=(3091, 0)) - NOBELIUM = SaltVersion("Nobelium" , info=(3092, 0)) - LAWRENCIUM = SaltVersion("Lawrencium" , info=(3093, 0)) - RUTHERFORDIUM = SaltVersion("Rutherfordium", info=(3094, 0)) - DUBNIUM = SaltVersion("Dubnium" , info=(3095, 0)) - SEABORGIUM = SaltVersion("Seaborgium" , info=(3096, 0)) - BOHRIUM = SaltVersion("Bohrium" , info=(3097, 0)) - HASSIUM = 
SaltVersion("Hassium" , info=(3098, 0)) - MEITNERIUM = SaltVersion("Meitnerium" , info=(3099, 0)) - DARMSTADTIUM = SaltVersion("Darmstadtium" , info=(3100, 0)) - ROENTGENIUM = SaltVersion("Roentgenium" , info=(3101, 0)) - COPERNICIUM = SaltVersion("Copernicium" , info=(3102, 0)) - NIHONIUM = SaltVersion("Nihonium" , info=(3103, 0)) - FLEROVIUM = SaltVersion("Flerovium" , info=(3104, 0)) - MOSCOVIUM = SaltVersion("Moscovium" , info=(3105, 0)) - LIVERMORIUM = SaltVersion("Livermorium" , info=(3106, 0)) - TENNESSINE = SaltVersion("Tennessine" , info=(3107, 0)) - OGANESSON = SaltVersion("Oganesson" , info=(3108, 0)) + SULFUR = SaltVersion("Sulfur" , info=3006, released=True) + CHLORINE = SaltVersion("Chlorine" , info=3007) + ARGON = SaltVersion("Argon" , info=3008) + POTASSIUM = SaltVersion("Potassium" , info=3009) + CALCIUM = SaltVersion("Calcium" , info=3010) + SCANDIUM = SaltVersion("Scandium" , info=3011) + TITANIUM = SaltVersion("Titanium" , info=3012) + VANADIUM = SaltVersion("Vanadium" , info=3013) + CHROMIUM = SaltVersion("Chromium" , info=3014) + MANGANESE = SaltVersion("Manganese" , info=3015) + IRON = SaltVersion("Iron" , info=3016) + COBALT = SaltVersion("Cobalt" , info=3017) + NICKEL = SaltVersion("Nickel" , info=3018) + COPPER = SaltVersion("Copper" , info=3019) + ZINC = SaltVersion("Zinc" , info=3020) + GALLIUM = SaltVersion("Gallium" , info=3021) + GERMANIUM = SaltVersion("Germanium" , info=3022) + ARSENIC = SaltVersion("Arsenic" , info=3023) + SELENIUM = SaltVersion("Selenium" , info=3024) + BROMINE = SaltVersion("Bromine" , info=3025) + KRYPTON = SaltVersion("Krypton" , info=3026) + RUBIDIUM = SaltVersion("Rubidium" , info=3027) + STRONTIUM = SaltVersion("Strontium" , info=3028) + YTTRIUM = SaltVersion("Yttrium" , info=3029) + ZIRCONIUM = SaltVersion("Zirconium" , info=3030) + NIOBIUM = SaltVersion("Niobium" , info=3031) + MOLYBDENUM = SaltVersion("Molybdenum" , info=3032) + TECHNETIUM = SaltVersion("Technetium" , info=3033) + RUTHENIUM = 
SaltVersion("Ruthenium" , info=3034) + RHODIUM = SaltVersion("Rhodium" , info=3035) + PALLADIUM = SaltVersion("Palladium" , info=3036) + SILVER = SaltVersion("Silver" , info=3037) + CADMIUM = SaltVersion("Cadmium" , info=3038) + INDIUM = SaltVersion("Indium" , info=3039) + TIN = SaltVersion("Tin" , info=3040) + ANTIMONY = SaltVersion("Antimony" , info=3041) + TELLURIUM = SaltVersion("Tellurium" , info=3042) + IODINE = SaltVersion("Iodine" , info=3043) + XENON = SaltVersion("Xenon" , info=3044) + CESIUM = SaltVersion("Cesium" , info=3045) + BARIUM = SaltVersion("Barium" , info=3046) + LANTHANUM = SaltVersion("Lanthanum" , info=3047) + CERIUM = SaltVersion("Cerium" , info=3048) + PRASEODYMIUM = SaltVersion("Praseodymium" , info=3049) + NEODYMIUM = SaltVersion("Neodymium" , info=3050) + PROMETHIUM = SaltVersion("Promethium" , info=3051) + SAMARIUM = SaltVersion("Samarium" , info=3052) + EUROPIUM = SaltVersion("Europium" , info=3053) + GADOLINIUM = SaltVersion("Gadolinium" , info=3054) + TERBIUM = SaltVersion("Terbium" , info=3055) + DYSPROSIUM = SaltVersion("Dysprosium" , info=3056) + HOLMIUM = SaltVersion("Holmium" , info=3057) + ERBIUM = SaltVersion("Erbium" , info=3058) + THULIUM = SaltVersion("Thulium" , info=3059) + YTTERBIUM = SaltVersion("Ytterbium" , info=3060) + LUTETIUM = SaltVersion("Lutetium" , info=3061) + HAFNIUM = SaltVersion("Hafnium" , info=3062) + TANTALUM = SaltVersion("Tantalum" , info=3063) + TUNGSTEN = SaltVersion("Tungsten" , info=3064) + RHENIUM = SaltVersion("Rhenium" , info=3065) + OSMIUM = SaltVersion("Osmium" , info=3066) + IRIDIUM = SaltVersion("Iridium" , info=3067) + PLATINUM = SaltVersion("Platinum" , info=3068) + GOLD = SaltVersion("Gold" , info=3069) + MERCURY = SaltVersion("Mercury" , info=3070) + THALLIUM = SaltVersion("Thallium" , info=3071) + LEAD = SaltVersion("Lead" , info=3072) + BISMUTH = SaltVersion("Bismuth" , info=3073) + POLONIUM = SaltVersion("Polonium" , info=3074) + ASTATINE = SaltVersion("Astatine" , info=3075) + RADON 
= SaltVersion("Radon" , info=3076) + FRANCIUM = SaltVersion("Francium" , info=3077) + RADIUM = SaltVersion("Radium" , info=3078) + ACTINIUM = SaltVersion("Actinium" , info=3079) + THORIUM = SaltVersion("Thorium" , info=3080) + PROTACTINIUM = SaltVersion("Protactinium" , info=3081) + URANIUM = SaltVersion("Uranium" , info=3082) + NEPTUNIUM = SaltVersion("Neptunium" , info=3083) + PLUTONIUM = SaltVersion("Plutonium" , info=3084) + AMERICIUM = SaltVersion("Americium" , info=3085) + CURIUM = SaltVersion("Curium" , info=3086) + BERKELIUM = SaltVersion("Berkelium" , info=3087) + CALIFORNIUM = SaltVersion("Californium" , info=3088) + EINSTEINIUM = SaltVersion("Einsteinium" , info=3089) + FERMIUM = SaltVersion("Fermium" , info=3090) + MENDELEVIUM = SaltVersion("Mendelevium" , info=3091) + NOBELIUM = SaltVersion("Nobelium" , info=3092) + LAWRENCIUM = SaltVersion("Lawrencium" , info=3093) + RUTHERFORDIUM = SaltVersion("Rutherfordium", info=3094) + DUBNIUM = SaltVersion("Dubnium" , info=3095) + SEABORGIUM = SaltVersion("Seaborgium" , info=3096) + BOHRIUM = SaltVersion("Bohrium" , info=3097) + HASSIUM = SaltVersion("Hassium" , info=3098) + MEITNERIUM = SaltVersion("Meitnerium" , info=3099) + DARMSTADTIUM = SaltVersion("Darmstadtium" , info=3100) + ROENTGENIUM = SaltVersion("Roentgenium" , info=3101) + COPERNICIUM = SaltVersion("Copernicium" , info=3102) + NIHONIUM = SaltVersion("Nihonium" , info=3103) + FLEROVIUM = SaltVersion("Flerovium" , info=3104) + MOSCOVIUM = SaltVersion("Moscovium" , info=3105) + LIVERMORIUM = SaltVersion("Livermorium" , info=3106) + TENNESSINE = SaltVersion("Tennessine" , info=3107) + OGANESSON = SaltVersion("Oganesson" , info=3108) # <---- Please refrain from fixing whitespace ----------------------------------- # The idea is to keep this readable. 
# ------------------------------------------------------------------------------- @@ -323,9 +324,7 @@ def __init__( self.mbugfix = mbugfix self.pre_type = pre_type self.pre_num = pre_num - if self.can_have_dot_zero(major): - vnames_key = (major, 0) - elif self.new_version(major): + if self.new_version(major): vnames_key = (major,) else: vnames_key = (major, minor) @@ -356,15 +355,13 @@ def parse(cls, version_string): ) match = cls.git_describe_regex.match(vstr) if not match: - raise ValueError( - "Unable to parse version string: '{}'".format(version_string) - ) + raise ValueError(f"Unable to parse version string: '{version_string}'") return cls(*match.groups()) @classmethod def from_name(cls, name): if name.lower() not in cls.LNAMES: - raise ValueError("Named version '{}' is not known".format(name)) + raise ValueError(f"Named version '{name}' is not known") return cls(*cls.LNAMES[name.lower()]) @classmethod @@ -447,22 +444,22 @@ def full_info_all_versions(self): @property def string(self): if self.new_version(self.major): - version_string = "{}".format(self.major) + version_string = f"{self.major}" if self.minor: - version_string = "{}.{}".format(self.major, self.minor) + version_string = f"{self.major}.{self.minor}" if not self.minor and self.can_have_dot_zero(self.major): - version_string = "{}.{}".format(self.major, self.minor) + version_string = f"{self.major}.{self.minor}" else: - version_string = "{}.{}.{}".format(self.major, self.minor, self.bugfix) + version_string = f"{self.major}.{self.minor}.{self.bugfix}" if self.mbugfix: - version_string += ".{}".format(self.mbugfix) + version_string += f".{self.mbugfix}" if self.pre_type: - version_string += "{}{}".format(self.pre_type, self.pre_num) + version_string += f"{self.pre_type}{self.pre_num}" if self.noc and self.sha: noc = self.noc if noc < 0: noc = "0na" - version_string += "+{}.{}".format(noc, self.sha) + version_string += f"+{noc}.{self.sha}" return version_string @property @@ -476,8 +473,12 @@ def 
formatted_version(self): version_string = self.string if self.sse: version_string += " Enterprise" - if (self.major, self.minor) in self.RMATCH: - version_string += " ({})".format(self.RMATCH[(self.major, self.minor)]) + if self.new_version(self.major): + rmatch_key = (self.major,) + else: + rmatch_key = (self.major, self.minor) + if rmatch_key in self.RMATCH: + version_string += f" ({self.RMATCH[rmatch_key]})" return version_string @property @@ -501,7 +502,7 @@ def __compare__(self, other, method): other = SaltStackVersion(*other) else: raise ValueError( - "Cannot instantiate Version from type '{}'".format(type(other)) + f"Cannot instantiate Version from type '{type(other)}'" ) pre_type = self.pre_index other_pre_type = other.pre_index @@ -550,24 +551,24 @@ def __gt__(self, other): def __repr__(self): parts = [] if self.name: - parts.append("name='{}'".format(self.name)) - parts.extend(["major={}".format(self.major), "minor={}".format(self.minor)]) + parts.append(f"name='{self.name}'") + parts.extend([f"major={self.major}", f"minor={self.minor}"]) if self.new_version(self.major): if not self.can_have_dot_zero(self.major) and not self.minor: parts.remove("".join([x for x in parts if re.search("^minor*", x)])) else: - parts.extend(["bugfix={}".format(self.bugfix)]) + parts.extend([f"bugfix={self.bugfix}"]) if self.mbugfix: - parts.append("minor-bugfix={}".format(self.mbugfix)) + parts.append(f"minor-bugfix={self.mbugfix}") if self.pre_type: - parts.append("{}={}".format(self.pre_type, self.pre_num)) + parts.append(f"{self.pre_type}={self.pre_num}") noc = self.noc if noc == -1: noc = "0na" if noc and self.sha: - parts.extend(["noc={}".format(noc), "sha={}".format(self.sha)]) + parts.extend([f"noc={noc}", f"sha={self.sha}"]) return "<{} {}>".format(self.__class__.__name__, " ".join(parts)) @@ -614,7 +615,7 @@ def __discover_version(saltstack_version): "v[0-9]*", "--always", ], - **kwargs + **kwargs, ) out, err = process.communicate() @@ -755,7 +756,7 @@ def 
system_information(): Report system versions. """ # Late import so that when getting called from setup.py does not break - from distro import linux_distribution + from salt.utils.platform import linux_distribution def system_version(): """ @@ -809,7 +810,7 @@ def system_version(): # ie: R2 if re.match(r"^R\d+$", item): release = item - release = "{}Server{}".format(version, release) + release = f"{version}Server{release}" else: for item in product_name.split(" "): # If it's a number, decimal number, Thin or Vista, then it's the @@ -909,7 +910,7 @@ def versions_report(include_salt_cloud=False, include_extensions=True): if ver_type == "Salt Extensions" and ver_type not in ver_info: # No salt Extensions to report continue - info.append("{}:".format(ver_type)) + info.append(f"{ver_type}:") # List dependencies in alphabetical, case insensitive order for name in sorted(ver_info[ver_type], key=lambda x: x.lower()): ver = fmt.format( @@ -921,5 +922,27 @@ def versions_report(include_salt_cloud=False, include_extensions=True): yield from info +def _parser(): + parser = argparse.ArgumentParser() + parser.add_argument( + "--next-release", help="Return the next release", action="store_true" + ) + parser.add_argument("--parse", help="Parse the passed string as a salt version") + # When pip installing we pass in other args to this script. 
+ # This allows us to catch those args but not use them + parser.add_argument("unknown", nargs=argparse.REMAINDER) + return parser.parse_args() + + if __name__ == "__main__": - print(__version__) + args = _parser() + if args.next_release: + print(__saltstack_version__.next_release()) + elif args.parse: + try: + print(SaltStackVersion.parse(args.parse)) + except Exception as exc: # pylint: disable=broad-except + print(f"Failed to parse '{args.parse}' as a salt version: {exc}") + sys.exit(1) + else: + print(__version__) diff --git a/salt/wheel/file_roots.py b/salt/wheel/file_roots.py index 77c888a146ed..c25bb883fbe4 100644 --- a/salt/wheel/file_roots.py +++ b/salt/wheel/file_roots.py @@ -19,7 +19,7 @@ def find(path, saltenv="base"): return ret for root in __opts__["file_roots"][saltenv]: full = os.path.join(root, path) - if not salt.utils.verify.clean_path(root, full): + if not salt.utils.verify.clean_path(root, full, subdir=True): continue if os.path.isfile(full): # Add it to the dict @@ -74,7 +74,7 @@ def list_roots(): def read(path, saltenv="base"): """ - Read the contents of a text file, if the file is binary then + Read the contents of a text file, if the file is binary then ignore it """ # Return a dict of paths + content ret = [] diff --git a/setup.cfg b/setup.cfg index f99baf45528d..2f452d876955 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,10 +3,22 @@ owner = root group = root [mypy] +packages = tools +exclude = (?x)( + salt + | tests + ).*\.py implicit_optional = True show_error_codes = True warn_return_any = True warn_unused_configs = True -[mypy.tools] +[mypy-tools.*] +ignore_missing_imports = True + +[mypy-tools.precommit.docstrings] +follow_imports = silent + +[mypy-salt.*] +follow_imports = silent ignore_missing_imports = True diff --git a/setup.py b/setup.py index 3bfc5534af85..c2992548c934 100755 --- a/setup.py +++ b/setup.py @@ -5,15 +5,12 @@ # pylint: disable=file-perms,resource-leakage import setuptools # isort:skip - -import contextlib import 
distutils.dist import glob import os -import platform +import subprocess import sys import warnings -from ctypes.util import find_library from datetime import datetime # pylint: disable=no-name-in-module @@ -173,8 +170,9 @@ with open(SALT_VERSION_HARDCODED, encoding="utf-8") as rfh: SALT_VERSION = rfh.read().strip() else: - exec(compile(open(SALT_VERSION_MODULE).read(), SALT_VERSION_MODULE, "exec")) - SALT_VERSION = str(__saltstack_version__) # pylint: disable=undefined-variable + SALT_VERSION = ( + subprocess.check_output([sys.executable, SALT_VERSION_MODULE]).decode().strip() + ) # pylint: enable=W0122 @@ -371,12 +369,6 @@ def finalize_options(self): self.generate_salt_syspaths = True def run(self): - if IS_WINDOWS_PLATFORM: - # Download the required DLLs - self.distribution.salt_download_windows_dlls = True - self.run_command("download-windows-dlls") - self.distribution.salt_download_windows_dlls = None - if self.write_salt_version is True: self.distribution.running_salt_install = True self.distribution.salt_version_hardcoded_path = SALT_VERSION_HARDCODED @@ -390,91 +382,6 @@ def run(self): develop.run(self) -class DownloadWindowsDlls(Command): - - description = "Download required DLL's for windows" - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - if getattr(self.distribution, "salt_download_windows_dlls", None) is None: - print("This command is not meant to be called on it's own") - exit(1) - try: - import pip - - # pip has moved many things to `_internal` starting with pip 10 - if LooseVersion(pip.__version__) < LooseVersion("10.0"): - # pylint: disable=no-name-in-module - from pip.utils.logging import indent_log - - # pylint: enable=no-name-in-module - else: - from pip._internal.utils.logging import ( # pylint: disable=no-name-in-module - indent_log, - ) - except ImportError: - # TODO: Impliment indent_log here so we don't require pip - @contextlib.contextmanager - def indent_log(): - yield - - 
platform_bits, _ = platform.architecture() - url = "https://repo.saltproject.io/windows/dependencies/{bits}/{fname}" - dest = os.path.join(os.path.dirname(sys.executable), "{fname}") - with indent_log(): - for fname in ( - "openssl/1.1.1k/ssleay32.dll", - "openssl/1.1.1k/libeay32.dll", - ): - # See if the library is already on the system - if find_library(fname): - continue - furl = url.format(bits=platform_bits[:2], fname=fname) - fdest = dest.format(fname=os.path.basename(fname)) - if not os.path.exists(fdest): - log.info("Downloading {} to {} from {}".format(fname, fdest, furl)) - try: - from contextlib import closing - - import requests - - with closing(requests.get(furl, stream=True)) as req: - if req.status_code == 200: - with open(fdest, "wb") as wfh: - for chunk in req.iter_content(chunk_size=4096): - if chunk: # filter out keep-alive new chunks - wfh.write(chunk) - wfh.flush() - else: - log.error( - "Failed to download {} to {} from {}".format( - fname, fdest, furl - ) - ) - except ImportError: - req = urlopen(furl) - - if req.getcode() == 200: - with open(fdest, "wb") as wfh: - while True: - chunk = req.read(4096) - if not chunk: - break - wfh.write(chunk) - wfh.flush() - else: - log.error( - "Failed to download {} to {} from {}".format( - fname, fdest, furl - ) - ) - - class Sdist(sdist): def make_release_tree(self, base_dir, files): if self.distribution.ssh_packaging: @@ -740,11 +647,6 @@ def run(self): self.distribution.salt_version_hardcoded_path = os.path.join( self.build_lib, "salt", "_version.txt" ) - if IS_WINDOWS_PLATFORM: - # Download the required DLLs - self.distribution.salt_download_windows_dlls = True - self.run_command("download-windows-dlls") - self.distribution.salt_download_windows_dlls = None # need to ensure _version.txt is created in build dir before install if not os.path.exists(os.path.join(self.build_lib)): if not self.skip_build: @@ -987,8 +889,6 @@ def __init__(self, attrs=None): ) if not IS_WINDOWS_PLATFORM: 
self.cmdclass.update({"sdist": CloudSdist, "install_lib": InstallLib}) - if IS_WINDOWS_PLATFORM: - self.cmdclass.update({"download-windows-dlls": DownloadWindowsDlls}) if HAS_BDIST_WHEEL: self.cmdclass["bdist_wheel"] = BDistWheel diff --git a/tasks/README.md b/tasks/README.md deleted file mode 100644 index 6ff3fb10a7df..000000000000 --- a/tasks/README.md +++ /dev/null @@ -1,28 +0,0 @@ -# What is this directory? - -This directory contains python scripts which should be called by [invoke](https://pypi.org/project/invoke). - -Instead of having several multi-purpose python scripts scatered through multiple paths in the salt code base, -we will now concentrate them under an invoke task. - -## Calling Invoke - -Invoke can be called in the following ways. - -### Installed system-wide - -If invoke is installed system-wide, be sure you also have `blessings` installed if you want coloured output, although -it's not a hard requirement. - -``` -inv docs.check -``` - -### Using Nox - -Since salt already uses nox, and nox manages virtual environments and respective requirements, calling invoke is as -simple as: - -``` -nox -e invoke -- docs.check -``` diff --git a/tasks/__init__.py b/tasks/__init__.py deleted file mode 100644 index 5f5aac88cb86..000000000000 --- a/tasks/__init__.py +++ /dev/null @@ -1,11 +0,0 @@ -from invoke import Collection # pylint: disable=3rd-party-module-not-gated - -from . 
import docs, docstrings, filemap, loader - -ns = Collection() -ns.add_collection(Collection.from_module(docs, name="docs"), name="docs") -ns.add_collection( - Collection.from_module(docstrings, name="docstrings"), name="docstrings" -) -ns.add_collection(Collection.from_module(loader, name="loader"), name="loader") -ns.add_collection(Collection.from_module(filemap, name="filemap"), name="filemap") diff --git a/tasks/filemap.py b/tasks/filemap.py deleted file mode 100644 index a1eb62c6b82f..000000000000 --- a/tasks/filemap.py +++ /dev/null @@ -1,95 +0,0 @@ -""" - tasks.filemap - ~~~~~~~~~~~~~ - - tests/filename_map.yml validity checks -""" -import pathlib -import re - -import yaml -from invoke import task # pylint: disable=3rd-party-module-not-gated - -from tasks import utils - -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -FILENAME_MAP_PATH = CODE_DIR / "tests" / "filename_map.yml" - - -def _match_to_test_file(match): - tests_path = CODE_DIR / "tests" - parts = match.split(".") - parts[-1] += ".py" - return tests_path.joinpath(*parts).relative_to(CODE_DIR) - - -def _check_matches(rule, matches): - errors = 0 - for match in matches: - filematch = _match_to_test_file(match) - if not filematch.exists(): - utils.error( - "The match '{}' for rule '{}' points to a non existing test module" - " path: {}", - match, - rule, - filematch, - ) - errors += 1 - return errors - - -@task -def check(ctx): - exitcode = 0 - excludes = ("tasks/", "templates/", ".nox/") - full_filelist = [path.relative_to(CODE_DIR) for path in CODE_DIR.rglob("*.py")] - filelist = [ - str(path) for path in full_filelist if not str(path).startswith(excludes) - ] - filename_map = yaml.safe_load(FILENAME_MAP_PATH.read_text()) - checked = set() - for rule, matches in filename_map.items(): - if rule == "*": - exitcode += _check_matches(rule, matches) - elif "|" in rule: - # This is regex - for filepath in filelist: - if re.match(rule, filepath): - # Found at least one match, stop looking - break 
- else: - utils.error( - "Could not find a matching file in the salt repo for the rule '{}'", - rule, - ) - exitcode += 1 - continue - exitcode += _check_matches(rule, matches) - elif "*" in rule or "\\" in rule: - # Glob matching - process_matches = True - for filerule in CODE_DIR.glob(rule): - if not filerule.exists(): - utils.error( - "The rule '{}' points to a non existing path: {}", - rule, - filerule, - ) - exitcode += 1 - process_matches = False - if process_matches: - exitcode += _check_matches(rule, matches) - else: - # Direct file paths as rules - filerule = pathlib.Path(rule) - if not filerule.exists(): - utils.error( - "The rule '{}' points to a non existing path: {}", rule, filerule - ) - exitcode += 1 - continue - exitcode += _check_matches(rule, matches) - if exitcode: - utils.error("Found {} errors", exitcode) - utils.exit_invoke(exitcode) diff --git a/tasks/utils.py b/tasks/utils.py deleted file mode 100644 index e082508a5a32..000000000000 --- a/tasks/utils.py +++ /dev/null @@ -1,64 +0,0 @@ -""" - tasks.utils - ~~~~~~~~~~~ - - Invoke utilities -""" - -import sys - -try: - from blessings import Terminal - - try: - terminal = Terminal() - HAS_BLESSINGS = True - except Exception: # pylint: disable=broad-except - terminal = None - HAS_BLESSINGS = False -except ImportError: - terminal = None - HAS_BLESSINGS = False - - -def exit_invoke(exitcode, message=None, *args, **kwargs): - if message is not None: - if exitcode > 0: - warn(message, *args, **kwargs) - else: - info(message, *args, **kwargs) - sys.exit(exitcode) - - -def info(message, *args, **kwargs): - if not isinstance(message, str): - message = str(message) - message = message.format(*args, **kwargs) - if terminal: - message = terminal.bold(terminal.green(message)) - write_message(message) - - -def warn(message, *args, **kwargs): - if not isinstance(message, str): - message = str(message) - message = message.format(*args, **kwargs) - if terminal: - message = terminal.bold(terminal.yellow(message)) 
- write_message(message) - - -def error(message, *args, **kwargs): - if not isinstance(message, str): - message = str(message) - message = message.format(*args, **kwargs) - if terminal: - message = terminal.bold(terminal.red(message)) - write_message(message) - - -def write_message(message): - sys.stderr.write(message) - if not message.endswith("\n"): - sys.stderr.write("\n") - sys.stderr.flush() diff --git a/tests/__init__.py b/tests/__init__.py index 909f68753c3e..219b3c78a754 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -2,19 +2,14 @@ import pathlib import sys -_repo_root = pathlib.Path(__file__).parent.parent -_paths_to_check = {""} -if sys.platform.startswith("win"): - _paths_to_check.add(str(_repo_root).replace("\\", "\\\\")) - _paths_to_check.add(str(_repo_root.resolve()).replace("\\", "\\\\")) -else: - _paths_to_check.add(str(_repo_root)) - +_repo_root = pathlib.Path(__file__).resolve().parent.parent if os.environ.get("ONEDIR_TESTRUN", "0") == "1": # In this particular case, we want to make sure that the repo root # is not part if sys.path so that when we import salt, we import salt from # the onedir and not the code checkout - for path in _paths_to_check: - if path in sys.path: + for path in list(sys.path): + if path == "": + sys.path.remove(path) + elif pathlib.Path(path).resolve() == _repo_root: sys.path.remove(path) diff --git a/tests/conftest.py b/tests/conftest.py index 6f387e95d747..f0d8d71b496f 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -15,6 +15,7 @@ import _pytest.logging import _pytest.skipping +import more_itertools import psutil import pytest @@ -66,8 +67,11 @@ # Flag coverage to track suprocesses by pointing it to the right .coveragerc file os.environ["COVERAGE_PROCESS_START"] = str(COVERAGERC_FILE) +# Variable defining a FIPS test run or not +FIPS_TESTRUN = os.environ.get("FIPS_TESTRUN", "0") == "1" + # Define the pytest plugins we rely on -pytest_plugins = ["tempdir", "helpers_namespace"] +pytest_plugins = 
["helpers_namespace"] # Define where not to collect tests from collect_ignore = ["setup.py"] @@ -112,17 +116,6 @@ class LiveLoggingStreamHandler( log = logging.getLogger("salt.testsuite") -# ----- PyTest Tempdir Plugin Hooks ---------------------------------------------------------------------------------> -def pytest_tempdir_basename(): - """ - Return the temporary directory basename for the salt test suite. - """ - return "stsuite" - - -# <---- PyTest Tempdir Plugin Hooks ---------------------------------------------------------------------------------- - - # ----- CLI Options Setup -------------------------------------------------------------------------------------------> def pytest_addoption(parser): """ @@ -168,18 +161,52 @@ def pytest_addoption(parser): ), ) test_selection_group.addoption( - "--proxy", - "--proxy-tests", - dest="proxy", + "--no-fast", + "--no-fast-tests", + dest="fast", action="store_true", default=False, - help="Run proxy tests (DEPRECATED)", + help="Don't run salt-fast tests. Default: %(default)s", ) test_selection_group.addoption( "--run-slow", + "--slow", + "--slow-tests", + dest="slow", + action="store_true", + default=False, + help="Run slow tests. Default: %(default)s", + ) + test_selection_group.addoption( + "--core", + "--core-tests", + dest="core", + action="store_true", + default=False, + help=( + "Run salt-core tests. These tests test the engine of salt! " + "Default: %(default)s" + ), + ) + test_selection_group.addoption( + "--flaky", + "--flaky-jail", + dest="flaky", action="store_true", default=False, - help="Run slow tests.", + help=( + "Run salt-flaky jail tests. These tests are in jail for being flaky! " + "One day they will be made not flaky." 
+ "Default: %(default)s" + ), + ) + test_selection_group.addoption( + "--proxy", + "--proxy-tests", + dest="proxy", + action="store_true", + default=False, + help="Run proxy tests (DEPRECATED)", ) output_options_group = parser.getgroup("Output Options") @@ -224,18 +251,19 @@ def pytest_configure(config): called after command line options have been parsed and all plugins and initial conftest files been loaded. """ - # try: - # assert config._onedir_check_complete - # return - # except AttributeError: - # if os.environ.get("ONEDIR_TESTRUN", "0") == "1": - # if pathlib.Path(salt.__file__).parent == CODE_DIR / "salt": - # raise pytest.UsageError( - # "Apparently running the test suite against the onedir build " - # "of salt, however, the imported salt package is pointing to " - # "the respository checkout instead of the onedir package." - # ) - # config._onedir_check_complete = True + try: + assert config._onedir_check_complete + return + except AttributeError: + if os.environ.get("ONEDIR_TESTRUN", "0") == "1": + if pathlib.Path(salt.__file__).parent == CODE_DIR / "salt": + raise pytest.UsageError( + "Apparently running the test suite against the onedir build " + "of salt, however, the imported salt package is pointing to " + "the respository checkout instead of the onedir package.\n\n" + f" * sys.path: {sys.path}" + ) + config._onedir_check_complete = True for dirname in CODE_DIR.iterdir(): if not dirname.is_dir(): @@ -269,6 +297,16 @@ def pytest_configure(config): "slow_test: Mark test as being slow. These tests are skipped by default unless" " `--run-slow` is passed", ) + config.addinivalue_line( + "markers", + "core_test: Mark test as being core. These tests are skipped by default unless" + " `--core-tests` is passed", + ) + config.addinivalue_line( + "markers", + "flaky_jail: Mark test as being jlaky. 
These tests are skipped by default unless" + " `--flaky-jail` is passed", + ) config.addinivalue_line( "markers", "async_timeout: Timeout, in seconds, for asynchronous test functions(`async def`)", @@ -362,9 +400,10 @@ def set_max_open_files_limits(min_soft=3072, min_hard=4096): return soft, hard -def pytest_report_header(): +def pytest_report_header(config): soft, hard = set_max_open_files_limits() - return "max open files; soft: {}; hard: {}".format(soft, hard) + transport = config.getoption("--transport") + return f"max open files: soft={soft}; hard={hard}\nsalt-transport: {transport}" def pytest_itemcollected(item): @@ -533,11 +572,39 @@ def pytest_runtest_setup(item): ): item._skipped_by_mark = True pytest.skip(PRE_PYTEST_SKIP_REASON) + test_group_count = sum( + bool(item.get_closest_marker(group)) + for group in ("core_test", "slow_test", "flaky_jail") + ) + if item.get_closest_marker("core_test") and item.get_closest_marker("slow_test"): + raise pytest.UsageError( + "Tests can only be in one test group. 
('core_test', 'slow_test')" + ) - if item.get_closest_marker("slow_test"): - if item.config.getoption("--run-slow") is False: - item._skipped_by_mark = True - pytest.skip("Slow tests are disabled!") + if item.get_closest_marker("flaky_jail"): + if not item.config.getoption("--flaky-jail"): + raise pytest.skip.Exception( + "flaky jail tests are disabled, pass '--flaky-jail' to enable them.", + _use_item_location=True, + ) + else: + if item.get_closest_marker("core_test"): + if not item.config.getoption("--core-tests"): + raise pytest.skip.Exception( + "Core tests are disabled, pass '--core-tests' to enable them.", + _use_item_location=True, + ) + if item.get_closest_marker("slow_test"): + if not item.config.getoption("--slow-tests"): + raise pytest.skip.Exception( + "Slow tests are disabled, pass '--run-slow' to enable them.", + _use_item_location=True, + ) + if test_group_count == 0 and item.config.getoption("--no-fast-tests"): + raise pytest.skip.Exception( + "Fast tests have been disabled by '--no-fast-tests'.", + _use_item_location=True, + ) requires_sshd_server_marker = item.get_closest_marker("requires_sshd_server") if requires_sshd_server_marker is not None: @@ -737,33 +804,6 @@ def pytest_runtest_setup(item): # ----- Test Groups Selection ---------------------------------------------------------------------------------------> -def get_group_size_and_start(total_items, total_groups, group_id): - """ - Calculate group size and start index. - """ - base_size = total_items // total_groups - rem = total_items % total_groups - - start = base_size * (group_id - 1) + min(group_id - 1, rem) - size = base_size + 1 if group_id <= rem else base_size - - return (start, size) - - -def get_group(items, total_groups, group_id): - """ - Get the items from the passed in group based on group size. 
- """ - if not 0 < group_id <= total_groups: - raise ValueError("Invalid test-group argument") - - start, size = get_group_size_and_start(len(items), total_groups, group_id) - selected = items[start : start + size] - deselected = items[:start] + items[start + size :] - assert len(selected) + len(deselected) == len(items) - return selected, deselected - - def groups_collection_modifyitems(config, items): group_count = config.getoption("test-group-count") group_id = config.getoption("test-group") @@ -772,17 +812,38 @@ def groups_collection_modifyitems(config, items): # We're not selection tests using groups, don't do any filtering return + if group_count == 1: + # Just one group, don't do any filtering + return + + terminal_reporter = config.pluginmanager.get_plugin("terminalreporter") + + if config.getoption("--last-failed") or config.getoption("--failed-first"): + # This is a test failure rerun, applying test groups would break this + terminal_reporter.write( + "\nNot splitting collected tests into chunks since --lf/--last-failed or " + "-ff/--failed-first was passed on the CLI.\n", + yellow=True, + ) + return + total_items = len(items) - tests_in_group, deselected = get_group(items, group_count, group_id) + # Devide into test groups + test_groups = more_itertools.divide(group_count, items) + # Pick the right group + tests_in_group = list(test_groups.pop(group_id - 1)) + # The rest are deselected tests + deselected = list(more_itertools.collapse(test_groups)) + # Sanity check + assert len(tests_in_group) + len(deselected) == total_items # Replace all items in the list items[:] = tests_in_group if deselected: config.hook.pytest_deselected(items=deselected) - terminal_reporter = config.pluginmanager.get_plugin("terminalreporter") terminal_reporter.write( - "Running test group #{} ({} tests)\n".format(group_id, len(items)), + f"Running test group #{group_id}(out of #{group_count}) ({len(items)} out of {total_items} tests)\n", yellow=True, ) @@ -791,6 +852,22 @@ def 
groups_collection_modifyitems(config, items): # ----- Fixtures Overrides ------------------------------------------------------------------------------------------> +@pytest.fixture(scope="session") +def salt_factories_default_root_dir(salt_factories_default_root_dir): + """ + The root directory from where to base all salt-factories paths. + + For example, in a salt system installation, this would be ``/``. + + .. admonition:: Attention + + If `root_dir` is returned on the `salt_factories_config()` fixture + dictionary, then that's the value used, and not the one returned by + this fixture. + """ + return salt_factories_default_root_dir / "stsuite" + + @pytest.fixture(scope="session") def salt_factories_config(): """ @@ -980,7 +1057,10 @@ def salt_syndic_master_factory( config_defaults["syndic_master"] = "localhost" config_defaults["transport"] = request.config.getoption("--transport") - config_overrides = {"log_level_logfile": "quiet"} + config_overrides = { + "log_level_logfile": "quiet", + "fips_mode": FIPS_TESTRUN, + } ext_pillar = [] if salt.utils.platform.is_windows(): ext_pillar.append( @@ -1093,7 +1173,10 @@ def salt_master_factory( config_defaults["syndic_master"] = "localhost" config_defaults["transport"] = salt_syndic_master_factory.config["transport"] - config_overrides = {"log_level_logfile": "quiet"} + config_overrides = { + "log_level_logfile": "quiet", + "fips_mode": FIPS_TESTRUN, + } ext_pillar = [] if salt.utils.platform.is_windows(): ext_pillar.append( @@ -1201,6 +1284,7 @@ def salt_minion_factory(salt_master_factory): "log_level_logfile": "quiet", "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() @@ -1232,6 +1316,7 @@ def salt_sub_minion_factory(salt_master_factory): "log_level_logfile": "quiet", "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": 
salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() @@ -1360,13 +1445,15 @@ def sshd_server(salt_factories, sshd_config_dir, salt_master, grains): "/usr/libexec/openssh/sftp-server", # Arch Linux "/usr/lib/ssh/sftp-server", + # Photon OS 5 + "/usr/libexec/sftp-server", ] sftp_server_path = None for path in sftp_server_paths: if os.path.exists(path): sftp_server_path = path if sftp_server_path is None: - log.warning(f"Failed to find 'sftp-server'. Searched: {sftp_server_paths}") + pytest.fail(f"Failed to find 'sftp-server'. Searched: {sftp_server_paths}") else: sshd_config_dict["Subsystem"] = f"sftp {sftp_server_path}" factory = salt_factories.get_sshd_daemon( diff --git a/tests/filename_map.yml b/tests/filename_map.yml index 72527e089415..ded66d324101 100644 --- a/tests/filename_map.yml +++ b/tests/filename_map.yml @@ -30,8 +30,8 @@ salt/modules/(aix_group|groupadd|mac_group|pw_group|solaris_group|win_groupadd)\ salt/modules/(debian_service|freebsdservice|gentoo_service|launchctl_service|mac_service|netbsdservice|openbsdrcctl_service|openbsdservice|rh_service|runit|linux_service|smf_service|systemd_service|upstart_service|win_service)\.py: - pytests.unit.states.test_service - - integration.modules.test_service - - integration.states.test_service + - pytests.functional.modules.test_service + - pytests.functional.states.test_service salt/modules/ansiblegate.py: @@ -323,6 +323,9 @@ salt/(minion\.py|channel/.+|transport/.+): tests/support/mock.py: - unit.test_mock +tests/support/virt.py: + - pytests.integration.modules.test_virt + tests/support/pytest/mysql.py: - pytests.functional.states.test_mysql - pytests.functional.modules.test_mysql @@ -337,3 +340,8 @@ tests/pytests/scenarios/multimaster: - pytests.scenarios.multimaster.beacons.test_inotify - pytests.scenarios.multimaster.modules.test_test - pytests.scenarios.failover.multimaster.test_failover_master + +tests/integration/files/ssh/.*: 
+ - integration.modules.test_ssh + - integration.states.test_ssh_auth + - integration.states.test_ssh_known_hosts diff --git a/tests/integration/cloud/clouds/test_digitalocean.py b/tests/integration/cloud/clouds/test_digitalocean.py index e92f57d8aa25..64ad0f17426c 100644 --- a/tests/integration/cloud/clouds/test_digitalocean.py +++ b/tests/integration/cloud/clouds/test_digitalocean.py @@ -1,10 +1,11 @@ """ Integration tests for DigitalOcean APIv2 """ - import base64 import hashlib +import pytest + import salt.crypt import salt.utils.stringutils from tests.integration.cloud.helpers.cloud_test_base import TIMEOUT, CloudTest @@ -43,6 +44,7 @@ def test_list_sizes(self): _list_sizes = self.run_cloud("--list-sizes {}".format(self.PROVIDER)) self.assertIn("16gb", [i.strip() for i in _list_sizes]) + @pytest.mark.skip_on_fips_enabled_platform def test_key_management(self): """ Test key management diff --git a/tests/integration/externalapi/test_venafiapi.py b/tests/integration/externalapi/test_venafiapi.py index ad08605430f0..c9d44dce50c2 100644 --- a/tests/integration/externalapi/test_venafiapi.py +++ b/tests/integration/externalapi/test_venafiapi.py @@ -43,13 +43,10 @@ class VenafiTest(ShellCase): @with_random_name @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_request(self, name): cn = "{}.example.com".format(name) - # Provide python27 compatibility - if not isinstance(cn, str): - cn = cn.decode() - ret = self.run_run_plus( fun="venafi.request", minion_id=cn, @@ -126,10 +123,6 @@ def test_sign(self, name): csr_path = f.name cn = "test-csr-32313131.venafi.example.com" - # Provide python27 compatibility - if not isinstance(cn, str): - cn = cn.decode() - ret = self.run_run_plus( fun="venafi.request", minion_id=cn, csr_path=csr_path, zone="fake" ) diff --git a/tests/integration/files/file/base/_states/salttest.py b/tests/integration/files/file/base/_states/salttest.py index 596e71213150..e7288a9a95d9 100644 --- 
a/tests/integration/files/file/base/_states/salttest.py +++ b/tests/integration/files/file/base/_states/salttest.py @@ -6,6 +6,3 @@ def hello(name): return "hello " + name - - -# vim:set et sts=4 ts=4 tw=80: diff --git a/tests/integration/files/ssh/authorized_keys b/tests/integration/files/ssh/authorized_keys index 6e23c8561a0a..ac8b7e209764 100644 --- a/tests/integration/files/ssh/authorized_keys +++ b/tests/integration/files/ssh/authorized_keys @@ -1 +1 @@ -command="/usr/local/lib/ssh-helper" ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== github.com +command="/usr/local/lib/ssh-helper" ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk= github.com diff --git a/tests/integration/files/ssh/known_hosts b/tests/integration/files/ssh/known_hosts index aa02480ca8a3..01239ee94ba8 100644 --- a/tests/integration/files/ssh/known_hosts +++ b/tests/integration/files/ssh/known_hosts @@ -1,3 +1,3 @@ -|1|muzcBqgq7+ByUY7aLICytOff8UI=|rZ1JBNlIOqRnwwsJl9yP+xMxgf8= ssh-rsa 
AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== +|1|muzcBqgq7+ByUY7aLICytOff8UI=|rZ1JBNlIOqRnwwsJl9yP+xMxgf8= ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABgQCj7ndNxQowgcQnjshcLrqPEiiphnt+VTTvDP6mHBL9j1aNUkY4Ue1gvwnGLVlOhGeYrnZaMgRK6+PKCUXaDbC7qtbW8gIkhL7aGCsOr/C56SJMy/BCZfxd1nWzAOxSDPgVsmerOBYfNqltV9/hWCqBywINIR+5dIg6JTJ72pcEpEjcYgXkE2YEFXV1JHnsKgbLWNlhScqb2UmyRkQyytRLtL+38TGxkxCflmO+5Z8CSSNY7GidjMIZ7Q4zMjA2n1nGrlTDkzwDCsw+wqFPGQA179cnfGWOWRVruj16z6XyvxvjJwbz0wQZ75XK5tKSb7FNyeIEs4TT4jk+S4dhPeAUC5y+bDYirYgM4GC7uEnztnZyaVWQ7B381AK4Qdrwt51ZqExKbQpTUNn+EjqoTwvqNj4kqx5QUCI0ThS/YkOxJCXmPUWZbhjpCg56i+2aB6CmK2JGhn57K5mj0MNdBXA4/WnwH6XoPWJzK5Nyu2zB3nAZp+S5hpQs+p1vN1/wsjk= github.com ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBEmKSENjQEezOmxkZMy7opKgwFB9nkt5YRrYMjNuG5N87uRgg6CLrbo5wAdT/y6v0mKV0U2w0WZ2YB/++Tpockg= github.com ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIOMqqnkVzrm0SdG6UOoqKLsabgH5C9okWi0dh2l9GKJl diff --git a/tests/integration/files/ssh/raw b/tests/integration/files/ssh/raw deleted file mode 100644 index 3e58ba81e02b..000000000000 --- a/tests/integration/files/ssh/raw +++ /dev/null @@ -1 +0,0 @@ -AAAAB3NzaC1yc2EAAAABIwAAAQEAq2A7hRGmdnm9tUDbO9IDSwBK6TbQa+PXYPCPy6rbTrTtw7PHkccKrpp0yVhp5HdEIcKr6pLlVDBfOLX9QUsyCOV0wzfjIJNlGEYsdlLJizHhbn2mUjvSAHQqZETYP81eFzLQNnPHt4EVVUh7VfDESU84KezmD5QlWpXLmvU31/yMf+Se8xhHTvKSCZIFImWwoG6mbUoWf9nzpIoaSjB+weqqUUmpaaasXVal72J+UX2B+2RPW3RcT0eOzQgqlJL3RKrTJvdsjE3JEAvGq3lGHSZXy28G3skua2SmVi/w4yCE6gbODqnTWlg7+wC604ydGXA8VJiS5ap43JXiUFFAaQ== diff --git a/tests/integration/modules/test_chocolatey.py b/tests/integration/modules/test_chocolatey.py index 102a3716b6bd..188e5446f487 100644 --- 
a/tests/integration/modules/test_chocolatey.py +++ b/tests/integration/modules/test_chocolatey.py @@ -9,6 +9,7 @@ @pytest.mark.skip_unless_on_windows @pytest.mark.windows_whitelisted @pytest.mark.destructive_test +@pytest.mark.slow_test class ChocolateyModuleTest(ModuleCase): """ Validate Chocolatey module diff --git a/tests/integration/modules/test_cp.py b/tests/integration/modules/test_cp.py index ad7538b4ba8e..af873bb67848 100644 --- a/tests/integration/modules/test_cp.py +++ b/tests/integration/modules/test_cp.py @@ -89,12 +89,12 @@ def test_get_file_gzipped(self, tgt): """ src = os.path.join(RUNTIME_VARS.FILES, "file", "base", "file.big") with salt.utils.files.fopen(src, "rb") as fp_: - hash_str = hashlib.md5(fp_.read()).hexdigest() + hash_str = hashlib.sha256(fp_.read()).hexdigest() self.run_function("cp.get_file", ["salt://file.big", tgt], gzip=5) with salt.utils.files.fopen(tgt, "rb") as scene: data = scene.read() - self.assertEqual(hash_str, hashlib.md5(data).hexdigest()) + self.assertEqual(hash_str, hashlib.sha256(data).hexdigest()) data = salt.utils.stringutils.to_unicode(data) self.assertIn("KNIGHT: They're nervous, sire.", data) self.assertNotIn("bacon", data) @@ -234,9 +234,9 @@ def test_get_url_https(self, tgt): self.run_function("cp.get_url", ["https://repo.saltproject.io/index.html", tgt]) with salt.utils.files.fopen(tgt, "r") as instructions: data = salt.utils.stringutils.to_unicode(instructions.read()) - self.assertIn("Bootstrap", data) - self.assertIn("Debian", data) - self.assertIn("Windows", data) + self.assertIn("Salt Project", data) + self.assertIn("Package", data) + self.assertIn("Repo", data) self.assertNotIn("AYBABTU", data) @pytest.mark.slow_test @@ -250,9 +250,9 @@ def test_get_url_https_dest_empty(self): with salt.utils.files.fopen(ret, "r") as instructions: data = salt.utils.stringutils.to_unicode(instructions.read()) - self.assertIn("Bootstrap", data) - self.assertIn("Debian", data) - self.assertIn("Windows", data) + 
self.assertIn("Salt Project", data) + self.assertIn("Package", data) + self.assertIn("Repo", data) self.assertNotIn("AYBABTU", data) @pytest.mark.slow_test @@ -273,9 +273,9 @@ def test_get_url_https_no_dest(self): time.sleep(sleep) if ret.find("HTTP 599") != -1: raise Exception("https://repo.saltproject.io/index.html returned 599 error") - self.assertIn("Bootstrap", ret) - self.assertIn("Debian", ret) - self.assertIn("Windows", ret) + self.assertIn("Salt Project", ret) + self.assertIn("Package", ret) + self.assertIn("Repo", ret) self.assertNotIn("AYBABTU", ret) @pytest.mark.slow_test @@ -346,9 +346,9 @@ def test_get_file_str_https(self): """ src = "https://repo.saltproject.io/index.html" ret = self.run_function("cp.get_file_str", [src]) - self.assertIn("Bootstrap", ret) - self.assertIn("Debian", ret) - self.assertIn("Windows", ret) + self.assertIn("Salt Project", ret) + self.assertIn("Package", ret) + self.assertIn("Repo", ret) self.assertNotIn("AYBABTU", ret) @pytest.mark.slow_test diff --git a/tests/integration/modules/test_jinja.py b/tests/integration/modules/test_jinja.py deleted file mode 100644 index 70b45bf0f232..000000000000 --- a/tests/integration/modules/test_jinja.py +++ /dev/null @@ -1,76 +0,0 @@ -""" -Test the jinja module -""" - -import os - -import salt.utils.files -import salt.utils.json -import salt.utils.yaml -from tests.support.case import ModuleCase -from tests.support.helpers import requires_system_grains -from tests.support.runtests import RUNTIME_VARS - - -class TestModulesJinja(ModuleCase): - """ - Test the jinja map module - """ - - def _path(self, name, absolute=False): - path = os.path.join("modules", "jinja", name) - if absolute: - return os.path.join(RUNTIME_VARS.BASE_FILES, path) - else: - return path - - def test_import_json(self): - json_file = "osarchmap.json" - ret = self.run_function("jinja.import_json", [self._path(json_file)]) - with salt.utils.files.fopen(self._path(json_file, absolute=True)) as fh_: - 
self.assertDictEqual(salt.utils.json.load(fh_), ret) - - def test_import_yaml(self): - yaml_file = "defaults.yaml" - ret = self.run_function("jinja.import_yaml", [self._path(yaml_file)]) - with salt.utils.files.fopen(self._path(yaml_file, absolute=True)) as fh_: - self.assertDictEqual(salt.utils.yaml.safe_load(fh_), ret) - - @requires_system_grains - def test_load_map(self, grains): - ret = self.run_function("jinja.load_map", [self._path("map.jinja"), "template"]) - - assert isinstance( - ret, dict - ), "failed to return dictionary from jinja.load_map: {}".format(ret) - - with salt.utils.files.fopen(self._path("defaults.yaml", absolute=True)) as fh_: - defaults = salt.utils.yaml.safe_load(fh_) - with salt.utils.files.fopen(self._path("osarchmap.json", absolute=True)) as fh_: - osarchmap = salt.utils.json.load(fh_) - with salt.utils.files.fopen( - self._path("osfamilymap.yaml", absolute=True) - ) as fh_: - osfamilymap = salt.utils.yaml.safe_load(fh_) - with salt.utils.files.fopen(self._path("osmap.yaml", absolute=True)) as fh_: - osmap = salt.utils.yaml.safe_load(fh_) - with salt.utils.files.fopen( - self._path("osfingermap.yaml", absolute=True) - ) as fh_: - osfingermap = salt.utils.yaml.safe_load(fh_) - - self.assertEqual( - ret.get("arch"), osarchmap.get(grains["osarch"], {}).get("arch") - ) - self.assertEqual( - ret.get("config"), - osfingermap.get(grains["osfinger"], {}).get( - "config", - osmap.get(grains["os"], {}).get( - "config", - osfamilymap.get(grains["os_family"], {}).get( - "config", defaults.get("template").get("config") - ), - ), - ), - ) diff --git a/tests/integration/modules/test_linux_shadow.py b/tests/integration/modules/test_linux_shadow.py index 73a18a8dc815..aaea75d8e456 100644 --- a/tests/integration/modules/test_linux_shadow.py +++ b/tests/integration/modules/test_linux_shadow.py @@ -15,6 +15,7 @@ @pytest.mark.skip_if_not_root @pytest.mark.skip_unless_on_linux +@pytest.mark.slow_test class ShadowModuleTest(ModuleCase): """ Validate the linux 
shadow system module diff --git a/tests/integration/modules/test_mac_assistive.py b/tests/integration/modules/test_mac_assistive.py deleted file mode 100644 index 5c435def9807..000000000000 --- a/tests/integration/modules/test_mac_assistive.py +++ /dev/null @@ -1,89 +0,0 @@ -""" - :codeauthor: Nicole Thomas -""" - -import pytest - -from tests.support.case import ModuleCase - -OSA_SCRIPT = "/usr/bin/osascript" - - -@pytest.mark.destructive_test -@pytest.mark.skip_if_not_root -@pytest.mark.skip_initial_gh_actions_failure -@pytest.mark.skip_unless_on_darwin -class MacAssistiveTest(ModuleCase): - """ - Integration tests for the mac_assistive module. - """ - - def setUp(self): - """ - Sets up test requirements - """ - # Let's install a bundle to use in tests - self.run_function("assistive.install", [OSA_SCRIPT, True]) - - def tearDown(self): - """ - Clean up after tests - """ - # Delete any bundles that were installed - osa_script = self.run_function("assistive.installed", [OSA_SCRIPT]) - if osa_script: - self.run_function("assistive.remove", [OSA_SCRIPT]) - - smile_bundle = "com.smileonmymac.textexpander" - smile_bundle_present = self.run_function("assistive.installed", [smile_bundle]) - if smile_bundle_present: - self.run_function("assistive.remove", [smile_bundle]) - - @pytest.mark.slow_test - def test_install_and_remove(self): - """ - Tests installing and removing a bundled ID or command to use assistive access. - """ - new_bundle = "com.smileonmymac.textexpander" - self.assertTrue(self.run_function("assistive.install", [new_bundle])) - self.assertTrue(self.run_function("assistive.remove", [new_bundle])) - - @pytest.mark.slow_test - def test_installed(self): - """ - Tests the True and False return of assistive.installed. 
- """ - # OSA script should have been installed in setUp function - self.assertTrue(self.run_function("assistive.installed", [OSA_SCRIPT])) - # Clean up install - self.run_function("assistive.remove", [OSA_SCRIPT]) - # Installed should now return False - self.assertFalse(self.run_function("assistive.installed", [OSA_SCRIPT])) - - @pytest.mark.slow_test - def test_enable(self): - """ - Tests setting the enabled status of a bundled ID or command. - """ - # OSA script should have been installed and enabled in setUp function - # Now let's disable it, which should return True. - self.assertTrue(self.run_function("assistive.enable", [OSA_SCRIPT, False])) - # Double check the script was disabled, as intended. - self.assertFalse(self.run_function("assistive.enabled", [OSA_SCRIPT])) - # Now re-enable - self.assertTrue(self.run_function("assistive.enable", [OSA_SCRIPT])) - # Double check the script was enabled, as intended. - self.assertTrue(self.run_function("assistive.enabled", [OSA_SCRIPT])) - - @pytest.mark.slow_test - def test_enabled(self): - """ - Tests if a bundled ID or command is listed in assistive access returns True. - """ - # OSA script should have been installed in setUp function, which sets - # enabled to True by default. 
- self.assertTrue(self.run_function("assistive.enabled", [OSA_SCRIPT])) - # Disable OSA Script - self.run_function("assistive.enable", [OSA_SCRIPT, False]) - # Assert against new disabled status - self.assertFalse(self.run_function("assistive.enabled", [OSA_SCRIPT])) diff --git a/tests/integration/modules/test_mac_brew_pkg.py b/tests/integration/modules/test_mac_brew_pkg.py deleted file mode 100644 index 59d2dcde1dee..000000000000 --- a/tests/integration/modules/test_mac_brew_pkg.py +++ /dev/null @@ -1,188 +0,0 @@ -""" - :codeauthor: Nicole Thomas -""" - -import pytest - -from salt.exceptions import CommandExecutionError -from tests.support.case import ModuleCase - -# Brew doesn't support local package installation - So, let's -# Grab some small packages available online for brew -ADD_PKG = "algol68g" -DEL_PKG = "acme" - - -@pytest.mark.skip_if_not_root -@pytest.mark.destructive_test -@pytest.mark.skip_if_binaries_missing("brew") -@pytest.mark.skip_unless_on_darwin -class BrewModuleTest(ModuleCase): - """ - Integration tests for the brew module - """ - - @pytest.mark.slow_test - def test_brew_install(self): - """ - Tests the installation of packages - """ - try: - self.run_function("pkg.install", [ADD_PKG]) - pkg_list = self.run_function("pkg.list_pkgs") - try: - self.assertIn(ADD_PKG, pkg_list) - except AssertionError: - self.run_function("pkg.remove", [ADD_PKG]) - raise - except CommandExecutionError: - self.run_function("pkg.remove", [ADD_PKG]) - raise - - @pytest.mark.slow_test - def test_remove(self): - """ - Tests the removal of packages - """ - try: - # Install a package to delete - If unsuccessful, skip the test - self.run_function("pkg.install", [DEL_PKG]) - pkg_list = self.run_function("pkg.list_pkgs") - if DEL_PKG not in pkg_list: - self.run_function("pkg.install", [DEL_PKG]) - self.skipTest("Failed to install a package to delete") - - # Now remove the installed package - self.run_function("pkg.remove", [DEL_PKG]) - del_list = 
self.run_function("pkg.list_pkgs") - self.assertNotIn(DEL_PKG, del_list) - except CommandExecutionError: - self.run_function("pkg.remove", [DEL_PKG]) - raise - - @pytest.mark.slow_test - def test_version(self): - """ - Test pkg.version for mac. Installs a package and then checks we can get - a version for the installed package. - """ - try: - self.run_function("pkg.install", [ADD_PKG]) - pkg_list = self.run_function("pkg.list_pkgs") - version = self.run_function("pkg.version", [ADD_PKG]) - try: - self.assertTrue( - version, - msg="version: {} is empty, or other issue is present".format( - version - ), - ) - self.assertIn( - ADD_PKG, - pkg_list, - msg="package: {} is not in the list of installed packages: {}".format( - ADD_PKG, pkg_list - ), - ) - # make sure the version is accurate and is listed in the pkg_list - self.assertIn( - version, - str(pkg_list[ADD_PKG]), - msg="The {} version: {} is not listed in the pkg_list: {}".format( - ADD_PKG, version, pkg_list[ADD_PKG] - ), - ) - except AssertionError: - self.run_function("pkg.remove", [ADD_PKG]) - raise - except CommandExecutionError: - self.run_function("pkg.remove", [ADD_PKG]) - raise - - @pytest.mark.slow_test - def test_latest_version(self): - """ - Test pkg.latest_version: - - get the latest version available - - install the package - - get the latest version available - - check that the latest version is empty after installing it - """ - try: - self.run_function("pkg.remove", [ADD_PKG]) - uninstalled_latest = self.run_function("pkg.latest_version", [ADD_PKG]) - - self.run_function("pkg.install", [ADD_PKG]) - installed_latest = self.run_function("pkg.latest_version", [ADD_PKG]) - version = self.run_function("pkg.version", [ADD_PKG]) - try: - self.assertTrue(isinstance(uninstalled_latest, str)) - self.assertEqual(installed_latest, version) - except AssertionError: - self.run_function("pkg.remove", [ADD_PKG]) - raise - except CommandExecutionError: - self.run_function("pkg.remove", [ADD_PKG]) - raise - - 
@pytest.mark.slow_test - def test_refresh_db(self): - """ - Integration test to ensure pkg.refresh_db works with brew - """ - refresh_brew = self.run_function("pkg.refresh_db") - self.assertTrue(refresh_brew) - - @pytest.mark.slow_test - def test_list_upgrades(self): - """ - Test pkg.list_upgrades: data is in the form {'name1': 'version1', - 'name2': 'version2', ... } - """ - try: - upgrades = self.run_function("pkg.list_upgrades") - try: - self.assertTrue(isinstance(upgrades, dict)) - if upgrades: - for name in upgrades: - self.assertTrue(isinstance(name, str)) - self.assertTrue(isinstance(upgrades[name], str)) - except AssertionError: - self.run_function("pkg.remove", [ADD_PKG]) - raise - except CommandExecutionError: - self.run_function("pkg.remove", [ADD_PKG]) - raise - - @pytest.mark.slow_test - def test_info_installed(self): - """ - Test pkg.info_installed: info returned has certain fields used by - mac_brew.latest_version - """ - try: - self.run_function("pkg.install", [ADD_PKG]) - info = self.run_function("pkg.info_installed", [ADD_PKG]) - try: - self.assertTrue(ADD_PKG in info) - self.assertTrue("versions" in info[ADD_PKG]) - self.assertTrue("revision" in info[ADD_PKG]) - self.assertTrue("stable" in info[ADD_PKG]["versions"]) - except AssertionError: - self.run_function("pkg.remove", [ADD_PKG]) - raise - except CommandExecutionError: - self.run_function("pkg.remove", [ADD_PKG]) - raise - - def tearDown(self): - """ - Clean up after tests - """ - pkg_list = self.run_function("pkg.list_pkgs") - - # Remove any installed packages - if ADD_PKG in pkg_list: - self.run_function("pkg.remove", [ADD_PKG]) - if DEL_PKG in pkg_list: - self.run_function("pkg.remove", [DEL_PKG]) diff --git a/tests/integration/modules/test_mac_desktop.py b/tests/integration/modules/test_mac_desktop.py deleted file mode 100644 index 73f12c18e539..000000000000 --- a/tests/integration/modules/test_mac_desktop.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Integration tests for the mac_desktop 
execution module. -""" - -import pytest - -from tests.support.case import ModuleCase - - -@pytest.mark.destructive_test -@pytest.mark.skip_if_not_root -@pytest.mark.skip_unless_on_darwin -class MacDesktopTestCase(ModuleCase): - """ - Integration tests for the mac_desktop module. - """ - - def test_get_output_volume(self): - """ - Tests the return of get_output_volume. - """ - ret = self.run_function("desktop.get_output_volume") - self.assertIsNotNone(ret) - - @pytest.mark.slow_test - def test_set_output_volume(self): - """ - Tests the return of set_output_volume. - """ - current_vol = self.run_function("desktop.get_output_volume") - to_set = 10 - if current_vol == str(to_set): - to_set += 2 - new_vol = self.run_function("desktop.set_output_volume", [str(to_set)]) - check_vol = self.run_function("desktop.get_output_volume") - self.assertEqual(new_vol, check_vol) - - # Set volume back to what it was before - self.run_function("desktop.set_output_volume", [current_vol]) - - def test_screensaver(self): - """ - Tests the return of the screensaver function. - """ - self.assertTrue(self.run_function("desktop.screensaver")) - - def test_lock(self): - """ - Tests the return of the lock function. - """ - self.assertTrue(self.run_function("desktop.lock")) - - @pytest.mark.slow_test - def test_say(self): - """ - Tests the return of the say function. 
- """ - self.assertTrue(self.run_function("desktop.say", ["hello", "world"])) diff --git a/tests/integration/modules/test_mac_group.py b/tests/integration/modules/test_mac_group.py deleted file mode 100644 index 46be79667f85..000000000000 --- a/tests/integration/modules/test_mac_group.py +++ /dev/null @@ -1,177 +0,0 @@ -""" - :codeauthor: Nicole Thomas -""" - -import pytest -from saltfactories.utils import random_string - -from salt.exceptions import CommandExecutionError -from tests.support.case import ModuleCase - -# Create group name strings for tests -ADD_GROUP = random_string("RS-", lowercase=False) -DEL_GROUP = random_string("RS-", lowercase=False) -CHANGE_GROUP = random_string("RS-", lowercase=False) -ADD_USER = random_string("RS-", lowercase=False) -REP_USER_GROUP = random_string("RS-", lowercase=False) - - -@pytest.mark.skip_if_not_root -@pytest.mark.destructive_test -@pytest.mark.skip_unless_on_darwin -class MacGroupModuleTest(ModuleCase): - """ - Integration tests for the mac_group module - """ - - def setUp(self): - """ - Sets up test requirements - """ - os_grain = self.run_function("grains.item", ["kernel"]) - if os_grain["kernel"] not in "Darwin": - self.skipTest("Test not applicable to '{kernel}' kernel".format(**os_grain)) - - @pytest.mark.slow_test - def test_mac_group_add(self): - """ - Tests the add group function - """ - try: - self.run_function("group.add", [ADD_GROUP, 3456]) - group_info = self.run_function("group.info", [ADD_GROUP]) - self.assertEqual(group_info["name"], ADD_GROUP) - except CommandExecutionError: - self.run_function("group.delete", [ADD_GROUP]) - raise - - @pytest.mark.slow_test - def test_mac_group_delete(self): - """ - Tests the delete group function - """ - # Create a group to delete - If unsuccessful, skip the test - if self.run_function("group.add", [DEL_GROUP, 4567]) is not True: - self.run_function("group.delete", [DEL_GROUP]) - self.skipTest("Failed to create a group to delete") - - # Now try to delete the added 
group - ret = self.run_function("group.delete", [DEL_GROUP]) - self.assertTrue(ret) - - @pytest.mark.slow_test - def test_mac_group_chgid(self): - """ - Tests changing the group id - """ - # Create a group to delete - If unsuccessful, skip the test - if self.run_function("group.add", [CHANGE_GROUP, 5678]) is not True: - self.run_function("group.delete", [CHANGE_GROUP]) - self.skipTest("Failed to create a group to manipulate") - - try: - self.run_function("group.chgid", [CHANGE_GROUP, 6789]) - group_info = self.run_function("group.info", [CHANGE_GROUP]) - self.assertEqual(group_info["gid"], 6789) - except AssertionError: - self.run_function("group.delete", [CHANGE_GROUP]) - raise - - @pytest.mark.slow_test - def test_mac_adduser(self): - """ - Tests adding user to the group - """ - # Create a group to use for test - If unsuccessful, skip the test - if self.run_function("group.add", [ADD_GROUP, 5678]) is not True: - self.run_function("group.delete", [ADD_GROUP]) - self.skipTest("Failed to create a group to manipulate") - - try: - self.run_function("group.adduser", [ADD_GROUP, ADD_USER]) - group_info = self.run_function("group.info", [ADD_GROUP]) - self.assertEqual(ADD_USER, "".join(group_info["members"])) - except AssertionError: - self.run_function("group.delete", [ADD_GROUP]) - raise - - @pytest.mark.slow_test - def test_mac_deluser(self): - """ - Test deleting user from a group - """ - # Create a group to use for test - If unsuccessful, skip the test - if ( - self.run_function("group.add", [ADD_GROUP, 5678]) - and self.run_function("group.adduser", [ADD_GROUP, ADD_USER]) is not True - ): - self.run_function("group.delete", [ADD_GROUP]) - self.skipTest("Failed to create a group to manipulate") - - delusr = self.run_function("group.deluser", [ADD_GROUP, ADD_USER]) - self.assertTrue(delusr) - - group_info = self.run_function("group.info", [ADD_GROUP]) - self.assertNotIn(ADD_USER, "".join(group_info["members"])) - - @pytest.mark.slow_test - def test_mac_members(self): 
- """ - Test replacing members of a group - """ - if ( - self.run_function("group.add", [ADD_GROUP, 5678]) - and self.run_function("group.adduser", [ADD_GROUP, ADD_USER]) is not True - ): - self.run_function("group.delete", [ADD_GROUP]) - self.skipTest( - "Failed to create the {} group or add user {} to group " - "to manipulate".format(ADD_GROUP, ADD_USER) - ) - - rep_group_mem = self.run_function("group.members", [ADD_GROUP, REP_USER_GROUP]) - self.assertTrue(rep_group_mem) - - # ensure new user is added to group and previous user is removed - group_info = self.run_function("group.info", [ADD_GROUP]) - self.assertIn(REP_USER_GROUP, str(group_info["members"])) - self.assertNotIn(ADD_USER, str(group_info["members"])) - - @pytest.mark.slow_test - def test_mac_getent(self): - """ - Test returning info on all groups - """ - if ( - self.run_function("group.add", [ADD_GROUP, 5678]) - and self.run_function("group.adduser", [ADD_GROUP, ADD_USER]) is not True - ): - self.run_function("group.delete", [ADD_GROUP]) - self.skipTest( - "Failed to create the {} group or add user {} to group " - "to manipulate".format(ADD_GROUP, ADD_USER) - ) - - getinfo = self.run_function("group.getent") - self.assertTrue(getinfo) - self.assertIn(ADD_GROUP, str(getinfo)) - self.assertIn(ADD_USER, str(getinfo)) - - def tearDown(self): - """ - Clean up after tests - """ - # Delete ADD_GROUP - add_info = self.run_function("group.info", [ADD_GROUP]) - if add_info: - self.run_function("group.delete", [ADD_GROUP]) - - # Delete DEL_GROUP if something failed - del_info = self.run_function("group.info", [DEL_GROUP]) - if del_info: - self.run_function("group.delete", [DEL_GROUP]) - - # Delete CHANGE_GROUP - change_info = self.run_function("group.info", [CHANGE_GROUP]) - if change_info: - self.run_function("group.delete", [CHANGE_GROUP]) diff --git a/tests/integration/modules/test_mac_keychain.py b/tests/integration/modules/test_mac_keychain.py deleted file mode 100644 index afd195524f36..000000000000 --- 
a/tests/integration/modules/test_mac_keychain.py +++ /dev/null @@ -1,106 +0,0 @@ -""" -Validate the mac-keychain module -""" - -import os - -import pytest - -from salt.exceptions import CommandExecutionError -from tests.support.case import ModuleCase -from tests.support.runtests import RUNTIME_VARS - - -@pytest.mark.destructive_test -@pytest.mark.skip_if_not_root -@pytest.mark.skip_unless_on_darwin -class MacKeychainModuleTest(ModuleCase): - """ - Integration tests for the mac_keychain module - """ - - @classmethod - def setUpClass(cls): - cls.cert = os.path.join( - RUNTIME_VARS.FILES, "file", "base", "certs", "salttest.p12" - ) - cls.cert_alias = "Salt Test" - cls.passwd = "salttest" - - def tearDown(self): - """ - Clean up after tests - """ - # Remove the salttest cert, if left over. - certs_list = self.run_function("keychain.list_certs") - if self.cert_alias in certs_list: - self.run_function("keychain.uninstall", [self.cert_alias]) - - @pytest.mark.slow_test - def test_mac_keychain_install(self): - """ - Tests that attempts to install a certificate - """ - install_cert = self.run_function("keychain.install", [self.cert, self.passwd]) - self.assertTrue(install_cert) - - # check to ensure the cert was installed - certs_list = self.run_function("keychain.list_certs") - self.assertIn(self.cert_alias, certs_list) - - @pytest.mark.slow_test - def test_mac_keychain_uninstall(self): - """ - Tests that attempts to uninstall a certificate - """ - self.run_function("keychain.install", [self.cert, self.passwd]) - certs_list = self.run_function("keychain.list_certs") - - if self.cert_alias not in certs_list: - self.run_function("keychain.uninstall", [self.cert_alias]) - self.skipTest("Failed to install keychain") - - # uninstall cert - self.run_function("keychain.uninstall", [self.cert_alias]) - certs_list = self.run_function("keychain.list_certs") - - # check to ensure the cert was uninstalled - try: - self.assertNotIn(self.cert_alias, str(certs_list)) - except 
CommandExecutionError: - self.run_function("keychain.uninstall", [self.cert_alias]) - - @pytest.mark.slow_test - def test_mac_keychain_get_friendly_name(self): - """ - Test that attempts to get friendly name of a cert - """ - self.run_function("keychain.install", [self.cert, self.passwd]) - certs_list = self.run_function("keychain.list_certs") - if self.cert_alias not in certs_list: - self.run_function("keychain.uninstall", [self.cert_alias]) - self.skipTest("Failed to install keychain") - - get_name = self.run_function( - "keychain.get_friendly_name", [self.cert, self.passwd] - ) - self.assertEqual(get_name, self.cert_alias) - - @pytest.mark.slow_test - def test_mac_keychain_get_default_keychain(self): - """ - Test that attempts to get the default keychain - """ - salt_get_keychain = self.run_function("keychain.get_default_keychain") - sys_get_keychain = self.run_function( - "cmd.run", ["security default-keychain -d user"] - ) - self.assertEqual(salt_get_keychain, sys_get_keychain) - - def test_mac_keychain_list_certs(self): - """ - Test that attempts to list certs - """ - cert_default = "com.apple.systemdefault" - certs = self.run_function("keychain.list_certs") - self.assertIn(cert_default, certs) diff --git a/tests/integration/modules/test_mac_portspkg.py b/tests/integration/modules/test_mac_portspkg.py deleted file mode 100644 index 35ebe3735b6e..000000000000 --- a/tests/integration/modules/test_mac_portspkg.py +++ /dev/null @@ -1,104 +0,0 @@ -""" -integration tests for mac_ports -""" - -import pytest - -from tests.support.case import ModuleCase - - -@pytest.mark.skip_if_not_root -@pytest.mark.skip_if_binaries_missing("port") -@pytest.mark.skip_unless_on_darwin -class MacPortsModuleTest(ModuleCase): - """ - Validate the mac_ports module - """ - - AGREE_INSTALLED = False - - def setUp(self): - """ - Get current settings - """ - self.AGREE_INSTALLED = "agree" in self.run_function("pkg.list_pkgs") - self.run_function("pkg.refresh_db") - - def tearDown(self): - 
""" - Reset to original settings - """ - if not self.AGREE_INSTALLED: - self.run_function("pkg.remove", ["agree"]) - - @pytest.mark.destructive_test - def test_list_pkgs(self): - """ - Test pkg.list_pkgs - """ - self.run_function("pkg.install", ["agree"]) - self.assertIsInstance(self.run_function("pkg.list_pkgs"), dict) - self.assertIn("agree", self.run_function("pkg.list_pkgs")) - - @pytest.mark.destructive_test - def test_latest_version(self): - """ - Test pkg.latest_version - """ - self.run_function("pkg.install", ["agree"]) - result = self.run_function("pkg.latest_version", ["agree"], refresh=False) - self.assertIsInstance(result, dict) - self.assertIn("agree", result) - - @pytest.mark.destructive_test - def test_remove(self): - """ - Test pkg.remove - """ - self.run_function("pkg.install", ["agree"]) - removed = self.run_function("pkg.remove", ["agree"]) - self.assertIsInstance(removed, dict) - self.assertIn("agree", removed) - - @pytest.mark.destructive_test - def test_install(self): - """ - Test pkg.install - """ - self.run_function("pkg.remove", ["agree"]) - installed = self.run_function("pkg.install", ["agree"]) - self.assertIsInstance(installed, dict) - self.assertIn("agree", installed) - - def test_list_upgrades(self): - """ - Test pkg.list_upgrades - """ - self.assertIsInstance( - self.run_function("pkg.list_upgrades", refresh=False), dict - ) - - @pytest.mark.destructive_test - def test_upgrade_available(self): - """ - Test pkg.upgrade_available - """ - self.run_function("pkg.install", ["agree"]) - self.assertFalse( - self.run_function("pkg.upgrade_available", ["agree"], refresh=False) - ) - - def test_refresh_db(self): - """ - Test pkg.refresh_db - """ - self.assertTrue(self.run_function("pkg.refresh_db")) - - @pytest.mark.destructive_test - def test_upgrade(self): - """ - Test pkg.upgrade - """ - results = self.run_function("pkg.upgrade", refresh=False) - self.assertIsInstance(results, dict) - self.assertTrue(results["result"]) diff --git 
a/tests/integration/modules/test_mac_power.py b/tests/integration/modules/test_mac_power.py deleted file mode 100644 index cf63d0ae97d7..000000000000 --- a/tests/integration/modules/test_mac_power.py +++ /dev/null @@ -1,344 +0,0 @@ -""" -integration tests for mac_power -""" - -import pytest - -from tests.support.case import ModuleCase - - -@pytest.mark.flaky(max_runs=10) -@pytest.mark.skip_unless_on_darwin -@pytest.mark.skip_if_binaries_missing("systemsetup") -@pytest.mark.skip_if_not_root -class MacPowerModuleTest(ModuleCase): - """ - Validate the mac_power module - """ - - def setUp(self): - """ - Get current settings - """ - # Get current settings - self.COMPUTER_SLEEP = self.run_function("power.get_computer_sleep") - self.DISPLAY_SLEEP = self.run_function("power.get_display_sleep") - self.HARD_DISK_SLEEP = self.run_function("power.get_harddisk_sleep") - - def tearDown(self): - """ - Reset to original settings - """ - self.run_function("power.set_computer_sleep", [self.COMPUTER_SLEEP]) - self.run_function("power.set_display_sleep", [self.DISPLAY_SLEEP]) - self.run_function("power.set_harddisk_sleep", [self.HARD_DISK_SLEEP]) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_computer_sleep(self): - """ - Test power.get_computer_sleep - Test power.set_computer_sleep - """ - - # Normal Functionality - self.assertTrue(self.run_function("power.set_computer_sleep", [90])) - self.assertEqual( - self.run_function("power.get_computer_sleep"), "after 90 minutes" - ) - self.assertTrue(self.run_function("power.set_computer_sleep", ["Off"])) - self.assertEqual(self.run_function("power.get_computer_sleep"), "Never") - - # Test invalid input - self.assertIn( - "Invalid String Value for Minutes", - self.run_function("power.set_computer_sleep", ["spongebob"]), - ) - self.assertIn( - "Invalid Integer Value for Minutes", - self.run_function("power.set_computer_sleep", [0]), - ) - self.assertIn( - "Invalid Integer Value for Minutes", - 
self.run_function("power.set_computer_sleep", [181]), - ) - self.assertIn( - "Invalid Boolean Value for Minutes", - self.run_function("power.set_computer_sleep", [True]), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_display_sleep(self): - """ - Test power.get_display_sleep - Test power.set_display_sleep - """ - - # Normal Functionality - self.assertTrue(self.run_function("power.set_display_sleep", [90])) - self.assertEqual( - self.run_function("power.get_display_sleep"), "after 90 minutes" - ) - self.assertTrue(self.run_function("power.set_display_sleep", ["Off"])) - self.assertEqual(self.run_function("power.get_display_sleep"), "Never") - - # Test invalid input - self.assertIn( - "Invalid String Value for Minutes", - self.run_function("power.set_display_sleep", ["spongebob"]), - ) - self.assertIn( - "Invalid Integer Value for Minutes", - self.run_function("power.set_display_sleep", [0]), - ) - self.assertIn( - "Invalid Integer Value for Minutes", - self.run_function("power.set_display_sleep", [181]), - ) - self.assertIn( - "Invalid Boolean Value for Minutes", - self.run_function("power.set_display_sleep", [True]), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_harddisk_sleep(self): - """ - Test power.get_harddisk_sleep - Test power.set_harddisk_sleep - """ - - # Normal Functionality - self.assertTrue(self.run_function("power.set_harddisk_sleep", [90])) - self.assertEqual( - self.run_function("power.get_harddisk_sleep"), "after 90 minutes" - ) - self.assertTrue(self.run_function("power.set_harddisk_sleep", ["Off"])) - self.assertEqual(self.run_function("power.get_harddisk_sleep"), "Never") - - # Test invalid input - self.assertIn( - "Invalid String Value for Minutes", - self.run_function("power.set_harddisk_sleep", ["spongebob"]), - ) - self.assertIn( - "Invalid Integer Value for Minutes", - self.run_function("power.set_harddisk_sleep", [0]), - ) - self.assertIn( - "Invalid Integer Value for Minutes", - 
self.run_function("power.set_harddisk_sleep", [181]), - ) - self.assertIn( - "Invalid Boolean Value for Minutes", - self.run_function("power.set_harddisk_sleep", [True]), - ) - - @pytest.mark.slow_test - def test_restart_freeze(self): - """ - Test power.get_restart_freeze - Test power.set_restart_freeze - """ - # Normal Functionality - self.assertTrue(self.run_function("power.set_restart_freeze", ["on"])) - self.assertTrue(self.run_function("power.get_restart_freeze")) - # This will return False because mac fails to actually make the change - self.assertFalse(self.run_function("power.set_restart_freeze", ["off"])) - # Even setting to off returns true, it actually is never set - # This is an apple bug - self.assertTrue(self.run_function("power.get_restart_freeze")) - - -@pytest.mark.flaky(max_runs=10) -@pytest.mark.skip_unless_on_darwin -@pytest.mark.skip_if_binaries_missing("systemsetup") -@pytest.mark.skip_if_not_root -class MacPowerModuleTestSleepOnPowerButton(ModuleCase): - """ - Test power.get_sleep_on_power_button - Test power.set_sleep_on_power_button - """ - - SLEEP_ON_BUTTON = None - - def setUp(self): - """ - Check if function is available - Get existing value - """ - # Is the function available - ret = self.run_function("power.get_sleep_on_power_button") - if isinstance(ret, bool): - self.SLEEP_ON_BUTTON = self.run_function("power.get_sleep_on_power_button") - - def tearDown(self): - """ - Reset to original value - """ - if self.SLEEP_ON_BUTTON is not None: - self.run_function("power.set_sleep_on_power_button", [self.SLEEP_ON_BUTTON]) - - @pytest.mark.slow_test - def test_sleep_on_power_button(self): - """ - Test power.get_sleep_on_power_button - Test power.set_sleep_on_power_button - """ - # If available on this system, test it - if self.SLEEP_ON_BUTTON is None: - # Check for not available - ret = self.run_function("power.get_sleep_on_power_button") - self.assertIn("Error", ret) - else: - self.assertTrue( - 
self.run_function("power.set_sleep_on_power_button", ["on"]) - ) - self.assertTrue(self.run_function("power.get_sleep_on_power_button")) - self.assertTrue( - self.run_function("power.set_sleep_on_power_button", ["off"]) - ) - self.assertFalse(self.run_function("power.get_sleep_on_power_button")) - - -@pytest.mark.flaky(max_runs=10) -@pytest.mark.skip_unless_on_darwin -@pytest.mark.skip_if_binaries_missing("systemsetup") -@pytest.mark.skip_if_not_root -class MacPowerModuleTestRestartPowerFailure(ModuleCase): - """ - Test power.get_restart_power_failure - Test power.set_restart_power_failure - """ - - RESTART_POWER = None - - def setUp(self): - """ - Check if function is available - Get existing value - """ - # Is the function available - ret = self.run_function("power.get_restart_power_failure") - if isinstance(ret, bool): - self.RESTART_POWER = ret - - def tearDown(self): - """ - Reset to original value - """ - if self.RESTART_POWER is not None: - self.run_function("power.set_sleep_on_power_button", [self.SLEEP_ON_BUTTON]) - - def test_restart_power_failure(self): - """ - Test power.get_restart_power_failure - Test power.set_restart_power_failure - """ - # If available on this system, test it - if self.RESTART_POWER is None: - # Check for not available - ret = self.run_function("power.get_restart_power_failure") - self.assertIn("Error", ret) - else: - self.assertTrue( - self.run_function("power.set_restart_power_failure", ["on"]) - ) - self.assertTrue(self.run_function("power.get_restart_power_failure")) - self.assertTrue( - self.run_function("power.set_restart_power_failure", ["off"]) - ) - self.assertFalse(self.run_function("power.get_restart_power_failure")) - - -@pytest.mark.flaky(max_runs=10) -@pytest.mark.skip_unless_on_darwin -@pytest.mark.skip_if_binaries_missing("systemsetup") -@pytest.mark.skip_if_not_root -class MacPowerModuleTestWakeOnNet(ModuleCase): - """ - Test power.get_wake_on_network - Test power.set_wake_on_network - """ - - WAKE_ON_NET = None - 
- def setUp(self): - """ - Check if function is available - Get existing value - """ - # Is the function available - ret = self.run_function("power.get_wake_on_network") - if isinstance(ret, bool): - self.WAKE_ON_NET = ret - - def tearDown(self): - """ - Reset to original value - """ - if self.WAKE_ON_NET is not None: - self.run_function("power.set_wake_on_network", [self.WAKE_ON_NET]) - - def test_wake_on_network(self): - """ - Test power.get_wake_on_network - Test power.set_wake_on_network - """ - # If available on this system, test it - if self.WAKE_ON_NET is None: - # Check for not available - ret = self.run_function("power.get_wake_on_network") - self.assertIn("Error", ret) - else: - self.assertTrue(self.run_function("power.set_wake_on_network", ["on"])) - self.assertTrue(self.run_function("power.get_wake_on_network")) - self.assertTrue(self.run_function("power.set_wake_on_network", ["off"])) - self.assertFalse(self.run_function("power.get_wake_on_network")) - - -@pytest.mark.flaky(max_runs=10) -@pytest.mark.skip_unless_on_darwin -@pytest.mark.skip_if_binaries_missing("systemsetup") -@pytest.mark.skip_if_not_root -class MacPowerModuleTestWakeOnModem(ModuleCase): - """ - Test power.get_wake_on_modem - Test power.set_wake_on_modem - """ - - WAKE_ON_MODEM = None - - def setUp(self): - """ - Check if function is available - Get existing value - """ - # Is the function available - ret = self.run_function("power.get_wake_on_modem") - if isinstance(ret, bool): - self.WAKE_ON_MODEM = ret - - def tearDown(self): - """ - Reset to original value - """ - if self.WAKE_ON_MODEM is not None: - self.run_function("power.set_wake_on_modem", [self.WAKE_ON_MODEM]) - - def test_wake_on_modem(self): - """ - Test power.get_wake_on_modem - Test power.set_wake_on_modem - """ - # If available on this system, test it - if self.WAKE_ON_MODEM is None: - # Check for not available - ret = self.run_function("power.get_wake_on_modem") - self.assertIn("Error", ret) - else: - 
self.assertTrue(self.run_function("power.set_wake_on_modem", ["on"])) - self.assertTrue(self.run_function("power.get_wake_on_modem")) - self.assertTrue(self.run_function("power.set_wake_on_modem", ["off"])) - self.assertFalse(self.run_function("power.get_wake_on_modem")) diff --git a/tests/integration/modules/test_mac_service.py b/tests/integration/modules/test_mac_service.py deleted file mode 100644 index d4022cab535e..000000000000 --- a/tests/integration/modules/test_mac_service.py +++ /dev/null @@ -1,233 +0,0 @@ -""" -integration tests for mac_service -""" - -import plistlib - -import pytest - -import salt.utils.files -from tests.support.case import ModuleCase - - -@pytest.mark.skip_if_not_root -@pytest.mark.skip_if_binaries_missing("launchctl", "plutil") -@pytest.mark.skip_unless_on_darwin -class MacServiceModuleTest(ModuleCase): - """ - Validate the mac_service module - """ - - SERVICE_NAME = "com.salt.integration.test" - SERVICE_PATH = "/Library/LaunchDaemons/com.salt.integration.test.plist" - - def setUp(self): - """ - setup our test launch service. - """ - service_data = { - "KeepAlive": True, - "Label": self.SERVICE_NAME, - "ProgramArguments": ["/bin/sleep", "1000"], - "RunAtLoad": True, - } - with salt.utils.files.fopen(self.SERVICE_PATH, "wb") as fp: - plistlib.dump(service_data, fp) - self.run_function("service.enable", [self.SERVICE_NAME]) - self.run_function("service.start", [self.SERVICE_NAME]) - - def tearDown(self): - """ - stop and remove our test service. 
- """ - self.run_function("service.stop", [self.SERVICE_NAME]) - salt.utils.files.safe_rm(self.SERVICE_PATH) - - @pytest.mark.slow_test - def test_show(self): - """ - Test service.show - """ - # Existing Service - service_info = self.run_function("service.show", [self.SERVICE_NAME]) - self.assertIsInstance(service_info, dict) - self.assertEqual(service_info["plist"]["Label"], self.SERVICE_NAME) - - # Missing Service - self.assertIn( - "Service not found", self.run_function("service.show", ["spongebob"]) - ) - - @pytest.mark.slow_test - def test_launchctl(self): - """ - Test service.launchctl - """ - # Expected Functionality - self.assertTrue( - self.run_function("service.launchctl", ["error", "bootstrap", 64]) - ) - self.assertEqual( - self.run_function( - "service.launchctl", ["error", "bootstrap", 64], return_stdout=True - ), - "64: unknown error code", - ) - - # Raise an error - self.assertIn( - "Failed to error service", - self.run_function("service.launchctl", ["error", "bootstrap"]), - ) - - @pytest.mark.slow_test - def test_list(self): - """ - Test service.list - """ - # Expected Functionality - self.assertIn("PID", self.run_function("service.list")) - self.assertIn("{", self.run_function("service.list", [self.SERVICE_NAME])) - - # Service not found - self.assertIn( - "Service not found", self.run_function("service.list", ["spongebob"]) - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_enable(self): - """ - Test service.enable - """ - self.assertTrue(self.run_function("service.enable", [self.SERVICE_NAME])) - - self.assertIn( - "Service not found", self.run_function("service.enable", ["spongebob"]) - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_disable(self): - """ - Test service.disable - """ - self.assertTrue(self.run_function("service.disable", [self.SERVICE_NAME])) - - self.assertIn( - "Service not found", self.run_function("service.disable", ["spongebob"]) - ) - - @pytest.mark.destructive_test - 
@pytest.mark.slow_test - def test_start(self): - """ - Test service.start - Test service.stop - Test service.status - """ - self.assertTrue(self.run_function("service.start", [self.SERVICE_NAME])) - - self.assertIn( - "Service not found", self.run_function("service.start", ["spongebob"]) - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_stop(self): - """ - Test service.stop - """ - self.assertTrue(self.run_function("service.stop", [self.SERVICE_NAME])) - - self.assertIn( - "Service not found", self.run_function("service.stop", ["spongebob"]) - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_status(self): - """ - Test service.status - """ - # A running service - self.assertTrue(self.run_function("service.start", [self.SERVICE_NAME])) - self.assertTrue(self.run_function("service.status", [self.SERVICE_NAME])) - - # A stopped service - self.assertTrue(self.run_function("service.stop", [self.SERVICE_NAME])) - self.assertFalse(self.run_function("service.status", [self.SERVICE_NAME])) - - # Service not found - self.assertFalse(self.run_function("service.status", ["spongebob"])) - - @pytest.mark.slow_test - def test_available(self): - """ - Test service.available - """ - self.assertTrue(self.run_function("service.available", [self.SERVICE_NAME])) - self.assertFalse(self.run_function("service.available", ["spongebob"])) - - @pytest.mark.slow_test - def test_missing(self): - """ - Test service.missing - """ - self.assertFalse(self.run_function("service.missing", [self.SERVICE_NAME])) - self.assertTrue(self.run_function("service.missing", ["spongebob"])) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_enabled(self): - """ - Test service.enabled - """ - self.assertTrue(self.run_function("service.enabled", [self.SERVICE_NAME])) - self.assertTrue(self.run_function("service.start", [self.SERVICE_NAME])) - - self.assertTrue(self.run_function("service.enabled", [self.SERVICE_NAME])) - 
self.assertTrue(self.run_function("service.stop", [self.SERVICE_NAME])) - - self.assertTrue(self.run_function("service.enabled", ["spongebob"])) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_disabled(self): - """ - Test service.disabled - """ - self.assertTrue(self.run_function("service.start", [self.SERVICE_NAME])) - self.assertFalse(self.run_function("service.disabled", [self.SERVICE_NAME])) - - self.assertTrue(self.run_function("service.disable", [self.SERVICE_NAME])) - self.assertTrue(self.run_function("service.disabled", [self.SERVICE_NAME])) - self.assertTrue(self.run_function("service.enable", [self.SERVICE_NAME])) - self.assertIn( - "Service not found", self.run_function("service.stop", ["spongebob"]) - ) - - @pytest.mark.slow_test - def test_get_all(self): - """ - Test service.get_all - """ - services = self.run_function("service.get_all") - self.assertIsInstance(services, list) - self.assertIn(self.SERVICE_NAME, services) - - @pytest.mark.slow_test - def test_get_enabled(self): - """ - Test service.get_enabled - """ - services = self.run_function("service.get_enabled") - self.assertIsInstance(services, list) - self.assertIn(self.SERVICE_NAME, services) - - @pytest.mark.slow_test - def test_service_laoded(self): - """ - Test service.get_enabled - """ - self.assertTrue(self.run_function("service.loaded", [self.SERVICE_NAME])) diff --git a/tests/integration/modules/test_mac_shadow.py b/tests/integration/modules/test_mac_shadow.py deleted file mode 100644 index bb859ffbf099..000000000000 --- a/tests/integration/modules/test_mac_shadow.py +++ /dev/null @@ -1,226 +0,0 @@ -""" -integration tests for mac_shadow -""" - -import datetime - -import pytest -from saltfactories.utils import random_string - -from tests.support.case import ModuleCase - -TEST_USER = random_string("RS-", lowercase=False) -NO_USER = random_string("RS-", lowercase=False) - - -@pytest.mark.skip_if_binaries_missing("dscl", "pwpolicy") -@pytest.mark.skip_if_not_root 
-@pytest.mark.skip_unless_on_darwin -class MacShadowModuleTest(ModuleCase): - """ - Validate the mac_shadow module - """ - - def setUp(self): - """ - Get current settings - """ - self.run_function("user.add", [TEST_USER]) - - def tearDown(self): - """ - Reset to original settings - """ - self.run_function("user.delete", [TEST_USER]) - - @pytest.mark.slow_test - @pytest.mark.skip_initial_gh_actions_failure - def test_info(self): - """ - Test shadow.info - """ - # Correct Functionality - ret = self.run_function("shadow.info", [TEST_USER]) - self.assertEqual(ret["name"], TEST_USER) - - # User does not exist - ret = self.run_function("shadow.info", [NO_USER]) - self.assertEqual(ret["name"], "") - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_get_account_created(self): - """ - Test shadow.get_account_created - """ - # Correct Functionality - text_date = self.run_function("shadow.get_account_created", [TEST_USER]) - self.assertNotEqual(text_date, "Invalid Timestamp") - obj_date = datetime.datetime.strptime(text_date, "%Y-%m-%d %H:%M:%S") - self.assertIsInstance(obj_date, datetime.date) - - # User does not exist - self.assertEqual( - self.run_function("shadow.get_account_created", [NO_USER]), - "ERROR: User not found: {}".format(NO_USER), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - @pytest.mark.skip_initial_gh_actions_failure - def test_get_last_change(self): - """ - Test shadow.get_last_change - """ - # Correct Functionality - text_date = self.run_function("shadow.get_last_change", [TEST_USER]) - self.assertNotEqual(text_date, "Invalid Timestamp") - obj_date = datetime.datetime.strptime(text_date, "%Y-%m-%d %H:%M:%S") - self.assertIsInstance(obj_date, datetime.date) - - # User does not exist - self.assertEqual( - self.run_function("shadow.get_last_change", [NO_USER]), - "ERROR: User not found: {}".format(NO_USER), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - @pytest.mark.skip_initial_gh_actions_failure - def 
test_get_login_failed_last(self): - """ - Test shadow.get_login_failed_last - """ - # Correct Functionality - text_date = self.run_function("shadow.get_login_failed_last", [TEST_USER]) - self.assertNotEqual(text_date, "Invalid Timestamp") - obj_date = datetime.datetime.strptime(text_date, "%Y-%m-%d %H:%M:%S") - self.assertIsInstance(obj_date, datetime.date) - - # User does not exist - self.assertEqual( - self.run_function("shadow.get_login_failed_last", [NO_USER]), - "ERROR: User not found: {}".format(NO_USER), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - @pytest.mark.skip_initial_gh_actions_failure - def test_get_login_failed_count(self): - """ - Test shadow.get_login_failed_count - """ - # Correct Functionality - self.assertEqual( - self.run_function("shadow.get_login_failed_count", [TEST_USER]), "0" - ) - - # User does not exist - self.assertEqual( - self.run_function("shadow.get_login_failed_count", [NO_USER]), - "ERROR: User not found: {}".format(NO_USER), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_get_set_maxdays(self): - """ - Test shadow.get_maxdays - Test shadow.set_maxdays - """ - # Correct Functionality - self.assertTrue(self.run_function("shadow.set_maxdays", [TEST_USER, 20])) - self.assertEqual(self.run_function("shadow.get_maxdays", [TEST_USER]), 20) - - # User does not exist - self.assertEqual( - self.run_function("shadow.set_maxdays", [NO_USER, 7]), - "ERROR: User not found: {}".format(NO_USER), - ) - self.assertEqual( - self.run_function("shadow.get_maxdays", [NO_USER]), - "ERROR: User not found: {}".format(NO_USER), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_get_set_change(self): - """ - Test shadow.get_change - Test shadow.set_change - """ - # Correct Functionality - self.assertTrue( - self.run_function("shadow.set_change", [TEST_USER, "02/11/2011"]) - ) - self.assertEqual( - self.run_function("shadow.get_change", [TEST_USER]), "02/11/2011" - ) - - # User does not 
exist - self.assertEqual( - self.run_function("shadow.set_change", [NO_USER, "02/11/2012"]), - "ERROR: User not found: {}".format(NO_USER), - ) - self.assertEqual( - self.run_function("shadow.get_change", [NO_USER]), - "ERROR: User not found: {}".format(NO_USER), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_get_set_expire(self): - """ - Test shadow.get_expire - Test shadow.set_expire - """ - # Correct Functionality - self.assertTrue( - self.run_function("shadow.set_expire", [TEST_USER, "02/11/2011"]) - ) - self.assertEqual( - self.run_function("shadow.get_expire", [TEST_USER]), "02/11/2011" - ) - - # User does not exist - self.assertEqual( - self.run_function("shadow.set_expire", [NO_USER, "02/11/2012"]), - "ERROR: User not found: {}".format(NO_USER), - ) - self.assertEqual( - self.run_function("shadow.get_expire", [NO_USER]), - "ERROR: User not found: {}".format(NO_USER), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_del_password(self): - """ - Test shadow.del_password - """ - # Correct Functionality - self.assertTrue(self.run_function("shadow.del_password", [TEST_USER])) - self.assertEqual(self.run_function("shadow.info", [TEST_USER])["passwd"], "*") - - # User does not exist - self.assertEqual( - self.run_function("shadow.del_password", [NO_USER]), - "ERROR: User not found: {}".format(NO_USER), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_set_password(self): - """ - Test shadow.set_password - """ - # Correct Functionality - self.assertTrue( - self.run_function("shadow.set_password", [TEST_USER, "Pa$$W0rd"]) - ) - - # User does not exist - self.assertEqual( - self.run_function("shadow.set_password", [NO_USER, "P@SSw0rd"]), - "ERROR: User not found: {}".format(NO_USER), - ) diff --git a/tests/integration/modules/test_mac_softwareupdate.py b/tests/integration/modules/test_mac_softwareupdate.py deleted file mode 100644 index a8094969c358..000000000000 --- 
a/tests/integration/modules/test_mac_softwareupdate.py +++ /dev/null @@ -1,170 +0,0 @@ -""" -integration tests for mac_softwareupdate -""" - -import pytest - -from tests.support.case import ModuleCase - - -@pytest.mark.skip_if_not_root -@pytest.mark.skip_if_binaries_missing("softwareupdate") -@pytest.mark.skip_unless_on_darwin -class MacSoftwareUpdateModuleTest(ModuleCase): - """ - Validate the mac_softwareupdate module - """ - - IGNORED_LIST = [] - SCHEDULE = False - CATALOG = "" - - def setUp(self): - """ - Get current settings - """ - self.IGNORED_LIST = self.run_function("softwareupdate.list_ignored") - self.SCHEDULE = self.run_function("softwareupdate.schedule") - self.CATALOG = self.run_function("softwareupdate.get_catalog") - - super().setUp() - - def tearDown(self): - """ - Reset to original settings - """ - if self.IGNORED_LIST: - for item in self.IGNORED_LIST: - self.run_function("softwareupdate.ignore", [item]) - else: - self.run_function("softwareupdate.reset_ignored") - - self.run_function("softwareupdate.schedule", [self.SCHEDULE]) - - if self.CATALOG == "Default": - self.run_function("softwareupdate.reset_catalog") - else: - self.run_function("softwareupdate.set_catalog", [self.CATALOG]) - - super().tearDown() - - @pytest.mark.slow_test - def test_list_available(self): - """ - Test softwareupdate.list_available - """ - # Can't predict what will be returned, so can only test that the return - # is the correct type, dict - self.assertIsInstance(self.run_function("softwareupdate.list_available"), dict) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - @pytest.mark.skip_initial_gh_actions_failure - def test_ignore(self): - """ - Test softwareupdate.ignore - Test softwareupdate.list_ignored - Test softwareupdate.reset_ignored - """ - # Test reset_ignored - self.assertTrue(self.run_function("softwareupdate.reset_ignored")) - self.assertEqual(self.run_function("softwareupdate.list_ignored"), []) - - # Test ignore - 
self.assertTrue(self.run_function("softwareupdate.ignore", ["spongebob"])) - self.assertTrue(self.run_function("softwareupdate.ignore", ["squidward"])) - - # Test list_ignored and verify ignore - self.assertIn("spongebob", self.run_function("softwareupdate.list_ignored")) - self.assertIn("squidward", self.run_function("softwareupdate.list_ignored")) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - @pytest.mark.skip_initial_gh_actions_failure - def test_schedule(self): - """ - Test softwareupdate.schedule_enable - Test softwareupdate.schedule_enabled - """ - # Test enable - self.assertTrue(self.run_function("softwareupdate.schedule_enable", [True])) - self.assertTrue(self.run_function("softwareupdate.schedule_enabled")) - - # Test disable in case it was already enabled - self.assertTrue(self.run_function("softwareupdate.schedule_enable", [False])) - self.assertFalse(self.run_function("softwareupdate.schedule_enabled")) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_update(self): - """ - Test softwareupdate.update_all - Test softwareupdate.update - Test softwareupdate.update_available - - Need to know the names of updates that are available to properly test - the update functions... 
- """ - # There's no way to know what the dictionary will contain, so all we can - # check is that the return is a dictionary - self.assertIsInstance(self.run_function("softwareupdate.update_all"), dict) - - # Test update_available - self.assertFalse( - self.run_function("softwareupdate.update_available", ["spongebob"]) - ) - - # Test update not available - self.assertIn( - "Update not available", - self.run_function("softwareupdate.update", ["spongebob"]), - ) - - @pytest.mark.slow_test - def test_list_downloads(self): - """ - Test softwareupdate.list_downloads - """ - self.assertIsInstance(self.run_function("softwareupdate.list_downloads"), list) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_download(self): - """ - Test softwareupdate.download - - Need to know the names of updates that are available to properly test - the download function - """ - # Test update not available - self.assertIn( - "Update not available", - self.run_function("softwareupdate.download", ["spongebob"]), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_download_all(self): - """ - Test softwareupdate.download_all - """ - self.assertIsInstance(self.run_function("softwareupdate.download_all"), list) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - @pytest.mark.skip_initial_gh_actions_failure - def test_get_set_reset_catalog(self): - """ - Test softwareupdate.download_all - """ - # Reset the catalog - self.assertTrue(self.run_function("softwareupdate.reset_catalog")) - self.assertEqual(self.run_function("softwareupdate.get_catalog"), "Default") - - # Test setting and getting the catalog - self.assertTrue(self.run_function("softwareupdate.set_catalog", ["spongebob"])) - self.assertEqual(self.run_function("softwareupdate.get_catalog"), "spongebob") - - # Test reset the catalog - self.assertTrue(self.run_function("softwareupdate.reset_catalog")) - self.assertEqual(self.run_function("softwareupdate.get_catalog"), "Default") diff --git 
a/tests/integration/modules/test_mac_sysctl.py b/tests/integration/modules/test_mac_sysctl.py deleted file mode 100644 index 6d7b1c945d6c..000000000000 --- a/tests/integration/modules/test_mac_sysctl.py +++ /dev/null @@ -1,174 +0,0 @@ -""" - :codeauthor: Nicole Thomas -""" - -import os -import random - -import pytest - -import salt.utils.files -from salt.exceptions import CommandExecutionError -from tests.support.case import ModuleCase - -# Module Variables -ASSIGN_CMD = "net.inet.icmp.icmplim" -CONFIG = "/etc/sysctl.conf" - - -@pytest.mark.destructive_test -@pytest.mark.skip_if_not_root -@pytest.mark.skip_unless_on_darwin -class DarwinSysctlModuleTest(ModuleCase): - """ - Integration tests for the darwin_sysctl module - """ - - def setUp(self): - """ - Sets up the test requirements - """ - super().setUp() - # Data needed for cleanup - self.has_conf = False - self.val = self.run_function("sysctl.get", [ASSIGN_CMD]) - - # If sysctl file is present, make a copy - # Remove original file so we can replace it with test files - if os.path.isfile(CONFIG): - self.has_conf = True - try: - self.conf = self.__copy_sysctl() - except CommandExecutionError: - msg = "Could not copy file: {0}" - raise CommandExecutionError(msg.format(CONFIG)) - os.remove(CONFIG) - - @pytest.mark.slow_test - def test_assign(self): - """ - Tests assigning a single sysctl parameter - """ - try: - rand = random.randint(0, 500) - while rand == self.val: - rand = random.randint(0, 500) - self.run_function("sysctl.assign", [ASSIGN_CMD, rand]) - info = int(self.run_function("sysctl.get", [ASSIGN_CMD])) - try: - self.assertEqual(rand, info) - except AssertionError: - self.run_function("sysctl.assign", [ASSIGN_CMD, self.val]) - raise - except CommandExecutionError: - self.run_function("sysctl.assign", [ASSIGN_CMD, self.val]) - raise - - @pytest.mark.slow_test - def test_persist_new_file(self): - """ - Tests assigning a sysctl value to a system without a sysctl.conf file - """ - # Always start with a 
clean/known sysctl.conf state - if os.path.isfile(CONFIG): - os.remove(CONFIG) - try: - self.run_function("sysctl.persist", [ASSIGN_CMD, 10]) - line = "{}={}".format(ASSIGN_CMD, 10) - found = self.__check_string(CONFIG, line) - self.assertTrue(found) - except CommandExecutionError: - os.remove(CONFIG) - raise - - @pytest.mark.slow_test - def test_persist_already_set(self): - """ - Tests assigning a sysctl value that is already set in sysctl.conf file - """ - # Always start with a clean/known sysctl.conf state - if os.path.isfile(CONFIG): - os.remove(CONFIG) - try: - self.run_function("sysctl.persist", [ASSIGN_CMD, 50]) - ret = self.run_function("sysctl.persist", [ASSIGN_CMD, 50]) - self.assertEqual(ret, "Already set") - except CommandExecutionError: - os.remove(CONFIG) - raise - - @pytest.mark.slow_test - def test_persist_apply_change(self): - """ - Tests assigning a sysctl value and applying the change to system - """ - # Always start with a clean/known sysctl.conf state - if os.path.isfile(CONFIG): - os.remove(CONFIG) - try: - rand = random.randint(0, 500) - while rand == self.val: - rand = random.randint(0, 500) - self.run_function("sysctl.persist", [ASSIGN_CMD, rand], apply_change=True) - info = int(self.run_function("sysctl.get", [ASSIGN_CMD])) - self.assertEqual(info, rand) - except CommandExecutionError: - os.remove(CONFIG) - raise - - def __copy_sysctl(self): - """ - Copies an existing sysconf file and returns temp file path. 
Copied - file will be restored in tearDown - """ - # Create new temporary file path and open needed files - temp_path = salt.utils.files.mkstemp() - with salt.utils.files.fopen(CONFIG, "r") as org_conf: - with salt.utils.files.fopen(temp_path, "w") as temp_sysconf: - # write sysctl lines to temp file - for line in org_conf: - temp_sysconf.write(line) - return temp_path - - def __restore_sysctl(self): - """ - Restores the original sysctl.conf file from temporary copy - """ - # If sysctl testing file exists, delete it - if os.path.isfile(CONFIG): - os.remove(CONFIG) - - # write temp lines to sysctl file to restore - with salt.utils.files.fopen(self.conf, "r") as temp_sysctl: - with salt.utils.files.fopen(CONFIG, "w") as sysctl: - for line in temp_sysctl: - sysctl.write(line) - - # delete temporary file - os.remove(self.conf) - - def __check_string(self, conf_file, to_find): - """ - Returns True if given line is present in file - """ - with salt.utils.files.fopen(conf_file, "r") as f_in: - for line in f_in: - if to_find in salt.utils.stringutils.to_unicode(line): - return True - return False - - def tearDown(self): - """ - Clean up after tests - """ - ret = self.run_function("sysctl.get", [ASSIGN_CMD]) - if ret != self.val: - self.run_function("sysctl.assign", [ASSIGN_CMD, self.val]) - - if self.has_conf is True: - # restore original sysctl file - self.__restore_sysctl() - - if self.has_conf is False and os.path.isfile(CONFIG): - # remove sysctl.conf created by tests - os.remove(CONFIG) diff --git a/tests/integration/modules/test_mac_system.py b/tests/integration/modules/test_mac_system.py deleted file mode 100644 index 10aee4a88ea2..000000000000 --- a/tests/integration/modules/test_mac_system.py +++ /dev/null @@ -1,262 +0,0 @@ -""" -integration tests for mac_system -""" - -import logging - -import pytest -from saltfactories.utils import random_string - -from tests.support.case import ModuleCase - -log = logging.getLogger(__name__) - - -SET_COMPUTER_NAME = 
random_string("RS-", lowercase=False) -SET_SUBNET_NAME = random_string("RS-", lowercase=False) - - -@pytest.mark.flaky(max_runs=10) -@pytest.mark.skip_unless_on_darwin -@pytest.mark.usefixtures("salt_sub_minion") -@pytest.mark.skip_if_not_root -@pytest.mark.skip_if_binaries_missing("systemsetup") -class MacSystemModuleTest(ModuleCase): - """ - Validate the mac_system module - """ - - ATRUN_ENABLED = False - REMOTE_LOGIN_ENABLED = False - REMOTE_EVENTS_ENABLED = False - SUBNET_NAME = "" - KEYBOARD_DISABLED = False - - def setUp(self): - """ - Get current settings - """ - self.ATRUN_ENABLED = self.run_function("service.enabled", ["com.apple.atrun"]) - self.REMOTE_LOGIN_ENABLED = self.run_function("system.get_remote_login") - self.REMOTE_EVENTS_ENABLED = self.run_function("system.get_remote_events") - self.SUBNET_NAME = self.run_function("system.get_subnet_name") - self.KEYBOARD_DISABLED = self.run_function( - "system.get_disable_keyboard_on_lock" - ) - - def tearDown(self): - """ - Reset to original settings - """ - if not self.ATRUN_ENABLED: - atrun = "/System/Library/LaunchDaemons/com.apple.atrun.plist" - self.run_function("service.stop", [atrun]) - - self.run_function("system.set_remote_login", [self.REMOTE_LOGIN_ENABLED]) - self.run_function("system.set_remote_events", [self.REMOTE_EVENTS_ENABLED]) - self.run_function("system.set_subnet_name", [self.SUBNET_NAME]) - self.run_function( - "system.set_disable_keyboard_on_lock", [self.KEYBOARD_DISABLED] - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_get_set_remote_login(self): - """ - Test system.get_remote_login - Test system.set_remote_login - """ - # Normal Functionality - self.assertTrue(self.run_function("system.set_remote_login", [True])) - self.assertTrue(self.run_function("system.get_remote_login")) - self.assertTrue(self.run_function("system.set_remote_login", [False])) - self.assertFalse(self.run_function("system.get_remote_login")) - - # Test valid input - 
self.assertTrue(self.run_function("system.set_remote_login", [True])) - self.assertTrue(self.run_function("system.set_remote_login", [False])) - self.assertTrue(self.run_function("system.set_remote_login", ["yes"])) - self.assertTrue(self.run_function("system.set_remote_login", ["no"])) - self.assertTrue(self.run_function("system.set_remote_login", ["On"])) - self.assertTrue(self.run_function("system.set_remote_login", ["Off"])) - self.assertTrue(self.run_function("system.set_remote_login", [1])) - self.assertTrue(self.run_function("system.set_remote_login", [0])) - - # Test invalid input - self.assertIn( - "Invalid String Value for Enabled", - self.run_function("system.set_remote_login", ["spongebob"]), - ) - - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_get_set_remote_events(self): - """ - Test system.get_remote_events - Test system.set_remote_events - """ - # Normal Functionality - self.assertTrue(self.run_function("system.set_remote_events", [True])) - self.assertTrue(self.run_function("system.get_remote_events")) - self.assertTrue(self.run_function("system.set_remote_events", [False])) - self.assertFalse(self.run_function("system.get_remote_events")) - - # Test valid input - self.assertTrue(self.run_function("system.set_remote_events", [True])) - self.assertTrue(self.run_function("system.set_remote_events", [False])) - self.assertTrue(self.run_function("system.set_remote_events", ["yes"])) - self.assertTrue(self.run_function("system.set_remote_events", ["no"])) - self.assertTrue(self.run_function("system.set_remote_events", ["On"])) - self.assertTrue(self.run_function("system.set_remote_events", ["Off"])) - self.assertTrue(self.run_function("system.set_remote_events", [1])) - self.assertTrue(self.run_function("system.set_remote_events", [0])) - - # Test invalid input - self.assertIn( - "Invalid String Value for Enabled", - self.run_function("system.set_remote_events", ["spongebob"]), - ) - - @pytest.mark.destructive_test - 
@pytest.mark.slow_test - def test_get_set_subnet_name(self): - """ - Test system.get_subnet_name - Test system.set_subnet_name - """ - self.assertTrue(self.run_function("system.set_subnet_name", [SET_SUBNET_NAME])) - self.assertEqual(self.run_function("system.get_subnet_name"), SET_SUBNET_NAME) - - @pytest.mark.slow_test - @pytest.mark.skip_initial_gh_actions_failure - def test_get_list_startup_disk(self): - """ - Test system.get_startup_disk - Test system.list_startup_disks - Don't know how to test system.set_startup_disk as there's usually only - one startup disk available on a system - """ - # Test list and get - ret = self.run_function("system.list_startup_disks") - self.assertIsInstance(ret, list) - self.assertIn(self.run_function("system.get_startup_disk"), ret) - - # Test passing set a bad disk - self.assertIn( - "Invalid value passed for path.", - self.run_function("system.set_startup_disk", ["spongebob"]), - ) - - @pytest.mark.skip(reason="Skip this test until mac fixes it.") - def test_get_set_restart_delay(self): - """ - Test system.get_restart_delay - Test system.set_restart_delay - system.set_restart_delay does not work due to an apple bug, see docs - may need to disable this test as we can't control the delay value - """ - # Normal Functionality - self.assertTrue(self.run_function("system.set_restart_delay", [90])) - self.assertEqual(self.run_function("system.get_restart_delay"), "90 seconds") - - # Pass set bad value for seconds - self.assertIn( - "Invalid value passed for seconds.", - self.run_function("system.set_restart_delay", [70]), - ) - - @pytest.mark.slow_test - def test_get_set_disable_keyboard_on_lock(self): - """ - Test system.get_disable_keyboard_on_lock - Test system.set_disable_keyboard_on_lock - """ - # Normal Functionality - self.assertTrue( - self.run_function("system.set_disable_keyboard_on_lock", [True]) - ) - self.assertTrue(self.run_function("system.get_disable_keyboard_on_lock")) - - self.assertTrue( - 
self.run_function("system.set_disable_keyboard_on_lock", [False]) - ) - self.assertFalse(self.run_function("system.get_disable_keyboard_on_lock")) - - # Test valid input - self.assertTrue( - self.run_function("system.set_disable_keyboard_on_lock", [True]) - ) - self.assertTrue( - self.run_function("system.set_disable_keyboard_on_lock", [False]) - ) - self.assertTrue( - self.run_function("system.set_disable_keyboard_on_lock", ["yes"]) - ) - self.assertTrue( - self.run_function("system.set_disable_keyboard_on_lock", ["no"]) - ) - self.assertTrue( - self.run_function("system.set_disable_keyboard_on_lock", ["On"]) - ) - self.assertTrue( - self.run_function("system.set_disable_keyboard_on_lock", ["Off"]) - ) - self.assertTrue(self.run_function("system.set_disable_keyboard_on_lock", [1])) - self.assertTrue(self.run_function("system.set_disable_keyboard_on_lock", [0])) - - # Test invalid input - self.assertIn( - "Invalid String Value for Enabled", - self.run_function("system.set_disable_keyboard_on_lock", ["spongebob"]), - ) - - @pytest.mark.skip(reason="Skip this test until mac fixes it.") - def test_get_set_boot_arch(self): - """ - Test system.get_boot_arch - Test system.set_boot_arch - system.set_boot_arch does not work due to an apple bug, see docs - may need to disable this test as we can't set the boot architecture - """ - # Normal Functionality - self.assertTrue(self.run_function("system.set_boot_arch", ["i386"])) - self.assertEqual(self.run_function("system.get_boot_arch"), "i386") - self.assertTrue(self.run_function("system.set_boot_arch", ["default"])) - self.assertEqual(self.run_function("system.get_boot_arch"), "default") - - # Test invalid input - self.assertIn( - "Invalid value passed for arch", - self.run_function("system.set_boot_arch", ["spongebob"]), - ) - - -@pytest.mark.skip_unless_on_darwin -@pytest.mark.skip_if_not_root -class MacSystemComputerNameTest(ModuleCase): - def setUp(self): - self.COMPUTER_NAME = 
self.run_function("system.get_computer_name") - self.wait_for_all_jobs() - - def tearDown(self): - self.run_function("system.set_computer_name", [self.COMPUTER_NAME]) - self.wait_for_all_jobs() - - # A similar test used to be skipped on py3 due to 'hanging', if we see - # something similar again we may want to skip this gain until we - # investigate - # @pytest.mark.skipif(salt.utils.platform.is_darwin() and six.PY3, reason='This test hangs on OS X on Py3. Skipping until #53566 is merged.') - @pytest.mark.destructive_test - @pytest.mark.slow_test - def test_get_set_computer_name(self): - """ - Test system.get_computer_name - Test system.set_computer_name - """ - log.debug("Set name is %s", SET_COMPUTER_NAME) - self.assertTrue( - self.run_function("system.set_computer_name", [SET_COMPUTER_NAME]) - ) - self.assertEqual( - self.run_function("system.get_computer_name"), SET_COMPUTER_NAME - ) diff --git a/tests/integration/modules/test_mac_timezone.py b/tests/integration/modules/test_mac_timezone.py deleted file mode 100644 index f0905843360d..000000000000 --- a/tests/integration/modules/test_mac_timezone.py +++ /dev/null @@ -1,205 +0,0 @@ -""" -Integration tests for mac_timezone - -If using parallels, make sure Time sync is turned off. Otherwise, parallels will -keep changing your date/time settings while the tests are running. 
To turn off -Time sync do the following: - - Go to actions -> configure - - Select options at the top and 'More Options' on the left - - Set time to 'Do not sync' -""" - -import datetime - -import pytest - -from tests.support.case import ModuleCase - - -@pytest.mark.flaky(max_runs=4) -@pytest.mark.skip_unless_on_darwin -@pytest.mark.skip_if_binaries_missing("systemsetup") -@pytest.mark.skip_if_not_root -class MacTimezoneModuleTest(ModuleCase): - """ - Validate the mac_timezone module - """ - - USE_NETWORK_TIME = False - TIME_SERVER = "time.apple.com" - TIME_ZONE = "" - CURRENT_DATE = "" - CURRENT_TIME = "" - - def setUp(self): - """ - Get current settings - """ - self.USE_NETWORK_TIME = self.run_function("timezone.get_using_network_time") - self.TIME_SERVER = self.run_function("timezone.get_time_server") - self.TIME_ZONE = self.run_function("timezone.get_zone") - self.CURRENT_DATE = self.run_function("timezone.get_date") - self.CURRENT_TIME = self.run_function("timezone.get_time") - - self.run_function("timezone.set_using_network_time", [False]) - self.run_function("timezone.set_zone", ["America/Denver"]) - - def tearDown(self): - """ - Reset to original settings - """ - self.run_function("timezone.set_time_server", [self.TIME_SERVER]) - self.run_function("timezone.set_using_network_time", [self.USE_NETWORK_TIME]) - self.run_function("timezone.set_zone", [self.TIME_ZONE]) - if not self.USE_NETWORK_TIME: - self.run_function("timezone.set_date", [self.CURRENT_DATE]) - self.run_function("timezone.set_time", [self.CURRENT_TIME]) - - @pytest.mark.skip( - reason="Skip until we can figure out why modifying the system clock causes ZMQ errors", - ) - @pytest.mark.destructive_test - def test_get_set_date(self): - """ - Test timezone.get_date - Test timezone.set_date - """ - # Correct Functionality - self.assertTrue(self.run_function("timezone.set_date", ["2/20/2011"])) - self.assertEqual(self.run_function("timezone.get_date"), "2/20/2011") - - # Test bad date format - 
self.assertEqual( - self.run_function("timezone.set_date", ["13/12/2014"]), - "ERROR executing 'timezone.set_date': Invalid Date/Time Format: 13/12/2014", - ) - - @pytest.mark.slow_test - def test_get_time(self): - """ - Test timezone.get_time - """ - text_time = self.run_function("timezone.get_time") - self.assertNotEqual(text_time, "Invalid Timestamp") - obj_date = datetime.datetime.strptime(text_time, "%H:%M:%S") - self.assertIsInstance(obj_date, datetime.date) - - @pytest.mark.skip( - reason="Skip until we can figure out why modifying the system clock causes ZMQ errors", - ) - @pytest.mark.destructive_test - def test_set_time(self): - """ - Test timezone.set_time - """ - # Correct Functionality - self.assertTrue(self.run_function("timezone.set_time", ["3:14"])) - - # Test bad time format - self.assertEqual( - self.run_function("timezone.set_time", ["3:71"]), - "ERROR executing 'timezone.set_time': Invalid Date/Time Format: 3:71", - ) - - @pytest.mark.skip( - reason="Skip until we can figure out why modifying the system clock causes ZMQ errors", - ) - @pytest.mark.destructive_test - def test_get_set_zone(self): - """ - Test timezone.get_zone - Test timezone.set_zone - """ - # Correct Functionality - self.assertTrue(self.run_function("timezone.set_zone", ["Pacific/Wake"])) - self.assertEqual(self.run_function("timezone.get_zone"), "Pacific/Wake") - - # Test bad time zone - self.assertEqual( - self.run_function("timezone.set_zone", ["spongebob"]), - "ERROR executing 'timezone.set_zone': Invalid Timezone: spongebob", - ) - - @pytest.mark.skip( - reason="Skip until we can figure out why modifying the system clock causes ZMQ errors", - ) - @pytest.mark.destructive_test - def test_get_offset(self): - """ - Test timezone.get_offset - """ - self.assertTrue(self.run_function("timezone.set_zone", ["Pacific/Wake"])) - self.assertIsInstance(self.run_function("timezone.get_offset"), (str,)) - self.assertEqual(self.run_function("timezone.get_offset"), "+1200") - - 
self.assertTrue(self.run_function("timezone.set_zone", ["America/Los_Angeles"])) - self.assertIsInstance(self.run_function("timezone.get_offset"), (str,)) - self.assertEqual(self.run_function("timezone.get_offset"), "-0700") - - @pytest.mark.skip( - reason="Skip until we can figure out why modifying the system clock causes ZMQ errors", - ) - @pytest.mark.destructive_test - def test_get_set_zonecode(self): - """ - Test timezone.get_zonecode - Test timezone.set_zonecode - """ - self.assertTrue(self.run_function("timezone.set_zone", ["America/Los_Angeles"])) - self.assertIsInstance(self.run_function("timezone.get_zonecode"), (str,)) - self.assertEqual(self.run_function("timezone.get_zonecode"), "PDT") - - self.assertTrue(self.run_function("timezone.set_zone", ["Pacific/Wake"])) - self.assertIsInstance(self.run_function("timezone.get_zonecode"), (str,)) - self.assertEqual(self.run_function("timezone.get_zonecode"), "WAKT") - - @pytest.mark.slow_test - def test_list_zones(self): - """ - Test timezone.list_zones - """ - zones = self.run_function("timezone.list_zones") - self.assertIsInstance(self.run_function("timezone.list_zones"), list) - self.assertIn("America/Denver", self.run_function("timezone.list_zones")) - self.assertIn("America/Los_Angeles", self.run_function("timezone.list_zones")) - - @pytest.mark.skip( - reason="Skip until we can figure out why modifying the system clock causes ZMQ errors", - ) - @pytest.mark.destructive_test - def test_zone_compare(self): - """ - Test timezone.zone_compare - """ - self.assertTrue(self.run_function("timezone.set_zone", ["America/Denver"])) - self.assertTrue(self.run_function("timezone.zone_compare", ["America/Denver"])) - self.assertFalse(self.run_function("timezone.zone_compare", ["Pacific/Wake"])) - - @pytest.mark.skip( - reason="Skip until we can figure out why modifying the system clock causes ZMQ errors", - ) - @pytest.mark.destructive_test - def test_get_set_using_network_time(self): - """ - Test 
timezone.get_using_network_time - Test timezone.set_using_network_time - """ - self.assertTrue(self.run_function("timezone.set_using_network_time", [True])) - self.assertTrue(self.run_function("timezone.get_using_network_time")) - - self.assertTrue(self.run_function("timezone.set_using_network_time", [False])) - self.assertFalse(self.run_function("timezone.get_using_network_time")) - - @pytest.mark.skip( - reason="Skip until we can figure out why modifying the system clock causes ZMQ errors", - ) - @pytest.mark.destructive_test - def test_get_set_time_server(self): - """ - Test timezone.get_time_server - Test timezone.set_time_server - """ - self.assertTrue( - self.run_function("timezone.set_time_server", ["spongebob.com"]) - ) - self.assertEqual(self.run_function("timezone.get_time_server"), "spongebob.com") diff --git a/tests/integration/modules/test_mac_user.py b/tests/integration/modules/test_mac_user.py deleted file mode 100644 index 416d5bb8d68f..000000000000 --- a/tests/integration/modules/test_mac_user.py +++ /dev/null @@ -1,235 +0,0 @@ -""" - :codeauthor: Nicole Thomas -""" - -import os - -import pytest -from saltfactories.utils import random_string - -import salt.utils.files -from salt.exceptions import CommandExecutionError -from tests.support.case import ModuleCase - -# Create user strings for tests -ADD_USER = random_string("RS-", lowercase=False) -DEL_USER = random_string("RS-", lowercase=False) -PRIMARY_GROUP_USER = random_string("RS-", lowercase=False) -CHANGE_USER = random_string("RS-", lowercase=False) - - -@pytest.mark.skip_if_not_root -@pytest.mark.destructive_test -@pytest.mark.skip_unless_on_darwin -class MacUserModuleTest(ModuleCase): - """ - Integration tests for the mac_user module - """ - - def setUp(self): - """ - Sets up test requirements - """ - super().setUp() - os_grain = self.run_function("grains.item", ["kernel"]) - if os_grain["kernel"] not in "Darwin": - self.skipTest("Test not applicable to '{kernel}' kernel".format(**os_grain)) 
- - @pytest.mark.slow_test - def test_mac_user_add(self): - """ - Tests the add function - """ - try: - self.run_function("user.add", [ADD_USER]) - user_info = self.run_function("user.info", [ADD_USER]) - self.assertEqual(ADD_USER, user_info["name"]) - except CommandExecutionError: - self.run_function("user.delete", [ADD_USER]) - raise - - @pytest.mark.slow_test - def test_mac_user_delete(self): - """ - Tests the delete function - """ - - # Create a user to delete - If unsuccessful, skip the test - if self.run_function("user.add", [DEL_USER]) is not True: - self.run_function("user.delete", [DEL_USER]) - self.skipTest("Failed to create a user to delete") - - # Now try to delete the added user - ret = self.run_function("user.delete", [DEL_USER]) - self.assertTrue(ret) - - @pytest.mark.slow_test - def test_mac_user_primary_group(self): - """ - Tests the primary_group function - """ - - # Create a user to test primary group function - if self.run_function("user.add", [PRIMARY_GROUP_USER]) is not True: - self.run_function("user.delete", [PRIMARY_GROUP_USER]) - self.skipTest("Failed to create a user") - - try: - # Test mac_user.primary_group - primary_group = self.run_function( - "user.primary_group", [PRIMARY_GROUP_USER] - ) - uid_info = self.run_function("user.info", [PRIMARY_GROUP_USER]) - self.assertIn(primary_group, uid_info["groups"]) - - except AssertionError: - self.run_function("user.delete", [PRIMARY_GROUP_USER]) - raise - - @pytest.mark.slow_test - def test_mac_user_changes(self): - """ - Tests mac_user functions that change user properties - """ - # Create a user to manipulate - if unsuccessful, skip the test - if self.run_function("user.add", [CHANGE_USER]) is not True: - self.run_function("user.delete", [CHANGE_USER]) - self.skipTest("Failed to create a user") - - try: - # Test mac_user.chuid - self.run_function("user.chuid", [CHANGE_USER, 4376]) - uid_info = self.run_function("user.info", [CHANGE_USER]) - self.assertEqual(uid_info["uid"], 4376) - - # Test 
mac_user.chgid - self.run_function("user.chgid", [CHANGE_USER, 4376]) - gid_info = self.run_function("user.info", [CHANGE_USER]) - self.assertEqual(gid_info["gid"], 4376) - - # Test mac.user.chshell - self.run_function("user.chshell", [CHANGE_USER, "/bin/zsh"]) - shell_info = self.run_function("user.info", [CHANGE_USER]) - self.assertEqual(shell_info["shell"], "/bin/zsh") - - # Test mac_user.chhome - self.run_function("user.chhome", [CHANGE_USER, "/Users/foo"]) - home_info = self.run_function("user.info", [CHANGE_USER]) - self.assertEqual(home_info["home"], "/Users/foo") - - # Test mac_user.chfullname - self.run_function("user.chfullname", [CHANGE_USER, "Foo Bar"]) - fullname_info = self.run_function("user.info", [CHANGE_USER]) - self.assertEqual(fullname_info["fullname"], "Foo Bar") - - # Test mac_user.chgroups - pre_info = self.run_function("user.info", [CHANGE_USER])["groups"] - expected = pre_info + ["wheel"] - self.run_function("user.chgroups", [CHANGE_USER, "wheel"]) - groups_info = self.run_function("user.info", [CHANGE_USER]) - self.assertEqual(groups_info["groups"], expected) - - except AssertionError: - self.run_function("user.delete", [CHANGE_USER]) - raise - - @pytest.mark.slow_test - def test_mac_user_enable_auto_login(self): - """ - Tests mac_user functions that enable auto login - """ - # Make sure auto login is disabled before we start - if self.run_function("user.get_auto_login"): - self.skipTest("Auto login already enabled") - - try: - # Does enable return True - self.assertTrue( - self.run_function( - "user.enable_auto_login", ["Spongebob", "Squarepants"] - ) - ) - - # Did it set the user entry in the plist file - self.assertEqual(self.run_function("user.get_auto_login"), "Spongebob") - - # Did it generate the `/etc/kcpassword` file - self.assertTrue(os.path.exists("/etc/kcpassword")) - - # Are the contents of the file correct - test_data = b".\xc3\xb8'B\xc2\xa0\xc3\x99\xc2\xad\xc2\x8b\xc3\x8d\xc3\x8dl" - with 
salt.utils.files.fopen("/etc/kcpassword", "rb") as f: - file_data = f.read() - self.assertEqual(test_data, file_data) - - # Does disable return True - self.assertTrue(self.run_function("user.disable_auto_login")) - - # Does it remove the user entry in the plist file - self.assertFalse(self.run_function("user.get_auto_login")) - - # Is the `/etc/kcpassword` file removed - self.assertFalse(os.path.exists("/etc/kcpassword")) - - finally: - # Make sure auto_login is disabled - self.assertTrue(self.run_function("user.disable_auto_login")) - - # Make sure autologin is disabled - if self.run_function("user.get_auto_login"): - raise Exception("Failed to disable auto login") - - @pytest.mark.slow_test - def test_mac_user_disable_auto_login(self): - """ - Tests mac_user functions that disable auto login - """ - # Make sure auto login is enabled before we start - # Is there an existing setting - if self.run_function("user.get_auto_login"): - self.skipTest("Auto login already enabled") - - try: - # Enable auto login for the test - self.run_function("user.enable_auto_login", ["Spongebob", "Squarepants"]) - - # Make sure auto login got set up - if not self.run_function("user.get_auto_login") == "Spongebob": - raise Exception("Failed to enable auto login") - - # Does disable return True - self.assertTrue(self.run_function("user.disable_auto_login")) - - # Does it remove the user entry in the plist file - self.assertFalse(self.run_function("user.get_auto_login")) - - # Is the `/etc/kcpassword` file removed - self.assertFalse(os.path.exists("/etc/kcpassword")) - - finally: - # Make sure auto login is disabled - self.assertTrue(self.run_function("user.disable_auto_login")) - - # Make sure auto login is disabled - if self.run_function("user.get_auto_login"): - raise Exception("Failed to disable auto login") - - def tearDown(self): - """ - Clean up after tests - """ - - # Delete ADD_USER - add_info = self.run_function("user.info", [ADD_USER]) - if add_info: - 
self.run_function("user.delete", [ADD_USER]) - - # Delete DEL_USER if something failed - del_info = self.run_function("user.info", [DEL_USER]) - if del_info: - self.run_function("user.delete", [DEL_USER]) - - # Delete CHANGE_USER - change_info = self.run_function("user.info", [CHANGE_USER]) - if change_info: - self.run_function("user.delete", [CHANGE_USER]) diff --git a/tests/integration/modules/test_mac_xattr.py b/tests/integration/modules/test_mac_xattr.py deleted file mode 100644 index bcc39339b069..000000000000 --- a/tests/integration/modules/test_mac_xattr.py +++ /dev/null @@ -1,193 +0,0 @@ -""" -integration tests for mac_xattr -""" - -import os - -import pytest - -from tests.support.case import ModuleCase -from tests.support.runtests import RUNTIME_VARS - - -@pytest.mark.skip_if_binaries_missing("xattr") -@pytest.mark.skip_unless_on_darwin -class MacXattrModuleTest(ModuleCase): - """ - Validate the mac_xattr module - """ - - @classmethod - def setUpClass(cls): - cls.test_file = os.path.join(RUNTIME_VARS.TMP, "xattr_test_file.txt") - cls.no_file = os.path.join(RUNTIME_VARS.TMP, "xattr_no_file.txt") - - def setUp(self): - """ - Create test file for testing extended attributes - """ - self.run_function("file.touch", [self.test_file]) - - def tearDown(self): - """ - Clean up test file - """ - if os.path.exists(self.test_file): - os.remove(self.test_file) - - @pytest.mark.slow_test - def test_list_no_xattr(self): - """ - Make sure there are no attributes - """ - # Clear existing attributes - self.assertTrue(self.run_function("xattr.clear", [self.test_file])) - - # Test no attributes - self.assertEqual(self.run_function("xattr.list", [self.test_file]), {}) - - # Test file not found - self.assertEqual( - self.run_function("xattr.list", [self.no_file]), - "ERROR: File not found: {}".format(self.no_file), - ) - - @pytest.mark.slow_test - def test_write(self): - """ - Write an attribute - """ - # Clear existing attributes - 
self.assertTrue(self.run_function("xattr.clear", [self.test_file])) - - # Write some attributes - self.assertTrue( - self.run_function( - "xattr.write", [self.test_file, "spongebob", "squarepants"] - ) - ) - self.assertTrue( - self.run_function("xattr.write", [self.test_file, "squidward", "plankton"]) - ) - self.assertTrue( - self.run_function("xattr.write", [self.test_file, "crabby", "patty"]) - ) - - # Test that they were actually added - self.assertEqual( - self.run_function("xattr.list", [self.test_file]), - {"spongebob": "squarepants", "squidward": "plankton", "crabby": "patty"}, - ) - - # Test file not found - self.assertEqual( - self.run_function("xattr.write", [self.no_file, "patrick", "jellyfish"]), - "ERROR: File not found: {}".format(self.no_file), - ) - - @pytest.mark.slow_test - def test_read(self): - """ - Test xattr.read - """ - # Clear existing attributes - self.assertTrue(self.run_function("xattr.clear", [self.test_file])) - - # Write an attribute - self.assertTrue( - self.run_function( - "xattr.write", [self.test_file, "spongebob", "squarepants"] - ) - ) - - # Read the attribute - self.assertEqual( - self.run_function("xattr.read", [self.test_file, "spongebob"]), - "squarepants", - ) - - # Test file not found - self.assertEqual( - self.run_function("xattr.read", [self.no_file, "spongebob"]), - "ERROR: File not found: {}".format(self.no_file), - ) - - # Test attribute not found - self.assertEqual( - self.run_function("xattr.read", [self.test_file, "patrick"]), - "ERROR: Attribute not found: patrick", - ) - - @pytest.mark.slow_test - def test_delete(self): - """ - Test xattr.delete - """ - # Clear existing attributes - self.assertTrue(self.run_function("xattr.clear", [self.test_file])) - - # Write some attributes - self.assertTrue( - self.run_function( - "xattr.write", [self.test_file, "spongebob", "squarepants"] - ) - ) - self.assertTrue( - self.run_function("xattr.write", [self.test_file, "squidward", "plankton"]) - ) - self.assertTrue( - 
self.run_function("xattr.write", [self.test_file, "crabby", "patty"]) - ) - - # Delete an attribute - self.assertTrue( - self.run_function("xattr.delete", [self.test_file, "squidward"]) - ) - - # Make sure it was actually deleted - self.assertEqual( - self.run_function("xattr.list", [self.test_file]), - {"spongebob": "squarepants", "crabby": "patty"}, - ) - - # Test file not found - self.assertEqual( - self.run_function("xattr.delete", [self.no_file, "spongebob"]), - "ERROR: File not found: {}".format(self.no_file), - ) - - # Test attribute not found - self.assertEqual( - self.run_function("xattr.delete", [self.test_file, "patrick"]), - "ERROR: Attribute not found: patrick", - ) - - @pytest.mark.slow_test - def test_clear(self): - """ - Test xattr.clear - """ - # Clear existing attributes - self.assertTrue(self.run_function("xattr.clear", [self.test_file])) - - # Write some attributes - self.assertTrue( - self.run_function( - "xattr.write", [self.test_file, "spongebob", "squarepants"] - ) - ) - self.assertTrue( - self.run_function("xattr.write", [self.test_file, "squidward", "plankton"]) - ) - self.assertTrue( - self.run_function("xattr.write", [self.test_file, "crabby", "patty"]) - ) - - # Test Clear - self.assertTrue(self.run_function("xattr.clear", [self.test_file])) - - # Test file not found - self.assertEqual( - self.run_function("xattr.clear", [self.no_file]), - "ERROR: File not found: {}".format(self.no_file), - ) diff --git a/tests/integration/modules/test_pip.py b/tests/integration/modules/test_pip.py deleted file mode 100644 index 83457b467c89..000000000000 --- a/tests/integration/modules/test_pip.py +++ /dev/null @@ -1,570 +0,0 @@ -import os -import pprint -import re -import shutil -import tempfile - -import pytest - -import salt.utils.files -import salt.utils.path -import salt.utils.platform -from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES -from tests.support.case import ModuleCase -from tests.support.helpers import VirtualEnv, 
patched_environ -from tests.support.runtests import RUNTIME_VARS - - -@pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False) -@pytest.mark.windows_whitelisted -class PipModuleTest(ModuleCase): - def setUp(self): - super().setUp() - self.venv_test_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) - # Remove the venv test directory - self.addCleanup(shutil.rmtree, self.venv_test_dir, ignore_errors=True) - self.venv_dir = os.path.join(self.venv_test_dir, "venv") - self.patched_environ = patched_environ( - PIP_SOURCE_DIR="", - PIP_BUILD_DIR="", - __cleanup__=[k for k in os.environ if k.startswith("PIP_")], - ) - self.patched_environ.__enter__() - self.addCleanup(self.patched_environ.__exit__) - - def _check_download_error(self, ret): - """ - Checks to see if a download error looks transitory - """ - return any(w in ret for w in ["URLError", "Download error"]) - - def pip_successful_install( - self, - target, - expect=( - "irc3-plugins-test", - "pep8", - ), - ): - """ - isolate regex for extracting `successful install` message from pip - """ - - expect = set(expect) - expect_str = "|".join(expect) - - success = re.search( - r"^.*Successfully installed\s([^\n]+)(?:Clean.*)?", target, re.M | re.S - ) - - success_for = ( - re.findall(r"({})(?:-(?:[\d\.-]))?".format(expect_str), success.groups()[0]) - if success - else [] - ) - - return expect.issubset(set(success_for)) - - @pytest.mark.slow_test - def test_issue_2087_missing_pip(self): - # Let's create the testing virtualenv - with VirtualEnv(self.venv_dir): - - # Let's remove the pip binary - pip_bin = os.path.join(self.venv_dir, "bin", "pip") - site_dir = self.run_function( - "virtualenv.get_distribution_path", [self.venv_dir, "pip"] - ) - if salt.utils.platform.is_windows(): - pip_bin = os.path.join(self.venv_dir, "Scripts", "pip.exe") - site_dir = os.path.join(self.venv_dir, "lib", "site-packages") - if not os.path.isfile(pip_bin): - self.skipTest("Failed to find the pip binary to the test virtualenv") 
- os.remove(pip_bin) - - # Also remove the pip dir from site-packages - # This is needed now that we're using python -m pip instead of the - # pip binary directly. python -m pip will still work even if the - # pip binary is missing - shutil.rmtree(os.path.join(site_dir, "pip")) - - # Let's run a pip depending functions - for func in ("pip.freeze", "pip.list"): - ret = self.run_function(func, bin_env=self.venv_dir) - assert ( - "Command required for '{}' not found: Could not find a `pip` binary".format( - func - ) - in ret - ) - - @pytest.mark.slow_test - def test_requirements_as_list_of_chains__cwd_set__absolute_file_path(self): - with VirtualEnv(self.venv_dir): - - # Create a requirements file that depends on another one. - - req1_filename = os.path.join(self.venv_dir, "requirements1.txt") - req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt") - req2_filename = os.path.join(self.venv_dir, "requirements2.txt") - req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt") - - with salt.utils.files.fopen(req1_filename, "w") as f: - f.write("-r requirements1b.txt\n") - with salt.utils.files.fopen(req1b_filename, "w") as f: - f.write("irc3-plugins-test\n") - with salt.utils.files.fopen(req2_filename, "w") as f: - f.write("-r requirements2b.txt\n") - with salt.utils.files.fopen(req2b_filename, "w") as f: - f.write("pep8\n") - - requirements_list = [req1_filename, req2_filename] - - ret = self.run_function( - "pip.install", - requirements=requirements_list, - bin_env=self.venv_dir, - cwd=self.venv_dir, - ) - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - assert ret["retcode"] == 0 - found = self.pip_successful_install(ret["stdout"]) - assert found - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." 
- " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.slow_test - def test_requirements_as_list_of_chains__cwd_not_set__absolute_file_path(self): - with VirtualEnv(self.venv_dir): - - # Create a requirements file that depends on another one. - - req1_filename = os.path.join(self.venv_dir, "requirements1.txt") - req1b_filename = os.path.join(self.venv_dir, "requirements1b.txt") - req2_filename = os.path.join(self.venv_dir, "requirements2.txt") - req2b_filename = os.path.join(self.venv_dir, "requirements2b.txt") - - with salt.utils.files.fopen(req1_filename, "w") as f: - f.write("-r requirements1b.txt\n") - with salt.utils.files.fopen(req1b_filename, "w") as f: - f.write("irc3-plugins-test\n") - with salt.utils.files.fopen(req2_filename, "w") as f: - f.write("-r requirements2b.txt\n") - with salt.utils.files.fopen(req2b_filename, "w") as f: - f.write("pep8\n") - - requirements_list = [req1_filename, req2_filename] - - ret = self.run_function( - "pip.install", requirements=requirements_list, bin_env=self.venv_dir - ) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - assert ret["retcode"] == 0 - found = self.pip_successful_install(ret["stdout"]) - assert found - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." 
- " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.slow_test - def test_requirements_as_list__absolute_file_path(self): - with VirtualEnv(self.venv_dir): - - req1_filename = os.path.join(self.venv_dir, "requirements.txt") - req2_filename = os.path.join(self.venv_dir, "requirements2.txt") - - with salt.utils.files.fopen(req1_filename, "w") as f: - f.write("irc3-plugins-test\n") - with salt.utils.files.fopen(req2_filename, "w") as f: - f.write("pep8\n") - - requirements_list = [req1_filename, req2_filename] - - ret = self.run_function( - "pip.install", requirements=requirements_list, bin_env=self.venv_dir - ) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - assert ret["retcode"] == 0 - found = self.pip_successful_install(ret["stdout"]) - assert found - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." - " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.slow_test - def test_requirements_as_list__non_absolute_file_path(self): - with VirtualEnv(self.venv_dir): - - # Create a requirements file that depends on another one. - - req1_filename = "requirements.txt" - req2_filename = "requirements2.txt" - req_cwd = self.venv_dir - - req1_filepath = os.path.join(req_cwd, req1_filename) - req2_filepath = os.path.join(req_cwd, req2_filename) - - with salt.utils.files.fopen(req1_filepath, "w") as f: - f.write("irc3-plugins-test\n") - with salt.utils.files.fopen(req2_filepath, "w") as f: - f.write("pep8\n") - - requirements_list = [req1_filename, req2_filename] - - ret = self.run_function( - "pip.install", - requirements=requirements_list, - bin_env=self.venv_dir, - cwd=req_cwd, - ) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." 
- " Output:\n{}".format(ret) - ) - - try: - assert ret["retcode"] == 0 - found = self.pip_successful_install(ret["stdout"]) - assert found - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." - " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.slow_test - def test_chained_requirements__absolute_file_path(self): - with VirtualEnv(self.venv_dir): - - # Create a requirements file that depends on another one. - - req1_filename = os.path.join(self.venv_dir, "requirements.txt") - req2_filename = os.path.join(self.venv_dir, "requirements2.txt") - - with salt.utils.files.fopen(req1_filename, "w") as f: - f.write("-r requirements2.txt") - with salt.utils.files.fopen(req2_filename, "w") as f: - f.write("pep8") - - ret = self.run_function( - "pip.install", requirements=req1_filename, bin_env=self.venv_dir - ) - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - assert ret["retcode"] == 0 - assert "installed pep8" in ret["stdout"] - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." - " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.slow_test - def test_chained_requirements__non_absolute_file_path(self): - with VirtualEnv(self.venv_dir): - - # Create a requirements file that depends on another one. 
- req_basepath = self.venv_dir - - req1_filename = "requirements.txt" - req2_filename = "requirements2.txt" - - req1_file = os.path.join(self.venv_dir, req1_filename) - req2_file = os.path.join(self.venv_dir, req2_filename) - - with salt.utils.files.fopen(req1_file, "w") as f: - f.write("-r requirements2.txt") - with salt.utils.files.fopen(req2_file, "w") as f: - f.write("pep8") - - ret = self.run_function( - "pip.install", - requirements=req1_filename, - cwd=req_basepath, - bin_env=self.venv_dir, - ) - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - assert ret["retcode"] == 0 - assert "installed pep8" in ret["stdout"] - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." - " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.slow_test - def test_issue_4805_nested_requirements(self): - with VirtualEnv(self.venv_dir): - - # Create a requirements file that depends on another one. - req1_filename = os.path.join(self.venv_dir, "requirements.txt") - req2_filename = os.path.join(self.venv_dir, "requirements2.txt") - with salt.utils.files.fopen(req1_filename, "w") as f: - f.write("-r requirements2.txt") - with salt.utils.files.fopen(req2_filename, "w") as f: - f.write("pep8") - - ret = self.run_function( - "pip.install", - requirements=req1_filename, - bin_env=self.venv_dir, - timeout=300, - ) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - if self._check_download_error(ret["stdout"]): - self.skipTest("Test skipped due to pip download error") - assert ret["retcode"] == 0 - assert "installed pep8" in ret["stdout"] - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." 
- " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.slow_test - def test_pip_uninstall(self): - # Let's create the testing virtualenv - with VirtualEnv(self.venv_dir): - ret = self.run_function("pip.install", ["pep8"], bin_env=self.venv_dir) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - if self._check_download_error(ret["stdout"]): - self.skipTest("Test skipped due to pip download error") - assert ret["retcode"] == 0 - assert "installed pep8" in ret["stdout"] - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." - " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.uninstall' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - assert ret["retcode"] == 0 - assert "uninstalled pep8" in ret["stdout"] - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." - " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.slow_test - def test_pip_install_upgrade(self): - # Create the testing virtualenv - with VirtualEnv(self.venv_dir): - ret = self.run_function( - "pip.install", ["pep8==1.3.4"], bin_env=self.venv_dir - ) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - if self._check_download_error(ret["stdout"]): - self.skipTest("Test skipped due to pip download error") - assert ret["retcode"] == 0 - assert "installed pep8" in ret["stdout"] - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." 
- " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - ret = self.run_function( - "pip.install", ["pep8"], bin_env=self.venv_dir, upgrade=True - ) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - if self._check_download_error(ret["stdout"]): - self.skipTest("Test skipped due to pip download error") - assert ret["retcode"] == 0 - assert "installed pep8" in ret["stdout"] - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." - " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - ret = self.run_function("pip.uninstall", ["pep8"], bin_env=self.venv_dir) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.uninstall' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - assert ret["retcode"] == 0 - assert "uninstalled pep8" in ret["stdout"] - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." - " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.slow_test - def test_pip_install_multiple_editables(self): - editables = [ - "git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr", - "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting", - ] - - # Create the testing virtualenv - with VirtualEnv(self.venv_dir): - ret = self.run_function( - "pip.install", - [], - editable="{}".format(",".join(editables)), - bin_env=self.venv_dir, - ) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." 
- " Output:\n{}".format(ret) - ) - - try: - if self._check_download_error(ret["stdout"]): - self.skipTest("Test skipped due to pip download error") - assert ret["retcode"] == 0 - for package in ("iStr", "SaltTesting"): - match = re.search( - r"(?:.*)(Successfully installed)(?:.*)({})(?:.*)".format( - package - ), - ret["stdout"], - ) - assert match is not None - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." - " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.slow_test - def test_pip_install_multiple_editables_and_pkgs(self): - editables = [ - "git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr", - "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting", - ] - - # Create the testing virtualenv - with VirtualEnv(self.venv_dir): - ret = self.run_function( - "pip.install", - ["pep8"], - editable="{}".format(",".join(editables)), - bin_env=self.venv_dir, - ) - - if not isinstance(ret, dict): - self.fail( - "The 'pip.install' command did not return the excepted dictionary." - " Output:\n{}".format(ret) - ) - - try: - if self._check_download_error(ret["stdout"]): - self.skipTest("Test skipped due to pip download error") - assert ret["retcode"] == 0 - for package in ("iStr", "SaltTesting", "pep8"): - match = re.search( - r"(?:.*)(Successfully installed)(?:.*)({})(?:.*)".format( - package - ), - ret["stdout"], - ) - assert match is not None - except KeyError as exc: - self.fail( - "The returned dictionary is missing an expected key. Error: '{}'." - " Dictionary: {}".format(exc, pprint.pformat(ret)) - ) - - @pytest.mark.skipif( - shutil.which("/bin/pip3") is None, reason="Could not find /bin/pip3" - ) - @pytest.mark.skip_on_windows(reason="test specific for linux usage of /bin/python") - @pytest.mark.skip_initial_gh_actions_failure( - reason="This was skipped on older golden images and is failing on newer." 
- ) - def test_system_pip3(self): - - self.run_function( - "pip.install", pkgs=["lazyimport==0.0.1"], bin_env="/bin/pip3" - ) - ret1 = self.run_function("cmd.run_all", ["/bin/pip3 freeze | grep lazyimport"]) - assert "lazyimport==0.0.1" in ret1["stdout"] - - self.run_function("pip.uninstall", pkgs=["lazyimport"], bin_env="/bin/pip3") - ret2 = self.run_function("cmd.run_all", ["/bin/pip3 freeze | grep lazyimport"]) - assert ret2["stdout"] == "" diff --git a/tests/integration/modules/test_publish.py b/tests/integration/modules/test_publish.py index fb1306e14a56..056955486439 100644 --- a/tests/integration/modules/test_publish.py +++ b/tests/integration/modules/test_publish.py @@ -44,7 +44,7 @@ def test_publish(self): self.assertTrue(name in ret) self.assertEqual(ret["cheese"], "spam") - self.assertEqual(ret["__pub_arg"], [{"cheese": "spam"}]) + self.assertEqual(ret["__pub_arg"], [{"__kwarg__": True, "cheese": "spam"}]) self.assertEqual(ret["__pub_id"], "minion") self.assertEqual(ret["__pub_fun"], "test.kwarg") @@ -125,7 +125,7 @@ def test_kwarg(self): self.assertTrue(name in ret) self.assertEqual(ret["cheese"], "spam") - self.assertEqual(ret["__pub_arg"], [{"cheese": "spam"}]) + self.assertEqual(ret["__pub_arg"], [{"__kwarg__": True, "cheese": "spam"}]) self.assertEqual(ret["__pub_id"], "minion") self.assertEqual(ret["__pub_fun"], "test.kwarg") diff --git a/tests/integration/modules/test_service.py b/tests/integration/modules/test_service.py deleted file mode 100644 index 7c070c6e2af8..000000000000 --- a/tests/integration/modules/test_service.py +++ /dev/null @@ -1,183 +0,0 @@ -import pytest - -import salt.utils.path -import salt.utils.platform -import salt.utils.systemd -from tests.support.case import ModuleCase - - -@pytest.mark.destructive_test -@pytest.mark.windows_whitelisted -class ServiceModuleTest(ModuleCase): - """ - Module testing the service module - """ - - def setUp(self): - self.service_name = "cron" - cmd_name = "crontab" - os_family = 
self.run_function("grains.get", ["os_family"]) - os_release = self.run_function("grains.get", ["osrelease"]) - if os_family == "RedHat": - if os_release[0] == "7": - self.skipTest( - "Disabled on CentOS 7 until we can fix SSH connection issues." - ) - self.service_name = "crond" - elif os_family == "Arch": - self.service_name = "sshd" - cmd_name = "systemctl" - elif os_family == "NILinuxRT": - self.service_name = "syslog" - cmd_name = "syslog-ng" - elif os_family == "MacOS": - self.service_name = "com.apple.AirPlayXPCHelper" - elif salt.utils.platform.is_windows(): - self.service_name = "Spooler" - - self.pre_srv_status = self.run_function("service.status", [self.service_name]) - self.pre_srv_enabled = ( - True - if self.service_name in self.run_function("service.get_enabled") - else False - ) - - if ( - salt.utils.path.which(cmd_name) is None - and not salt.utils.platform.is_windows() - ): - self.skipTest("{} is not installed".format(cmd_name)) - - def tearDown(self): - post_srv_status = self.run_function("service.status", [self.service_name]) - post_srv_enabled = ( - True - if self.service_name in self.run_function("service.get_enabled") - else False - ) - - if post_srv_status != self.pre_srv_status: - if self.pre_srv_status: - self.run_function("service.enable", [self.service_name]) - else: - self.run_function("service.disable", [self.service_name]) - - if post_srv_enabled != self.pre_srv_enabled: - if self.pre_srv_enabled: - self.run_function("service.enable", [self.service_name]) - else: - self.run_function("service.disable", [self.service_name]) - del self.service_name - - @pytest.mark.flaky(max_runs=4) - @pytest.mark.slow_test - def test_service_status_running(self): - """ - test service.status execution module - when service is running - """ - self.run_function("service.start", [self.service_name]) - check_service = self.run_function("service.status", [self.service_name]) - self.assertTrue(check_service) - - @pytest.mark.slow_test - def 
test_service_status_dead(self): - """ - test service.status execution module - when service is dead - """ - self.run_function("service.stop", [self.service_name]) - check_service = self.run_function("service.status", [self.service_name]) - self.assertFalse(check_service) - - @pytest.mark.slow_test - def test_service_restart(self): - """ - test service.restart - """ - self.assertTrue(self.run_function("service.restart", [self.service_name])) - - @pytest.mark.slow_test - def test_service_enable(self): - """ - test service.get_enabled and service.enable module - """ - # disable service before test - self.assertTrue(self.run_function("service.disable", [self.service_name])) - - self.assertTrue(self.run_function("service.enable", [self.service_name])) - self.assertIn(self.service_name, self.run_function("service.get_enabled")) - - @pytest.mark.slow_test - def test_service_disable(self): - """ - test service.get_disabled and service.disable module - """ - # enable service before test - self.assertTrue(self.run_function("service.enable", [self.service_name])) - - self.assertTrue(self.run_function("service.disable", [self.service_name])) - if salt.utils.platform.is_darwin(): - self.assertTrue(self.run_function("service.disabled", [self.service_name])) - else: - self.assertIn(self.service_name, self.run_function("service.get_disabled")) - - @pytest.mark.slow_test - def test_service_disable_doesnot_exist(self): - """ - test service.get_disabled and service.disable module - when service name does not exist - """ - # enable service before test - srv_name = "doesnotexist" - enable = self.run_function("service.enable", [srv_name]) - systemd = salt.utils.systemd.booted() - - # check service was not enabled - try: - self.assertFalse(enable) - except AssertionError: - self.assertIn("ERROR", enable) - - # check service was not disabled - if ( - tuple( - self.run_function("grains.item", ["osrelease_info"])["osrelease_info"] - ) - == (14, 0o4) - and not systemd - ): - # currently 
upstart does not have a mechanism to report if disabling a service fails if does not exist - self.assertTrue(self.run_function("service.disable", [srv_name])) - elif ( - self.run_function("grains.item", ["os"])["os"] == "Debian" - and self.run_function("grains.item", ["osmajorrelease"])["osmajorrelease"] - < 9 - and systemd - ): - # currently disabling a service via systemd that does not exist - # on Debian 8 results in a True return code - self.assertTrue(self.run_function("service.disable", [srv_name])) - else: - try: - disable = self.run_function("service.disable", [srv_name]) - self.assertFalse(disable) - except AssertionError: - self.assertTrue("error" in disable.lower()) - - if salt.utils.platform.is_darwin(): - self.assertEqual( - self.run_function("service.disabled", [srv_name]), - "ERROR: Service not found: {}".format(srv_name), - ) - else: - self.assertNotIn(srv_name, self.run_function("service.get_disabled")) - - @pytest.mark.skip_unless_on_windows - @pytest.mark.slow_test - def test_service_get_service_name(self): - """ - test service.get_service_name - """ - ret = self.run_function("service.get_service_name") - self.assertIn(self.service_name, ret.values()) diff --git a/tests/integration/modules/test_ssh.py b/tests/integration/modules/test_ssh.py index ac0e07395644..55586211622f 100644 --- a/tests/integration/modules/test_ssh.py +++ b/tests/integration/modules/test_ssh.py @@ -6,14 +6,14 @@ import shutil import pytest +import requests import salt.utils.files import salt.utils.platform -from salt.ext.tornado.httpclient import HTTPClient from tests.support.case import ModuleCase from tests.support.runtests import RUNTIME_VARS -GITHUB_FINGERPRINT = "9d:38:5b:83:a9:17:52:92:56:1a:5e:c4:d4:81:8e:0a:ca:51:a2:64:f1:74:20:11:2e:f8:8a:c3:a1:39:49:8f" +GITHUB_FINGERPRINT = "b8:d8:95:ce:d9:2c:0a:c0:e1:71:cd:2e:f5:ef:01:ba:34:17:55:4a:4a:64:80:d3:31:cc:c2:be:3d:ed:0f:6b" def check_status(): @@ -21,12 +21,13 @@ def check_status(): Check the status of Github for 
remote operations """ try: - return HTTPClient().fetch("http://github.com").code == 200 + return requests.get("https://github.com").status_code == 200 except Exception: # pylint: disable=broad-except return False -@pytest.mark.windows_whitelisted +# @pytest.mark.windows_whitelisted +# De-whitelist windows since it's hanging on the newer windows golden images @pytest.mark.skip_if_binaries_missing("ssh", "ssh-keygen", check_all=True) class SSHModuleTest(ModuleCase): """ @@ -49,9 +50,9 @@ def setUp(self): if not os.path.isdir(self.subsalt_dir): os.makedirs(self.subsalt_dir) - ssh_raw_path = os.path.join(RUNTIME_VARS.FILES, "ssh", "raw") - with salt.utils.files.fopen(ssh_raw_path) as fd: - self.key = fd.read().strip() + known_hosts_file = os.path.join(RUNTIME_VARS.FILES, "ssh", "known_hosts") + with salt.utils.files.fopen(known_hosts_file) as fd: + self.key = fd.read().strip().splitlines()[0].split()[-1] def tearDown(self): """ diff --git a/tests/integration/modules/test_status.py b/tests/integration/modules/test_status.py index b11f1d9e7677..3574e23a3a2c 100644 --- a/tests/integration/modules/test_status.py +++ b/tests/integration/modules/test_status.py @@ -14,6 +14,7 @@ class StatusModuleTest(ModuleCase): @pytest.mark.skip_on_windows @pytest.mark.flaky(max_runs=4) + @pytest.mark.slow_test def test_status_pid(self): """ status.pid @@ -43,6 +44,7 @@ def test_status_saltmem(self): self.assertTrue(isinstance(ret, int)) @pytest.mark.slow_test + @pytest.mark.skip_if_not_root def test_status_diskusage(self): """ status.diskusage diff --git a/tests/integration/modules/test_win_ntp.py b/tests/integration/modules/test_win_ntp.py index 1e4053edb8ea..1c52eb9ece24 100644 --- a/tests/integration/modules/test_win_ntp.py +++ b/tests/integration/modules/test_win_ntp.py @@ -6,6 +6,7 @@ @pytest.mark.flaky(max_runs=4) @pytest.mark.skip_unless_on_windows @pytest.mark.windows_whitelisted +@pytest.mark.slow_test class NTPTest(ModuleCase): """ Validate windows ntp module diff --git 
a/tests/integration/renderers/test_jinja.py b/tests/integration/renderers/test_jinja.py deleted file mode 100644 index f0fcd28ff9de..000000000000 --- a/tests/integration/renderers/test_jinja.py +++ /dev/null @@ -1,36 +0,0 @@ -import os - -import pytest - -import salt.utils.files -from tests.support.case import ModuleCase, ShellCase -from tests.support.helpers import with_tempdir - - -class JinjaRendererTest(ModuleCase): - @with_tempdir() - @pytest.mark.slow_test - def test_issue_54765(self, tmpdir): - file_path = os.path.join(tmpdir, "issue-54765") - ret = self.run_function( - "state.sls", mods="issue-54765", pillar={"file_path": file_path} - ) - key = "file_|-issue-54765_|-{}_|-managed".format(file_path) - assert key in ret - assert ret[key]["result"] is True - with salt.utils.files.fopen(file_path, "r") as fp: - assert fp.read().strip() == "bar" - - -class JinjaRenderCallTest(ShellCase): - @with_tempdir() - @pytest.mark.slow_test - def test_issue_54765(self, tmpdir): - file_path = os.path.join(tmpdir, "issue-54765") - pillar_str = '\'{{"file_path": "{}"}}\''.format(file_path) - ret = self.run_call( - "state.apply issue-54765 pillar={}".format(pillar_str), local=True - ) - assert " Result: True" in ret - with salt.utils.files.fopen(file_path, "r") as fp: - assert fp.read().strip() == "bar" diff --git a/tests/integration/runners/test_jobs.py b/tests/integration/runners/test_jobs.py deleted file mode 100644 index 82a367c45d0c..000000000000 --- a/tests/integration/runners/test_jobs.py +++ /dev/null @@ -1,110 +0,0 @@ -""" -Tests for the salt-run command -""" - -import pytest - -from tests.support.case import ShellCase - - -@pytest.mark.windows_whitelisted -@pytest.mark.usefixtures("salt_sub_minion") -class JobsTest(ShellCase): - """ - Test the jobs runner. 
- """ - - @pytest.mark.slow_test - def test_master(self): - """ - jobs.master - """ - ret = self.run_run_plus("jobs.master", _output="json") - self.assertEqual(ret["return"], []) - self.assertEqual(ret["out"], []) - - @pytest.mark.slow_test - def test_active(self): - """ - jobs.active - """ - ret = self.run_run_plus("jobs.active", _output="json") - self.assertEqual(ret["return"], {}) - self.assertEqual(ret["out"], {}) - - @pytest.mark.slow_test - def test_lookup_jid(self): - """ - jobs.lookup_jid - """ - ret = self.run_run_plus("jobs.lookup_jid", "23974239742394", _output="json") - self.assertEqual(ret["return"], {}) - self.assertEqual(ret["out"], {}) - - @pytest.mark.slow_test - def test_lookup_jid_invalid(self): - """ - jobs.lookup_jid - """ - ret = self.run_run_plus("jobs.lookup_jid", _output="json") - expected = "Passed invalid arguments:" - self.assertIn(expected, ret["return"]) - - @pytest.mark.slow_test - def test_list_jobs(self): - """ - jobs.list_jobs - """ - self.run_salt("minion test.echo test_list_jobs") - ret = self.run_run_plus("jobs.list_jobs", _output="json") - self.assertIsInstance(ret["return"], dict) - for job in ret["return"].values(): - if job["Function"] != "test.echo": - continue - if job["Arguments"] != ["test_list_jobs"]: - continue - # We our job in the list, we're good with the test - break - else: - self.fail("Did not our job from the jobs.list_jobs call") - - -@pytest.mark.windows_whitelisted -class LocalCacheTargetTest(ShellCase): - """ - Test that a job stored in the local_cache has target information - """ - - @pytest.mark.slow_test - def test_target_info(self): - """ - This is a test case for issue #48734 - - PR #43454 fixed an issue where "jobs.lookup_jid" was not working - correctly with external job caches. However, this fix for external - job caches broke some inner workings of job storage when using the - local_cache. 
- - We need to preserve the previous behavior for the local_cache, but - keep the new behavior for other external job caches. - - If "savefstr" is called in the local cache, the target data does not - get written to the local_cache, and the target-type gets listed as a - "list" type instead of "glob". - - This is a regression test for fixing the local_cache behavior. - """ - self.run_salt("minion test.echo target_info_test") - ret = self.run_run_plus("jobs.list_jobs", _output="json") - for item in ret["return"].values(): - if ( - item["Function"] == "test.echo" - and item["Arguments"][0] == "target_info_test" - ): - job_ret = item - tgt = job_ret["Target"] - tgt_type = job_ret["Target-type"] - - assert tgt != "unknown-target" - assert tgt in ["minion", "sub_minion"] - assert tgt_type == "glob" diff --git a/tests/integration/runners/test_nacl.py b/tests/integration/runners/test_nacl.py deleted file mode 100644 index 0be360b893d3..000000000000 --- a/tests/integration/runners/test_nacl.py +++ /dev/null @@ -1,200 +0,0 @@ -""" -Tests for the salt-run command -""" - -import logging -import sys - -import pytest - -from tests.support.case import ShellCase - -try: - import libnacl.sealed # pylint: disable=unused-import - import libnacl.secret # pylint: disable=unused-import - - HAS_LIBNACL = True -except (ImportError, OSError, AttributeError): - HAS_LIBNACL = False - -log = logging.getLogger(__name__) - - -@pytest.mark.skipif( - not HAS_LIBNACL, reason="skipping test_nacl, reason=libnacl is unavailable" -) -@pytest.mark.skipif(sys.version_info >= (3, 10), reason="Segfaults with python 3") -@pytest.mark.windows_whitelisted -class NaclTest(ShellCase): - """ - Test the nacl runner - """ - - @pytest.mark.slow_test - def test_keygen(self): - """ - Test keygen - """ - # Store the data - ret = self.run_run_plus( - "nacl.keygen", - ) - self.assertIn("pk", ret["return"]) - self.assertIn("sk", ret["return"]) - - @pytest.mark.slow_test - def test_enc(self): - """ - Test keygen - """ - 
# Store the data - ret = self.run_run_plus( - "nacl.keygen", - ) - self.assertIn("pk", ret["return"]) - self.assertIn("sk", ret["return"]) - pk = ret["return"]["pk"] - sk = ret["return"]["sk"] - - unencrypted_data = "hello" - - # Encrypt with pk - ret = self.run_run_plus( - "nacl.enc", - data=unencrypted_data, - pk=pk, - ) - self.assertIn("return", ret) - - @pytest.mark.slow_test - def test_enc_dec(self): - """ - Store, list, fetch, then flush data - """ - # Store the data - ret = self.run_run_plus( - "nacl.keygen", - ) - self.assertIn("pk", ret["return"]) - self.assertIn("sk", ret["return"]) - pk = ret["return"]["pk"] - sk = ret["return"]["sk"] - - unencrypted_data = b"hello" - - # Encrypt with pk - ret = self.run_run_plus( - "nacl.enc", - data=unencrypted_data, - pk=pk, - ) - self.assertIn("return", ret) - encrypted_data = ret["return"] - - # Decrypt with sk - ret = self.run_run_plus( - "nacl.dec", - data=encrypted_data, - sk=sk, - ) - self.assertIn("return", ret) - self.assertEqual(unencrypted_data, ret["return"]) - - @pytest.mark.slow_test - def test_sealedbox_enc_dec(self): - """ - Generate keys, encrypt, then decrypt. - """ - # Store the data - ret = self.run_run_plus( - "nacl.keygen", - ) - self.assertIn("pk", ret["return"]) - self.assertIn("sk", ret["return"]) - pk = ret["return"]["pk"] - sk = ret["return"]["sk"] - - unencrypted_data = b"hello" - - # Encrypt with pk - ret = self.run_run_plus( - "nacl.sealedbox_encrypt", - data=unencrypted_data, - pk=pk, - ) - encrypted_data = ret["return"] - - # Decrypt with sk - ret = self.run_run_plus( - "nacl.sealedbox_decrypt", - data=encrypted_data, - sk=sk, - ) - self.assertEqual(unencrypted_data, ret["return"]) - - @pytest.mark.slow_test - def test_secretbox_enc_dec(self): - """ - Generate keys, encrypt, then decrypt. 
- """ - # Store the data - ret = self.run_run_plus( - "nacl.keygen", - ) - self.assertIn("pk", ret["return"]) - self.assertIn("sk", ret["return"]) - pk = ret["return"]["pk"] - sk = ret["return"]["sk"] - - unencrypted_data = b"hello" - - # Encrypt with pk - ret = self.run_run_plus( - "nacl.secretbox_encrypt", - data=unencrypted_data, - sk=sk, - ) - encrypted_data = ret["return"] - - # Decrypt with sk - ret = self.run_run_plus( - "nacl.secretbox_decrypt", - data=encrypted_data, - sk=sk, - ) - self.assertEqual(unencrypted_data, ret["return"]) - - @pytest.mark.slow_test - def test_enc_dec_no_pk_no_sk(self): - """ - Store, list, fetch, then flush data - """ - # Store the data - ret = self.run_run_plus( - "nacl.keygen", - ) - self.assertIn("pk", ret["return"]) - self.assertIn("sk", ret["return"]) - pk = ret["return"]["pk"] - sk = ret["return"]["sk"] - - unencrypted_data = b"hello" - - # Encrypt with pk - ret = self.run_run_plus( - "nacl.enc", - data=unencrypted_data, - pk=None, - ) - self.assertIn("Exception: no pubkey or pk_file found", ret["return"]) - - self.assertIn("return", ret) - encrypted_data = ret["return"] - - # Decrypt with sk - ret = self.run_run_plus( - "nacl.dec", - data=encrypted_data, - sk=None, - ) - self.assertIn("Exception: no key or sk_file found", ret["return"]) diff --git a/tests/integration/ssh/test_pre_flight.py b/tests/integration/ssh/test_pre_flight.py deleted file mode 100644 index 1598b3d51b5d..000000000000 --- a/tests/integration/ssh/test_pre_flight.py +++ /dev/null @@ -1,132 +0,0 @@ -""" -Test for ssh_pre_flight roster option -""" - -import os - -import pytest - -import salt.utils.files -from tests.support.case import SSHCase -from tests.support.runtests import RUNTIME_VARS - - -class SSHPreFlightTest(SSHCase): - """ - Test ssh_pre_flight roster option - """ - - def setUp(self): - super().setUp() - self.roster = os.path.join(RUNTIME_VARS.TMP, "pre_flight_roster") - self.data = { - "ssh_pre_flight": os.path.join(RUNTIME_VARS.TMP, 
"ssh_pre_flight.sh") - } - self.test_script = os.path.join( - RUNTIME_VARS.TMP, "test-pre-flight-script-worked.txt" - ) - - def _create_roster(self, pre_flight_script_args=None): - data = dict(self.data) - if pre_flight_script_args: - data["ssh_pre_flight_args"] = pre_flight_script_args - - self.custom_roster(self.roster, data) - - with salt.utils.files.fopen(data["ssh_pre_flight"], "w") as fp_: - fp_.write("touch {}".format(self.test_script)) - - @pytest.mark.slow_test - def test_ssh_pre_flight(self): - """ - test ssh when ssh_pre_flight is set - ensure the script runs successfully - """ - self._create_roster() - assert self.run_function("test.ping", roster_file=self.roster) - - assert os.path.exists(self.test_script) - - @pytest.mark.slow_test - def test_ssh_run_pre_flight(self): - """ - test ssh when --pre-flight is passed to salt-ssh - to ensure the script runs successfully - """ - self._create_roster() - # make sure we previously ran a command so the thin dir exists - self.run_function("test.ping", wipe=False) - assert not os.path.exists(self.test_script) - - assert self.run_function( - "test.ping", ssh_opts="--pre-flight", roster_file=self.roster, wipe=False - ) - assert os.path.exists(self.test_script) - - @pytest.mark.slow_test - def test_ssh_run_pre_flight_args(self): - """ - test ssh when --pre-flight is passed to salt-ssh - to ensure the script runs successfully passing some args - """ - self._create_roster(pre_flight_script_args="foobar test") - # make sure we previously ran a command so the thin dir exists - self.run_function("test.ping", wipe=False) - assert not os.path.exists(self.test_script) - - assert self.run_function( - "test.ping", ssh_opts="--pre-flight", roster_file=self.roster, wipe=False - ) - assert os.path.exists(self.test_script) - - @pytest.mark.slow_test - def test_ssh_run_pre_flight_args_prevent_injection(self): - """ - test ssh when --pre-flight is passed to salt-ssh - and evil arguments are used in order to produce shell injection - 
""" - injected_file = os.path.join(RUNTIME_VARS.TMP, "injection") - self._create_roster( - pre_flight_script_args="foobar; echo injected > {}".format(injected_file) - ) - # make sure we previously ran a command so the thin dir exists - self.run_function("test.ping", wipe=False) - assert not os.path.exists(self.test_script) - assert not os.path.isfile(injected_file) - - assert self.run_function( - "test.ping", ssh_opts="--pre-flight", roster_file=self.roster, wipe=False - ) - - assert not os.path.isfile( - injected_file - ), "File injection suceeded. This shouldn't happend" - - @pytest.mark.slow_test - def test_ssh_run_pre_flight_failure(self): - """ - test ssh_pre_flight when there is a failure - in the script. - """ - self._create_roster() - with salt.utils.files.fopen(self.data["ssh_pre_flight"], "w") as fp_: - fp_.write("exit 2") - - ret = self.run_function( - "test.ping", ssh_opts="--pre-flight", roster_file=self.roster, wipe=False - ) - assert ret["retcode"] == 2 - - def tearDown(self): - """ - make sure to clean up any old ssh directories - """ - files = [ - self.roster, - self.data["ssh_pre_flight"], - self.test_script, - os.path.join(RUNTIME_VARS.TMP, "injection"), - ] - for fp_ in files: - if os.path.exists(fp_): - os.remove(fp_) diff --git a/tests/integration/ssh/test_state.py b/tests/integration/ssh/test_state.py index 4f4d2bf8a517..a9fd3e7f2d3e 100644 --- a/tests/integration/ssh/test_state.py +++ b/tests/integration/ssh/test_state.py @@ -17,6 +17,7 @@ log = logging.getLogger(__name__) +@pytest.mark.slow_test class SSHStateTest(SSHCase): """ testing the state system with salt-ssh diff --git a/tests/integration/states/test_archive.py b/tests/integration/states/test_archive.py index 7d2dba522107..d940db5ecd26 100644 --- a/tests/integration/states/test_archive.py +++ b/tests/integration/states/test_archive.py @@ -106,6 +106,7 @@ def test_archive_extracted_skip_verify(self): self._check_extracted(self.untar_file) + @pytest.mark.skip_on_fips_enabled_platform 
def test_archive_extracted_with_source_hash(self): """ test archive.extracted without skip_verify @@ -127,6 +128,7 @@ def test_archive_extracted_with_source_hash(self): self._check_extracted(self.untar_file) @pytest.mark.skip_if_not_root + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_root_user_and_group(self): """ test archive.extracted with user and group set to "root" @@ -151,6 +153,7 @@ def test_archive_extracted_with_root_user_and_group(self): self._check_extracted(self.untar_file) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_strip_in_options(self): """ test archive.extracted with --strip in options @@ -170,6 +173,7 @@ def test_archive_extracted_with_strip_in_options(self): self._check_extracted(os.path.join(ARCHIVE_DIR, "README")) + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_strip_components_in_options(self): """ test archive.extracted with --strip-components in options @@ -190,6 +194,7 @@ def test_archive_extracted_with_strip_components_in_options(self): self._check_extracted(os.path.join(ARCHIVE_DIR, "README")) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_without_archive_format(self): """ test archive.extracted with no archive_format option @@ -206,6 +211,7 @@ def test_archive_extracted_without_archive_format(self): self._check_extracted(self.untar_file) + @pytest.mark.skip_on_fips_enabled_platform def test_archive_extracted_with_cmd_unzip_false(self): """ test archive.extracted using use_cmd_unzip argument as false @@ -240,6 +246,7 @@ def test_local_archive_extracted(self): self._check_extracted(self.untar_file) + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_skip_verify(self): """ test archive.extracted with local file, bad hash and skip_verify @@ -258,6 +265,7 @@ def test_local_archive_extracted_skip_verify(self): self._check_extracted(self.untar_file) 
@pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_with_source_hash(self): """ test archive.extracted with local file and valid hash @@ -275,6 +283,7 @@ def test_local_archive_extracted_with_source_hash(self): self._check_extracted(self.untar_file) @pytest.mark.slow_test + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_with_bad_source_hash(self): """ test archive.extracted with local file and bad hash @@ -289,6 +298,7 @@ def test_local_archive_extracted_with_bad_source_hash(self): self.assertSaltFalseReturn(ret) + @pytest.mark.skip_on_fips_enabled_platform def test_local_archive_extracted_with_uppercase_source_hash(self): """ test archive.extracted with local file and bad hash diff --git a/tests/integration/states/test_lxd.py b/tests/integration/states/test_lxd.py index 7aee81f96cda..34a5efcf1612 100644 --- a/tests/integration/states/test_lxd.py +++ b/tests/integration/states/test_lxd.py @@ -13,6 +13,7 @@ @pytest.mark.skipif(salt.modules.lxd.HAS_PYLXD is False, reason="pylxd not installed") @pytest.mark.skip_if_binaries_missing("lxd", reason="LXD not installed") @pytest.mark.skip_if_binaries_missing("lxc", reason="LXC not installed") +@pytest.mark.slow_test class LxdTestCase(ModuleCase, SaltReturnAssertsMixin): @pytest.mark.flaky(max_runs=4) def test_01__init_lxd(self): diff --git a/tests/integration/states/test_lxd_container.py b/tests/integration/states/test_lxd_container.py index 4ee92d33fefb..a7060562a90f 100644 --- a/tests/integration/states/test_lxd_container.py +++ b/tests/integration/states/test_lxd_container.py @@ -13,6 +13,7 @@ @pytest.mark.skipif(salt.modules.lxd.HAS_PYLXD is False, reason="pylxd not installed") @pytest.mark.skip_if_binaries_missing("lxd", reason="LXD not installed") @pytest.mark.skip_if_binaries_missing("lxc", reason="LXC not installed") +@pytest.mark.slow_test class LxdContainerTestCase(ModuleCase, SaltReturnAssertsMixin): def setUp(self): self.run_state( 
diff --git a/tests/integration/states/test_service.py b/tests/integration/states/test_service.py deleted file mode 100644 index 8607c92b2785..000000000000 --- a/tests/integration/states/test_service.py +++ /dev/null @@ -1,119 +0,0 @@ -""" -Tests for the service state -""" - -import re - -import pytest - -import salt.utils.path -import salt.utils.platform -from tests.support.case import ModuleCase -from tests.support.mixins import SaltReturnAssertsMixin - -INIT_DELAY = 5 - - -@pytest.mark.windows_whitelisted -@pytest.mark.destructive_test -class ServiceTest(ModuleCase, SaltReturnAssertsMixin): - """ - Validate the service state - """ - - def setUp(self): - self.service_name = "cron" - cmd_name = "crontab" - os_family = self.run_function("grains.get", ["os_family"]) - os_release = self.run_function("grains.get", ["osrelease"]) - is_systemd = self.run_function("grains.get", ["systemd"]) - self.stopped = False - self.running = True - if os_family == "RedHat": - self.service_name = "crond" - elif os_family == "Arch": - self.service_name = "sshd" - cmd_name = "systemctl" - elif os_family == "MacOS": - self.service_name = "com.apple.AirPlayXPCHelper" - elif os_family == "Windows": - self.service_name = "Spooler" - - self.pre_srv_enabled = ( - True - if self.service_name in self.run_function("service.get_enabled") - else False - ) - self.post_srv_disable = False - if not self.pre_srv_enabled: - self.run_function("service.enable", name=self.service_name) - self.post_srv_disable = True - - if os_family != "Windows" and salt.utils.path.which(cmd_name) is None: - self.skipTest("{} is not installed".format(cmd_name)) - - if is_systemd and self.run_function("service.offline"): - self.skipTest("systemd is OFFLINE") - - def tearDown(self): - if self.post_srv_disable: - self.run_function("service.disable", name=self.service_name) - - def check_service_status(self, exp_return): - """ - helper method to check status of service - """ - check_status = 
self.run_function("service.status", name=self.service_name) - - try: - if not re.match(exp_return, check_status): - self.fail("status of service is not returning correctly") - except TypeError: - if check_status is not exp_return: - self.fail("status of service is not returning correctly") - - @pytest.mark.slow_test - def test_service_running(self): - """ - test service.running state module - """ - if self.run_function("service.status", name=self.service_name): - stop_service = self.run_function("service.stop", name=self.service_name) - self.assertTrue(stop_service) - self.check_service_status(self.stopped) - - if salt.utils.platform.is_darwin(): - # make sure the service is enabled on macosx - enable = self.run_function("service.enable", name=self.service_name) - - start_service = self.run_state("service.running", name=self.service_name) - self.assertTrue(start_service) - self.check_service_status(self.running) - - @pytest.mark.slow_test - def test_service_dead(self): - """ - test service.dead state module - """ - start_service = self.run_state("service.running", name=self.service_name) - self.assertSaltTrueReturn(start_service) - self.check_service_status(self.running) - - ret = self.run_state("service.dead", name=self.service_name) - self.assertSaltTrueReturn(ret) - self.check_service_status(self.stopped) - - @pytest.mark.slow_test - def test_service_dead_init_delay(self): - """ - test service.dead state module with init_delay arg - """ - start_service = self.run_state("service.running", name=self.service_name) - self.assertSaltTrueReturn(start_service) - self.check_service_status(self.running) - - ret = self.run_state( - "service.dead", name=self.service_name, init_delay=INIT_DELAY - ) - self.assertSaltTrueReturn(ret) - self.check_service_status(self.stopped) diff --git a/tests/integration/states/test_ssh_known_hosts.py b/tests/integration/states/test_ssh_known_hosts.py index 979a21728460..d5e7b8b25bb2 100644 --- 
a/tests/integration/states/test_ssh_known_hosts.py +++ b/tests/integration/states/test_ssh_known_hosts.py @@ -11,7 +11,7 @@ from tests.support.mixins import SaltReturnAssertsMixin from tests.support.runtests import RUNTIME_VARS -GITHUB_FINGERPRINT = "9d:38:5b:83:a9:17:52:92:56:1a:5e:c4:d4:81:8e:0a:ca:51:a2:64:f1:74:20:11:2e:f8:8a:c3:a1:39:49:8f" +GITHUB_FINGERPRINT = "b8:d8:95:ce:d9:2c:0a:c0:e1:71:cd:2e:f5:ef:01:ba:34:17:55:4a:4a:64:80:d3:31:cc:c2:be:3d:ed:0f:6b" GITHUB_IP = "140.82.121.4" diff --git a/tests/integration/utils/test_win_runas.py b/tests/integration/utils/test_win_runas.py index cd8c95b9da58..41d4169d9458 100644 --- a/tests/integration/utils/test_win_runas.py +++ b/tests/integration/utils/test_win_runas.py @@ -10,10 +10,10 @@ import traceback import pytest -import yaml import salt.utils.files import salt.utils.win_runas +import salt.utils.yaml from tests.support.case import ModuleCase from tests.support.helpers import with_system_user from tests.support.mock import Mock @@ -658,7 +658,7 @@ def test_runas_service(self, username, timeout=200): win32serviceutil.StartService("test service") wait_for_service("test service") with salt.utils.files.fopen(RUNAS_OUT, "r") as fp: - ret = yaml.load(fp) + ret = salt.utils.yaml.safe_load(fp) assert ret["retcode"] == 1, ret @with_system_user( @@ -676,7 +676,7 @@ def test_runas_service_no_pass(self, username, timeout=200): win32serviceutil.StartService("test service") wait_for_service("test service") with salt.utils.files.fopen(RUNAS_OUT, "r") as fp: - ret = yaml.load(fp) + ret = salt.utils.yaml.safe_load(fp) assert ret["retcode"] == 1, ret @with_system_user( @@ -698,7 +698,7 @@ def test_runas_service_admin(self, username, timeout=200): win32serviceutil.StartService("test service") wait_for_service("test service") with salt.utils.files.fopen(RUNAS_OUT, "r") as fp: - ret = yaml.load(fp) + ret = salt.utils.yaml.safe_load(fp) assert ret["retcode"] == 0, ret @with_system_user( @@ -720,7 +720,7 @@ def 
test_runas_service_admin_no_pass(self, username, timeout=200): win32serviceutil.StartService("test service") wait_for_service("test service") with salt.utils.files.fopen(RUNAS_OUT, "r") as fp: - ret = yaml.load(fp) + ret = salt.utils.yaml.safe_load(fp) assert ret["retcode"] == 0, ret def test_runas_service_system_user(self): @@ -735,5 +735,5 @@ def test_runas_service_system_user(self): win32serviceutil.StartService("test service") wait_for_service("test service") with salt.utils.files.fopen(RUNAS_OUT, "r") as fp: - ret = yaml.load(fp) + ret = salt.utils.yaml.safe_load(fp) assert ret["retcode"] == 0, ret diff --git a/tests/pkg/rpm/salt.spec b/tests/pkg/rpm/salt.spec deleted file mode 100644 index 7d8183a9d87b..000000000000 --- a/tests/pkg/rpm/salt.spec +++ /dev/null @@ -1,470 +0,0 @@ -# Maintainer: Erik Johnson (https://github.com/terminalmage) -# -# This is a modified version of the spec file, which supports git builds. It -# should be kept more or less up-to-date with upstream changes. -# -# Please contact the maintainer before submitting any pull requests for this -# spec file. - -%if ! 
(0%{?rhel} >= 6 || 0%{?fedora} > 12) -%global with_python26 1 -%define pybasever 2.6 -%define __python_ver 26 -%define __python %{_bindir}/python%{?pybasever} -%endif - -%global include_tests 0 - -%{!?python_sitelib: %global python_sitelib %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib())")} -%{!?python_sitearch: %global python_sitearch %(%{__python} -c "from distutils.sysconfig import get_python_lib; print(get_python_lib(1))")} -%{!?pythonpath: %global pythonpath %(%{__python} -c "import os, sys; print(os.pathsep.join(x for x in sys.path if x))")} - -%global srcver REPLACE_ME - -Name: salt -Version: REPLACE_ME -Release: 1%{?dist} -Summary: A parallel remote execution system - -Group: System Environment/Daemons -License: ASL 2.0 -URL: http://saltstack.org/ -Source0: %{name}-%{srcver}.tar.gz -Source1: %{name}-master -Source2: %{name}-syndic -Source3: %{name}-minion -Source4: %{name}-api -Source5: %{name}-master.service -Source6: %{name}-syndic.service -Source7: %{name}-minion.service -Source8: %{name}-api.service -Source9: README.fedora -Source10: logrotate.salt -Source11: salt.bash - -BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root-%(%{__id_u} -n) -BuildArch: noarch - -%ifarch %{ix86} x86_64 -Requires: dmidecode -%endif - -Requires: pciutils -Requires: which -Requires: yum-utils - -%if 0%{?with_python26} - -BuildRequires: python26-devel -Requires: python26-crypto >= 2.6.1 -Requires: python26-jinja2 -Requires: python26-msgpack > 0.3 -Requires: python26-PyYAML -Requires: python26-tornado >= 4.2.1 -Requires: python26-zmq -Requires: python26-six - -%else - -%if ((0%{?rhel} >= 6 || 0%{?fedora} > 12) && 0%{?include_tests}) -BuildRequires: python-tornado >= 4.2.1 -BuildRequires: python-futures >= 2.0 -BuildRequires: python-crypto >= 2.6.1 -BuildRequires: python-jinja2 -BuildRequires: python-msgpack > 0.3 -BuildRequires: python-pip -BuildRequires: python-zmq -BuildRequires: PyYAML -# this BR causes windows tests to happen 
-# clearly, that's not desired -# https://github.com/saltstack/salt/issues/3749 -BuildRequires: git -BuildRequires: python-libcloud -BuildRequires: python-six - - -%endif -BuildRequires: python-devel -Requires: m2crypto -Requires: python-crypto -Requires: python-zmq -Requires: python-jinja2 -Requires: PyYAML -Requires: python-msgpack -Requires: python-requests - -%endif - -%if ! (0%{?rhel} >= 7 || 0%{?fedora} >= 15) - -Requires(post): chkconfig -Requires(preun): chkconfig -Requires(preun): initscripts -Requires(postun): initscripts - -%else - -%if 0%{?systemd_preun:1} - -Requires(post): systemd-units -Requires(preun): systemd-units -Requires(postun): systemd-units - -%endif - -BuildRequires: systemd-units -Requires: systemd-python - -%endif - -%description -Salt is a distributed remote execution system used to execute commands and -query data. It was developed in order to bring the best solutions found in -the world of remote execution together and make them better, faster and more -malleable. Salt accomplishes this via its ability to handle larger loads of -information, and not just dozens, but hundreds or even thousands of individual -servers, handle them quickly and through a simple and manageable interface. - -%package master -Summary: Management component for salt, a parallel remote execution system -Group: System Environment/Daemons -Requires: %{name} = %{version}-%{release} -%if (0%{?rhel} >= 7 || 0%{?fedora} >= 15) -Requires: systemd-python -%endif - -%description master -The Salt master is the central server to which all minions connect. - -%package minion -Summary: Client component for Salt, a parallel remote execution system -Group: System Environment/Daemons -Requires: %{name} = %{version}-%{release} - -%description minion -The Salt minion is the agent component of Salt. It listens for instructions -from the master, runs jobs, and returns results back to the master. 
- -%package syndic -Summary: Master-of-master component for Salt, a parallel remote execution system -Group: System Environment/Daemons -Requires: %{name}-master = %{version}-%{release} - -%description syndic -The Salt syndic is a master daemon which can receive instruction from a -higher-level master, allowing for tiered organization of your Salt -infrastructure. - -%package api -Summary: REST API for Salt, a parallel remote execution system -Group: System administration tools -Requires: %{name}-master = %{version}-%{release} -%if 0%{?with_python26} -Requires: python26-cherrypy -%else -Requires: python-cherrypy -%endif - - -%description api -salt-api provides a REST interface to the Salt master. - -%package cloud -Summary: Cloud provisioner for Salt, a parallel remote execution system -Group: System administration tools -Requires: %{name}-master = %{version}-%{release} -%if 0%{?with_python26} -Requires: python26-libcloud -%else -Requires: python-libcloud -%endif - -%description cloud -The salt-cloud tool provisions new cloud VMs, installs salt-minion on them, and -adds them to the master's collection of controllable minions. - -%package ssh -Summary: Agentless SSH-based version of Salt, a parallel remote execution system -Group: System administration tools -Requires: %{name} = %{version}-%{release} - -%description ssh -The salt-ssh tool can run remote execution functions and states without the use -of an agent (salt-minion) service. 
- -%prep -%setup -n %{name}-%{srcver} - -%build - - -%install -rm -rf %{buildroot} -%{__python} setup.py install -O1 --root %{buildroot} - -# Add some directories -install -d -m 0755 %{buildroot}%{_var}/log/salt -touch %{buildroot}%{_var}/log/salt/minion -touch %{buildroot}%{_var}/log/salt/master -install -d -m 0755 %{buildroot}%{_var}/cache/salt -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/master.d -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/minion.d -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/pki -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/pki/master -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/pki/minion -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/cloud.conf.d -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/cloud.deploy.d -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/cloud.maps.d -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/cloud.profiles.d -install -d -m 0755 %{buildroot}%{_sysconfdir}/salt/cloud.providers.d - -# Add the config files -install -p -m 0640 conf/minion %{buildroot}%{_sysconfdir}/salt/minion -install -p -m 0640 conf/master %{buildroot}%{_sysconfdir}/salt/master -install -p -m 0640 conf/cloud %{buildroot}%{_sysconfdir}/salt/cloud -install -p -m 0640 conf/roster %{buildroot}%{_sysconfdir}/salt/roster -install -p -m 0640 conf/proxy %{buildroot}%{_sysconfdir}/salt/proxy - -%if ! 
(0%{?rhel} >= 7 || 0%{?fedora} >= 15) -mkdir -p %{buildroot}%{_initrddir} -install -p %{SOURCE1} %{buildroot}%{_initrddir}/ -install -p %{SOURCE2} %{buildroot}%{_initrddir}/ -install -p %{SOURCE3} %{buildroot}%{_initrddir}/ -install -p %{SOURCE4} %{buildroot}%{_initrddir}/ -%else -mkdir -p %{buildroot}%{_unitdir} -install -p -m 0644 %{SOURCE5} %{buildroot}%{_unitdir}/ -install -p -m 0644 %{SOURCE6} %{buildroot}%{_unitdir}/ -install -p -m 0644 %{SOURCE7} %{buildroot}%{_unitdir}/ -install -p -m 0644 %{SOURCE8} %{buildroot}%{_unitdir}/ -%endif - -# Force python2.6 on EPEL6 -# https://github.com/saltstack/salt/issues/22003 -%if 0%{?rhel} == 6 -sed -i 's#/usr/bin/python#/usr/bin/python2.6#g' %{buildroot}%{_bindir}/salt* -sed -i 's#/usr/bin/python#/usr/bin/python2.6#g' %{buildroot}%{_initrddir}/salt* -%endif - -install -p %{SOURCE9} . - -# Logrotate -mkdir -p %{buildroot}%{_sysconfdir}/logrotate.d/ -install -p %{SOURCE10} %{buildroot}%{_sysconfdir}/logrotate.d/salt - -# Bash completion -mkdir -p %{buildroot}%{_sysconfdir}/bash_completion.d/ -install -p -m 0644 %{SOURCE11} %{buildroot}%{_sysconfdir}/bash_completion.d/salt.bash - -%clean -rm -rf %{buildroot} - -%files -%defattr(-,root,root,-) -%doc LICENSE -%doc %{_mandir}/man1/spm.1.* -%{python_sitelib}/%{name}/* -%{python_sitelib}/%{name}-*-py?.?.egg-info -%{_sysconfdir}/logrotate.d/salt -%{_sysconfdir}/bash_completion.d/salt.bash -%{_var}/cache/salt -%{_var}/log/salt -%doc README.fedora -%{_bindir}/spm -%config(noreplace) %{_sysconfdir}/salt/ -%config(noreplace) %{_sysconfdir}/salt/pki - -%files master -%defattr(-,root,root) -%doc %{_mandir}/man7/salt.7.* -%doc %{_mandir}/man1/salt-cp.1.* -%doc %{_mandir}/man1/salt-key.1.* -%doc %{_mandir}/man1/salt-master.1.* -%doc %{_mandir}/man1/salt-run.1.* -%{_bindir}/salt -%{_bindir}/salt-cp -%{_bindir}/salt-key -%{_bindir}/salt-master -%{_bindir}/salt-run -%if ! 
(0%{?rhel} >= 7 || 0%{?fedora} >= 15) -%attr(0755, root, root) %{_initrddir}/salt-master -%else -%{_unitdir}/salt-master.service -%endif -%config(noreplace) %{_sysconfdir}/salt/master -%config(noreplace) %{_sysconfdir}/salt/master.d -%config(noreplace) %{_sysconfdir}/salt/pki/master -%config(noreplace) %{_var}/log/salt/master - -%files minion -%defattr(-,root,root) -%doc %{_mandir}/man1/salt-call.1.* -%doc %{_mandir}/man1/salt-minion.1.* -%doc %{_mandir}/man1/salt-proxy.1.* -%{_bindir}/salt-minion -%{_bindir}/salt-call -%{_bindir}/salt-proxy -%if ! (0%{?rhel} >= 7 || 0%{?fedora} >= 15) -%attr(0755, root, root) %{_initrddir}/salt-minion -%else -%{_unitdir}/salt-minion.service -%endif -%config(noreplace) %{_sysconfdir}/salt/minion -%config(noreplace) %{_sysconfdir}/salt/proxy -%config(noreplace) %{_sysconfdir}/salt/minion.d -%config(noreplace) %{_sysconfdir}/salt/pki/minion -%config(noreplace) %{_var}/log/salt/minion - -%files syndic -%doc %{_mandir}/man1/salt-syndic.1.* -%{_bindir}/salt-syndic -%if ! (0%{?rhel} >= 7 || 0%{?fedora} >= 15) -%attr(0755, root, root) %{_initrddir}/salt-syndic -%else -%{_unitdir}/salt-syndic.service -%endif - -%files api -%defattr(-,root,root) -%doc %{_mandir}/man1/salt-api.1.* -%{_bindir}/salt-api -%if ! (0%{?rhel} >= 7 || 0%{?fedora} >= 15) -%attr(0755, root, root) %{_initrddir}/salt-api -%else -%{_unitdir}/salt-api.service -%endif - -%files cloud -%doc %{_mandir}/man1/salt-cloud.1.* -%{_bindir}/salt-cloud -%{_sysconfdir}/salt/cloud.conf.d -%{_sysconfdir}/salt/cloud.deploy.d -%{_sysconfdir}/salt/cloud.maps.d -%{_sysconfdir}/salt/cloud.profiles.d -%{_sysconfdir}/salt/cloud.providers.d -%config(noreplace) %{_sysconfdir}/salt/cloud - -%files ssh -%doc %{_mandir}/man1/salt-ssh.1.* -%{_bindir}/salt-ssh -%config(noreplace) %{_sysconfdir}/salt/roster - - -# less than RHEL 8 / Fedora 16 -# not sure if RHEL 7 will use systemd yet -%if ! 
(0%{?rhel} >= 7 || 0%{?fedora} >= 15) - -%preun master - if [ $1 -eq 0 ] ; then - /sbin/service salt-master stop >/dev/null 2>&1 - /sbin/chkconfig --del salt-master - fi - -%preun syndic - if [ $1 -eq 0 ] ; then - /sbin/service salt-syndic stop >/dev/null 2>&1 - /sbin/chkconfig --del salt-syndic - fi - -%preun minion - if [ $1 -eq 0 ] ; then - /sbin/service salt-minion stop >/dev/null 2>&1 - /sbin/chkconfig --del salt-minion - fi - -%post master - /sbin/chkconfig --add salt-master - -%post minion - /sbin/chkconfig --add salt-minion - -%postun master - if [ "$1" -ge "1" ] ; then - /sbin/service salt-master condrestart >/dev/null 2>&1 || : - fi - -%postun syndic - if [ "$1" -ge "1" ] ; then - /sbin/service salt-syndic condrestart >/dev/null 2>&1 || : - fi - -%postun minion - if [ "$1" -ge "1" ] ; then - /sbin/service salt-minion condrestart >/dev/null 2>&1 || : - fi - -%else - -%preun master -%if 0%{?systemd_preun:1} - %systemd_preun salt-master.service -%else - if [ $1 -eq 0 ] ; then - # Package removal, not upgrade - /bin/systemctl --no-reload disable salt-master.service > /dev/null 2>&1 || : - /bin/systemctl stop salt-master.service > /dev/null 2>&1 || : - fi -%endif - -%preun syndic -%if 0%{?systemd_preun:1} - %systemd_preun salt-syndic.service -%else - if [ $1 -eq 0 ] ; then - # Package removal, not upgrade - /bin/systemctl --no-reload disable salt-syndic.service > /dev/null 2>&1 || : - /bin/systemctl stop salt-syndic.service > /dev/null 2>&1 || : - fi -%endif - -%preun minion -%if 0%{?systemd_preun:1} - %systemd_preun salt-minion.service -%else - if [ $1 -eq 0 ] ; then - # Package removal, not upgrade - /bin/systemctl --no-reload disable salt-minion.service > /dev/null 2>&1 || : - /bin/systemctl stop salt-minion.service > /dev/null 2>&1 || : - fi -%endif - -%post master -%if 0%{?systemd_post:1} - %systemd_post salt-master.service -%else - /bin/systemctl daemon-reload &>/dev/null || : -%endif - -%post minion -%if 0%{?systemd_post:1} - %systemd_post 
salt-minion.service -%else - /bin/systemctl daemon-reload &>/dev/null || : -%endif - -%postun master -%if 0%{?systemd_post:1} - %systemd_postun salt-master.service -%else - /bin/systemctl daemon-reload &>/dev/null - [ $1 -gt 0 ] && /bin/systemctl try-restart salt-master.service &>/dev/null || : -%endif - -%postun syndic -%if 0%{?systemd_post:1} - %systemd_postun salt-syndic.service -%else - /bin/systemctl daemon-reload &>/dev/null - [ $1 -gt 0 ] && /bin/systemctl try-restart salt-syndic.service &>/dev/null || : -%endif - -%postun minion -%if 0%{?systemd_post:1} - %systemd_postun salt-minion.service -%else - /bin/systemctl daemon-reload &>/dev/null - [ $1 -gt 0 ] && /bin/systemctl try-restart salt-minion.service &>/dev/null || : -%endif - -%endif diff --git a/tests/pytests/README.md b/tests/pytests/README.md index 8dacd90e797f..0eebf7de1147 100644 --- a/tests/pytests/README.md +++ b/tests/pytests/README.md @@ -11,4 +11,4 @@ shall be used, neither our [customizations to it](../support/case.py). While [PyTest](https://docs.pytest.org) can happily run unittest tests(withough taking advantage of most of PyTest's strengths), this new path in the tests directory was created to provide a clear separation between the two approaches to writing tests. Some(hopefully all) of the existing unittest tests might get ported to PyTest's style of writing tests, new tests should be added under -this directory tree, and, in the long run, this directoy shall become the top level tests directoy. +this directory tree, and, in the long run, this directory shall become the top level tests directory. 
diff --git a/tests/pytests/conftest.py b/tests/pytests/conftest.py index 49181b9ce56b..79807df80556 100644 --- a/tests/pytests/conftest.py +++ b/tests/pytests/conftest.py @@ -23,6 +23,7 @@ import salt.utils.files import salt.utils.platform from salt.serializers import yaml +from tests.conftest import FIPS_TESTRUN from tests.support.helpers import Webserver, get_virtualenv_binary_path from tests.support.pytest.helpers import TestAccount from tests.support.runtests import RUNTIME_VARS @@ -186,7 +187,10 @@ def salt_master_factory( os.path.join(RUNTIME_VARS.FILES, "returners") ) config_defaults["event_return"] = "runtests_noop" - config_overrides = {"pytest-master": {"log": {"level": "DEBUG"}}} + config_overrides = { + "pytest-master": {"log": {"level": "DEBUG"}}, + "fips_mode": FIPS_TESTRUN, + } ext_pillar = [] if salt.utils.platform.is_windows(): ext_pillar.append( @@ -315,6 +319,7 @@ def salt_minion_factory(salt_master_factory, salt_minion_id, sdb_etcd_port, vaul config_overrides = { "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() @@ -345,6 +350,7 @@ def salt_sub_minion_factory(salt_master_factory, salt_sub_minion_id): config_overrides = { "file_roots": salt_master_factory.config["file_roots"].copy(), "pillar_roots": salt_master_factory.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, } virtualenv_binary = get_virtualenv_binary_path() @@ -547,6 +553,15 @@ def ssl_webserver(integration_files_dir, this_txt_file): yield webserver +@pytest.fixture(scope="module") +def webserver(integration_files_dir, this_txt_file): + """ + spins up an http webserver. 
+ """ + with Webserver(root=str(integration_files_dir)) as webserver: + yield webserver + + # ----- Async Test Fixtures -----------------------------------------------------------------------------------------> # This is based on https://github.com/eukaryote/pytest-tornasync # The reason why we don't use that pytest plugin instead is because it has diff --git a/tests/pytests/functional/cache/test_consul.py b/tests/pytests/functional/cache/test_consul.py index 1156b7aab24a..0a42913b6c29 100644 --- a/tests/pytests/functional/cache/test_consul.py +++ b/tests/pytests/functional/cache/test_consul.py @@ -14,6 +14,7 @@ log = logging.getLogger(__name__) pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.slow_test, pytest.mark.skip_if_binaries_missing("dockerd"), ] @@ -67,5 +68,6 @@ def cache(minion_opts, consul_container): return cache +@pytest.mark.slow_test def test_caching(subtests, cache): run_common_cache_tests(subtests, cache) diff --git a/tests/pytests/functional/channel/test_client.py b/tests/pytests/functional/channel/test_client.py new file mode 100644 index 000000000000..145ad95b771e --- /dev/null +++ b/tests/pytests/functional/channel/test_client.py @@ -0,0 +1,25 @@ +import salt.channel.client +from tests.support.mock import MagicMock, patch + + +async def test_async_pub_channel_connect_cb(minion_opts): + """ + Validate connect_callback closes the request channel it creates. 
+ """ + minion_opts["master_uri"] = "tcp://127.0.0.1:4506" + minion_opts["master_ip"] = "127.0.0.1" + with salt.channel.client.AsyncPubChannel.factory(minion_opts) as channel: + + async def send_id(*args): + return + + channel.send_id = send_id + channel._reconnected = True + + mock = MagicMock(salt.channel.client.AsyncReqChannel) + mock.__enter__ = lambda self: mock + + with patch("salt.channel.client.AsyncReqChannel.factory", return_value=mock): + await channel.connect_callback(None) + mock.send.assert_called_once() + mock.__exit__.assert_called_once() diff --git a/tests/pytests/functional/channel/test_server.py b/tests/pytests/functional/channel/test_server.py index d92a55bf68f9..da6f9caa8c96 100644 --- a/tests/pytests/functional/channel/test_server.py +++ b/tests/pytests/functional/channel/test_server.py @@ -4,7 +4,9 @@ import os import pathlib import shutil +import stat import time +from pathlib import Path import pytest from pytestshellutils.utils import ports @@ -163,6 +165,23 @@ def handle_payload(payload): req_server_channel.post_fork(handle_payload, io_loop=io_loop) + if master_config["transport"] == "zeromq": + time.sleep(1) + attempts = 5 + while True: + try: + p = Path(str(master_config["sock_dir"])) / "workers.ipc" + mode = os.lstat(p).st_mode + assert bool(os.lstat(p).st_mode & stat.S_IRUSR) + assert not bool(os.lstat(p).st_mode & stat.S_IRGRP) + assert not bool(os.lstat(p).st_mode & stat.S_IROTH) + break + except FileNotFoundError as exc: + if not attempts: + raise exc from None + attempts -= 1 + time.sleep(2.5) + pub_channel = salt.channel.client.AsyncPubChannel.factory(minion_config) received = [] diff --git a/tests/pytests/functional/cli/test_salt.py b/tests/pytests/functional/cli/test_salt.py index cc7fa703859a..b3e5e5bd45c8 100644 --- a/tests/pytests/functional/cli/test_salt.py +++ b/tests/pytests/functional/cli/test_salt.py @@ -1,8 +1,39 @@ +import logging import os +import shutil import pytest import salt.version +from tests.conftest import 
CODE_DIR + +log = logging.getLogger(__name__) + + +@pytest.fixture(autouse=True) +def _install_salt_extension(shell): + if os.environ.get("ONEDIR_TESTRUN", "0") == "0": + yield + else: + script_name = "salt-pip" + if salt.utils.platform.is_windows(): + script_name += ".exe" + + script_path = CODE_DIR / "artifacts" / "salt" / script_name + assert script_path.exists() + try: + ret = shell.run( + str(script_path), "install", "salt-analytics-framework==0.1.0" + ) + assert ret.returncode == 0 + log.info(ret) + yield + finally: + ret = shell.run( + str(script_path), "uninstall", "-y", "salt-analytics-framework" + ) + log.info(ret) + shutil.rmtree(script_path.parent / "extras-3.10", ignore_errors=True) @pytest.mark.windows_whitelisted @@ -52,5 +83,25 @@ def test_versions_report(salt_cli): assert key in expected_keys expected_keys.remove(key) assert not expected_keys - if os.environ.get("ONEDIR_TESTRUN", "0") == "1": - assert "relenv" in ret_dict["Dependency Versions"] + if os.environ.get("ONEDIR_TESTRUN", "0") == "0": + # Stop any more testing + return + + assert "relenv" in ret_dict["Dependency Versions"] + assert "Salt Extensions" in ret_dict + assert "salt-analytics-framework" in ret_dict["Salt Extensions"] + + +def test_help_log(salt_cli): + """ + Test to ensure when we pass in `--help` the insecure + log warning is included. + """ + ret = salt_cli.run("--help") + count = 0 + stdout = ret.stdout.split("\n") + for line in stdout: + if "sensitive data:" in line: + count += 1 + assert line.strip() == "sensitive data: all, debug, garbage, profile, trace" + assert count == 2 diff --git a/tests/pytests/functional/cli/test_salt_deltaproxy.py b/tests/pytests/functional/cli/test_salt_deltaproxy.py new file mode 100644 index 000000000000..5bc7604c84ab --- /dev/null +++ b/tests/pytests/functional/cli/test_salt_deltaproxy.py @@ -0,0 +1,225 @@ +""" +:codeauthor: Gareth J. 
Greenaway (ggreenaway@vmware.com)
+"""
+
+import logging
+import os
+import random
+
+import pytest
+from saltfactories.utils import random_string
+
+import salt.defaults.exitcodes
+from tests.support.helpers import PRE_PYTEST_SKIP_REASON
+
+log = logging.getLogger(__name__)
+
+
+pytestmark = [
+    pytest.mark.skip_on_spawning_platform(
+        reason="Deltaproxy minions do not currently work on spawning platforms.",
+    ),
+    pytest.mark.core_test,
+]
+
+
+@pytest.fixture(scope="package")
+def salt_master(salt_factories):
+    config_defaults = {
+        "open_mode": True,
+    }
+    salt_master = salt_factories.salt_master_daemon(
+        "deltaproxy-functional-master", defaults=config_defaults
+    )
+    with salt_master.started():
+        yield salt_master
+
+
+@pytest.fixture(scope="package")
+def salt_cli(salt_master):
+    """
+    The ``salt`` CLI as a fixture against the running master
+    """
+    assert salt_master.is_running()
+    return salt_master.salt_cli(timeout=30)
+
+
+@pytest.fixture(scope="package", autouse=True)
+def skip_on_tcp_transport(request):
+    if request.config.getoption("--transport") == "tcp":
+        pytest.skip("Deltaproxy under the TCP transport is not working. See #61367")
+
+
+@pytest.fixture
+def proxy_minion_id(salt_master):
+    _proxy_minion_id = random_string("proxy-minion-")
+
+    try:
+        yield _proxy_minion_id
+    finally:
+        # Remove stale key if it exists
+        pytest.helpers.remove_stale_minion_key(salt_master, _proxy_minion_id)
+
+
+def clear_proxy_minions(salt_master, proxy_minion_id):
+    for proxy in [proxy_minion_id, "dummy_proxy_one", "dummy_proxy_two"]:
+        pytest.helpers.remove_stale_minion_key(salt_master, proxy)
+
+        cachefile = os.path.join(
+            salt_master.config["cachedir"], "{}.cache".format(proxy)
+        )
+        if os.path.exists(cachefile):
+            os.unlink(cachefile)
+
+
+# Hangs on Windows. You can add a timeout to the proxy.run command, but then
+# it just times out.
+@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON) +@pytest.mark.parametrize( + "parallel_startup", + [True, False], + ids=["parallel_startup=True", "parallel_startup=False"], +) +def test_exit_status_correct_usage_large_number_of_minions( + salt_master, + salt_cli, + proxy_minion_id, + parallel_startup, +): + """ + Ensure the salt-proxy control proxy starts and + is able to respond to test.ping, additionally ensure that + the proxies being controlled also respond to test.ping. + + Finally ensure correct exit status when salt-proxy exits correctly. + + Skip on Windows because daemonization not supported + """ + + config_defaults = { + "metaproxy": "deltaproxy", + } + sub_proxies = [ + "proxy_one", + "proxy_two", + "proxy_three", + "proxy_four", + "proxy_five", + "proxy_six", + "proxy_seven", + "proxy_eight", + "proxy_nine", + "proxy_ten", + "proxy_eleven", + "proxy_twelve", + "proxy_thirteen", + "proxy_fourteen", + "proxy_fifteen", + "proxy_sixteen", + "proxy_seventeen", + "proxy_eighteen", + "proxy_nineteen", + "proxy_twenty", + "proxy_twenty_one", + "proxy_twenty_two", + "proxy_twenty_three", + "proxy_twenty_four", + "proxy_twenty_five", + "proxy_twenty_six", + "proxy_twenty_seven", + "proxy_twenty_eight", + "proxy_twenty_nine", + "proxy_thirty", + "proxy_thirty_one", + "proxy_thirty_two", + ] + + top_file = """ + base: + {control}: + - controlproxy + """.format( + control=proxy_minion_id, + ) + controlproxy_pillar_file = """ + proxy: + proxytype: deltaproxy + parallel_startup: {} + ids: + """.format( + parallel_startup + ) + + dummy_proxy_pillar_file = """ + proxy: + proxytype: dummy + """ + + for minion_id in sub_proxies: + top_file += """ + {minion_id}: + - dummy""".format( + minion_id=minion_id, + ) + + controlproxy_pillar_file += """ + - {} + """.format( + minion_id, + ) + + top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file) + controlproxy_tempfile = salt_master.pillar_tree.base.temp_file( + "controlproxy.sls", 
controlproxy_pillar_file + ) + dummy_proxy_tempfile = salt_master.pillar_tree.base.temp_file( + "dummy.sls", + dummy_proxy_pillar_file, + ) + with top_tempfile, controlproxy_tempfile, dummy_proxy_tempfile: + with salt_master.started(): + assert salt_master.is_running() + + factory = salt_master.salt_proxy_minion_daemon( + proxy_minion_id, + defaults=config_defaults, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + start_timeout=240, + ) + + for minion_id in [proxy_minion_id] + sub_proxies: + factory.before_start( + pytest.helpers.remove_stale_proxy_minion_cache_file, + factory, + minion_id, + ) + factory.after_terminate( + pytest.helpers.remove_stale_minion_key, salt_master, minion_id + ) + factory.after_terminate( + pytest.helpers.remove_stale_proxy_minion_cache_file, + factory, + minion_id, + ) + + with factory.started(): + assert factory.is_running() + + # Let's issue a ping the control proxy + ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id) + assert ret.returncode == 0 + assert ret.data is True + + for minion_id in random.sample(sub_proxies, 4): + # Let's issue a ping to one of the controlled proxies + ret = salt_cli.run("test.ping", minion_tgt=minion_id) + assert ret.returncode == 0 + assert ret.data is True + + # Terminate the proxy minion + ret = factory.terminate() + assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret + + # Terminate the salt master + ret = salt_master.terminate() + assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret diff --git a/tests/pytests/functional/cli/test_salt_pip.py b/tests/pytests/functional/cli/test_salt_pip.py new file mode 100644 index 000000000000..22284d8488a4 --- /dev/null +++ b/tests/pytests/functional/cli/test_salt_pip.py @@ -0,0 +1,31 @@ +import os + +import pytest + +import salt.scripts +import salt.utils.platform +from tests.conftest import CODE_DIR +from tests.support.mock import patch + + +def test_within_onedir_env(shell): + if os.environ.get("ONEDIR_TESTRUN", "0") 
== "0": + return + + script_name = "salt-pip" + if salt.utils.platform.is_windows(): + script_name += ".exe" + + script_path = CODE_DIR / "artifacts" / "salt" / script_name + assert script_path.exists() + + ret = shell.run(str(script_path), "list") + assert ret.returncode == 0 + + +def test_outside_onedir_env(capsys): + with patch("salt.scripts._get_onedir_env_path", return_value=None): + with pytest.raises(SystemExit) as exc: + salt.scripts.salt_pip() + captured = capsys.readouterr() + assert "'salt-pip' is only meant to be used from a Salt onedir." in captured.err diff --git a/tests/pytests/functional/cli/test_salt_run_.py b/tests/pytests/functional/cli/test_salt_run_.py new file mode 100644 index 000000000000..63841f9fb8c6 --- /dev/null +++ b/tests/pytests/functional/cli/test_salt_run_.py @@ -0,0 +1,85 @@ +import logging +import os + +import salt.version + +log = logging.getLogger(__name__) + + +def test_salt_run_exception_exit(salt_run_cli): + """ + test that the exitcode is 1 when an exception is + thrown in a salt runner + """ + ret = salt_run_cli.run( + "error.error", "name='Exception'", "message='This is an error.'" + ) + assert ret.returncode == 1 + + +def test_salt_run_non_exception_exit(salt_run_cli): + """ + Test standard exitcode and output when runner works. 
+    """
+    ret = salt_run_cli.run("test.stdout_print")
+    assert ret.returncode == 0
+    assert ret.stdout == 'foo\n"bar"\n'
+
+
+def test_versions_report(salt_run_cli):
+    """
+    test salt-run --versions-report
+    """
+    expected = salt.version.versions_information()
+    # sanitize expected of unnecessary whitespace
+    for _, section in expected.items():
+        for key in section:
+            if isinstance(section[key], str):
+                section[key] = section[key].strip()
+
+    ret = salt_run_cli.run("--versions-report")
+    assert ret.returncode == 0
+    assert ret.stdout
+    ret_lines = ret.stdout.split("\n")
+
+    assert ret_lines
+    # sanitize lines
+    ret_lines = [line.strip() for line in ret_lines]
+
+    for header in expected:
+        assert "{}:".format(header) in ret_lines
+
+    ret_dict = {}
+    expected_keys = set()
+    for line in ret_lines:
+        if not line:
+            continue
+        if line.endswith(":"):
+            assert not expected_keys
+            current_header = line.rstrip(":")
+            assert current_header in expected
+            ret_dict[current_header] = {}
+            expected_keys = set(expected[current_header].keys())
+        else:
+            key, *value_list = line.split(":", 1)
+            assert value_list
+            assert len(value_list) == 1
+            value = value_list[0].strip()
+            if value == "Not Installed":
+                value = None
+            ret_dict[current_header][key] = value
+            assert key in expected_keys
+            expected_keys.remove(key)
+    assert not expected_keys
+    if os.environ.get("ONEDIR_TESTRUN", "0") == "0":
+        # Stop any more testing
+        return
+
+    assert "relenv" in ret_dict["Dependency Versions"]
+    assert "Salt Extensions" in ret_dict
+
+
+def test_salt_run_version(salt_run_cli):
+    expected = salt.version.__saltstack_version__.formatted_version
+    ret = salt_run_cli.run("--version")
+    assert f"cli_salt_run.py {expected}\n" == ret.stdout
diff --git a/tests/pytests/functional/grains/test_grains.py b/tests/pytests/functional/grains/test_grains.py
new file mode 100644
index 000000000000..0d5619ab5cba
--- /dev/null
+++ b/tests/pytests/functional/grains/test_grains.py
@@ -0,0 +1,23 @@
+import pytest
+
+pytestmark = [ + pytest.mark.skip_unless_on_linux, + pytest.mark.skipif( + 'grains["os_family"] != "Debian"', + reason="Tests applicable only to Debian and Ubuntu", + ), +] + + +def test_grains(grains): + """ + Test to ensure that the lsb_distrib_xxxx grains are + populated on Debian machines + """ + assert "lsb_distrib_id" in grains + assert "lsb_distrib_release" in grains + assert "lsb_distrib_codename" in grains + + assert grains["lsb_distrib_id"] == grains["osfullname"] + assert grains["lsb_distrib_release"] == grains["osrelease"] + assert grains["lsb_distrib_codename"] == grains["oscodename"] diff --git a/tests/pytests/functional/masterminion/test_masterminion_conf.py b/tests/pytests/functional/masterminion/test_masterminion_conf.py new file mode 100644 index 000000000000..1ce584de192c --- /dev/null +++ b/tests/pytests/functional/masterminion/test_masterminion_conf.py @@ -0,0 +1,41 @@ +""" +Tests for MasterMinion class +""" + +import logging +import os +import pathlib + +import pytest + +import salt.minion + +log = logging.getLogger(__name__) + + +@pytest.fixture(scope="module") +def minion_config_overrides(master_opts): + """Configure minion to use same root_dir and config path as master.""" + root_dir = pathlib.Path(master_opts["root_dir"]) + conf_file = root_dir / "conf" / "minion" + yield {"conf_file": str(conf_file), "root_dir": str(root_dir)} + + +@pytest.fixture +def minion_d_include_value(minion_opts): + """Create minion.d/test.conf with 'minion_d_value' config option.""" + conf_dir = pathlib.Path(minion_opts["conf_file"]).parent + minion_include_dir = (conf_dir / minion_opts["default_include"]).parent + test_conf = minion_include_dir / "test.conf" + os.makedirs(minion_include_dir) + with salt.utils.files.fopen(test_conf, "w") as test_conf: + test_conf.write("minion_d_value: True") + + +def test_issue_64219_masterminion_no_minion_d_include( + master_opts, minion_d_include_value +): + """Create MasterMinion and test it doesn't get config from 
'minion.d/*.conf'.""" + + mminion = salt.minion.MasterMinion(master_opts) + assert "minion_d_value" not in mminion.opts diff --git a/tests/pytests/functional/modules/state/requisites/test_listen.py b/tests/pytests/functional/modules/state/requisites/test_listen.py index 03210adc6b8a..35b8c131e225 100644 --- a/tests/pytests/functional/modules/state/requisites/test_listen.py +++ b/tests/pytests/functional/modules/state/requisites/test_listen.py @@ -2,6 +2,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] @@ -352,3 +353,38 @@ def test_onlyif_req(state, subtests): assert ret.result is False assert ret.changes assert ret.comment == "Failure!" + + +def test_listen_requisite_not_exist(state, state_tree): + """ + Tests a simple state using the listen requisite + when the state id does not exist + """ + sls_contents = """ + successful_changing_state: + cmd.run: + - name: echo "Successful Change" + + non_changing_state: + test.succeed_without_changes + + test_listening_change_state: + cmd.run: + - name: echo "Listening State" + - listen: + - cmd: successful_changing_state + + test_listening_non_changing_state: + cmd.run: + - name: echo "Only run once" + - listen: + - test: non_changing_state_not_exist + """ + with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): + ret = state.sls("requisite") + assert ( + ret.raw[ + "Listen_Error_|-listen_non_changing_state_not_exist_|-listen_test_|-Listen_Error" + ]["comment"] + == "Referenced state test: non_changing_state_not_exist does not exist" + ) diff --git a/tests/pytests/functional/modules/state/requisites/test_mixed.py b/tests/pytests/functional/modules/state/requisites/test_mixed.py index 256bb4bea145..f7eb5e4245c4 100644 --- a/tests/pytests/functional/modules/state/requisites/test_mixed.py +++ b/tests/pytests/functional/modules/state/requisites/test_mixed.py @@ -4,8 +4,40 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] +import salt.modules.cmdmod as 
cmd +import salt.modules.config as config +import salt.modules.grains as grains +import salt.modules.saltutil as saltutil +import salt.modules.state as state_mod + + +@pytest.fixture +def configure_loader_modules(minion_opts): + return { + state_mod: { + "__opts__": minion_opts, + "__salt__": { + "config.option": config.option, + "config.get": config.get, + "saltutil.is_running": saltutil.is_running, + "grains.get": grains.get, + "cmd.run": cmd.run, + }, + }, + config: { + "__opts__": minion_opts, + }, + saltutil: { + "__opts__": minion_opts, + }, + grains: { + "__opts__": minion_opts, + }, + } + def test_requisites_mixed_require_prereq_use_1(state, state_tree): """ @@ -321,7 +353,6 @@ def test_issue_46762_prereqs_on_a_state_with_unfulfilled_requirements( @pytest.mark.skip_on_darwin(reason="Test is broken on macosx") -@pytest.mark.slow_test def test_issue_30161_unless_and_onlyif_together(state, state_tree, tmp_path): """ test cmd.run using multiple unless options where the first cmd in the @@ -401,3 +432,23 @@ def test_issue_30161_unless_and_onlyif_together(state, state_tree, tmp_path): } for slsid in _expected: assert ret[slsid].comment == _expected[slsid]["comment"] + + +def test_requisites_mixed_illegal_req(state_tree): + """ + Call sls file containing several requisites. + When one of the requisites is illegal. 
+ """ + sls_contents = """ + A: + cmd.run: + - name: echo A + B: + cmd.run: + - name: echo B + - require: + - cmd: ["A"] + """ + with pytest.helpers.temp_file("requisite.sls", sls_contents, state_tree): + ret = state_mod.sls("requisite") + assert ret == ["Illegal requisite \"['A']\", please check your syntax.\n"] diff --git a/tests/pytests/functional/modules/state/requisites/test_onchanges.py b/tests/pytests/functional/modules/state/requisites/test_onchanges.py index b1b1f76c561f..5c3f0c3e3902 100644 --- a/tests/pytests/functional/modules/state/requisites/test_onchanges.py +++ b/tests/pytests/functional/modules/state/requisites/test_onchanges.py @@ -4,6 +4,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] diff --git a/tests/pytests/functional/modules/state/requisites/test_onfail.py b/tests/pytests/functional/modules/state/requisites/test_onfail.py index ad5e89bbeacf..35a509e48ab1 100644 --- a/tests/pytests/functional/modules/state/requisites/test_onfail.py +++ b/tests/pytests/functional/modules/state/requisites/test_onfail.py @@ -4,6 +4,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] diff --git a/tests/pytests/functional/modules/state/requisites/test_onlyif.py b/tests/pytests/functional/modules/state/requisites/test_onlyif.py index eb7af07e38a7..7e85fbe9513c 100644 --- a/tests/pytests/functional/modules/state/requisites/test_onlyif.py +++ b/tests/pytests/functional/modules/state/requisites/test_onlyif.py @@ -2,6 +2,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] diff --git a/tests/pytests/functional/modules/state/requisites/test_prereq.py b/tests/pytests/functional/modules/state/requisites/test_prereq.py index 85850eef2282..546df6f99092 100644 --- a/tests/pytests/functional/modules/state/requisites/test_prereq.py +++ b/tests/pytests/functional/modules/state/requisites/test_prereq.py @@ -4,6 +4,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] 
diff --git a/tests/pytests/functional/modules/state/requisites/test_require.py b/tests/pytests/functional/modules/state/requisites/test_require.py index 1d1b3d1c4117..b20d9f151abd 100644 --- a/tests/pytests/functional/modules/state/requisites/test_require.py +++ b/tests/pytests/functional/modules/state/requisites/test_require.py @@ -6,6 +6,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] @@ -570,7 +571,6 @@ def test_issue_38683_require_order_failhard_combination(state, state_tree): assert ret[state_id].comment == "Failure!" -@pytest.mark.slow_test @pytest.mark.skip_on_windows def test_parallel_state_with_requires(state, state_tree): """ diff --git a/tests/pytests/functional/modules/state/requisites/test_unless.py b/tests/pytests/functional/modules/state/requisites/test_unless.py index 613ca18c4164..237163da40c1 100644 --- a/tests/pytests/functional/modules/state/requisites/test_unless.py +++ b/tests/pytests/functional/modules/state/requisites/test_unless.py @@ -2,6 +2,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] diff --git a/tests/pytests/functional/modules/state/requisites/test_use.py b/tests/pytests/functional/modules/state/requisites/test_use.py index 6dba56f0698a..0f4c5293423e 100644 --- a/tests/pytests/functional/modules/state/requisites/test_use.py +++ b/tests/pytests/functional/modules/state/requisites/test_use.py @@ -2,6 +2,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] diff --git a/tests/pytests/functional/modules/state/requisites/test_watch.py b/tests/pytests/functional/modules/state/requisites/test_watch.py index 9eff2c10b6d3..4b3bd645fa52 100644 --- a/tests/pytests/functional/modules/state/requisites/test_watch.py +++ b/tests/pytests/functional/modules/state/requisites/test_watch.py @@ -6,6 +6,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] @@ -56,7 +57,6 @@ def test_watch_in_failure(state, state_tree): ) 
-@pytest.mark.slow_test def test_requisites_watch_any(state, state_tree): """ Call sls file containing several require_in and require. diff --git a/tests/pytests/functional/modules/state/test_jinja_filters.py b/tests/pytests/functional/modules/state/test_jinja_filters.py index 220310aaaf07..38135ac967b1 100644 --- a/tests/pytests/functional/modules/state/test_jinja_filters.py +++ b/tests/pytests/functional/modules/state/test_jinja_filters.py @@ -6,6 +6,7 @@ import attr import pytest +from pytestskipmarkers.utils import platform import salt.utils.files import salt.utils.path @@ -498,6 +499,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="avg_not_list", + expected={"ret": 2.0}, + sls=""" + {% set result = 2 | avg() %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="difference", expected={"ret": [1, 3]}, @@ -509,6 +521,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="difference_hashable", + expected={"ret": [1, 3]}, + sls=""" + {% set result = (1, 2, 3, 4) | difference((2, 4, 6)) | list %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="intersect", expected={"ret": [2, 4]}, @@ -520,6 +543,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="intersect_hashable", + expected={"ret": [2, 4]}, + sls=""" + {% set result = (1, 2, 3, 4) | intersect((2, 4, 6)) | list %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="max", expected={"ret": 4}, @@ -568,6 +602,28 @@ def _filter_id(value): name="regex_match", expected={"ret": "('a', 'd')"}, sls=""" + {% set result = 'abcd' | regex_match('^(.*)bc(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_match_no_match", + expected={"ret": "None"}, + sls=""" + {% set result = 'abcd' | regex_match('^(.*)BC(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result 
}} + """, + ), + Filter( + name="regex_match_ignorecase", + expected={"ret": "('a', 'd')"}, + sls=""" {% set result = 'abcd' | regex_match('^(.*)BC(.*)$', ignorecase=True) %} test: module.run: @@ -575,6 +631,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="regex_match_multiline", + expected={"ret": "('foo1',)"}, + sls=""" + {% set result = 'foo1\nfoo2\n' | regex_match('(foo.$)', multiline=True) %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="regex_replace", expected={"ret": "lets__replace__spaces"}, @@ -586,10 +653,65 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="regex_replace_no_match", + expected={"ret": "lets replace spaces"}, + sls=r""" + {% set result = 'lets replace spaces' | regex_replace('\s+$', '__') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_replace_ignorecase", + expected={"ret": "barbar"}, + sls=r""" + {% set result = 'FOO1foo2' | regex_replace('foo.', 'bar', ignorecase=True) %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_replace_multiline", + expected={"ret": "bar bar "}, + sls=r""" + {% set result = 'FOO1\nfoo2\n' | regex_replace('^foo.$', 'bar', ignorecase=True, multiline=True) %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), Filter( name="regex_search", expected={"ret": "('a', 'd')"}, sls=""" + {% set result = 'abcd' | regex_search('^(.*)bc(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_search_no_match", + expected={"ret": "None"}, + sls=""" + {% set result = 'abcd' | regex_search('^(.*)BC(.*)$') %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="regex_search_ignorecase", + expected={"ret": "('a', 'd')"}, + sls=""" {% set result = 'abcd' | regex_search('^(.*)BC(.*)$', ignorecase=True) %} test: 
module.run: @@ -597,6 +719,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="regex_search_multiline", + expected={"ret": "('foo1',)"}, + sls=""" + {% set result = 'foo1\nfoo2\n' | regex_search('(foo.$)', multiline=True) %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="sequence", expected={"ret": ["Salt Rocks!"]}, @@ -630,6 +763,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="symmetric_difference_hashable", + expected={"ret": [1, 3, 6]}, + sls=""" + {% set result = (1, 2, 3, 4) | symmetric_difference((2, 4, 6)) | list %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="to_bool", expected={"ret": True}, @@ -641,6 +785,39 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="to_bool_none", + expected={"ret": "False"}, + sls=""" + {% set result = 'None' | to_bool() %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), + Filter( + name="to_bool_given_bool", + expected={"ret": "True"}, + sls=""" + {% set result = true | to_bool() %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), + Filter( + name="to_bool_not_hashable", + expected={"ret": "True"}, + sls=""" + {% set result = ['hello', 'world'] | to_bool() %} + test: + module.run: + - name: test.echo + - text: '{{ result }}' + """, + ), Filter( name="union", expected={"ret": [1, 2, 3, 4, 6]}, @@ -652,6 +829,17 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="union_hashable", + expected={"ret": [1, 2, 3, 4, 6]}, + sls=""" + {% set result = (1, 2, 3, 4) | union((2, 4, 6)) | list %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), Filter( name="unique", expected={"ret": ["a", "b", "c"]}, @@ -928,11 +1116,117 @@ def _filter_id(value): - text: {{ result }} """, ), + Filter( + name="raise", + expected={"ret": {"Question": "Quieres Café?"}}, + sls=""" + {{ 
raise('Custom Error') }} + """, + ), + Filter( + name="match", + expected={"ret": "match"}, + sls=""" + {% if 'a' is match('[a-b]') %} + {% set result = 'match' %} + {% else %} + {% set result = 'no_match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="no_match", + expected={"ret": "no match"}, + sls=""" + {% if 'c' is match('[a-b]') %} + {% set result = 'match' %} + {% else %} + {% set result = 'no match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="match_ignorecase", + expected={"ret": "match"}, + sls=""" + {% if 'A' is match('[a-b]', True) %} + {% set result = 'match' %} + {% else %} + {% set result = 'no_match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="match_multiline", + expected={"ret": "match"}, + sls=""" + {% set ml_string = 'this is a multiline\nstring' %} + {% if ml_string is match('.*\n^string', False, True) %} + {% set result = 'match' %} + {% else %} + {% set result = 'no_match' %} + {% endif %} + + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="equalto", + expected={"ret": "equal"}, + sls=""" + {% if 1 is equalto(1) %} + {% set result = 'equal' %} + {% else %} + {% set result = 'not equal' %} + {% endif %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), + Filter( + name="un_equalto", + expected={"ret": "not equal"}, + sls=""" + {% if 1 is equalto(2) %} + {% set result = 'equal' %} + {% else %} + {% set result = 'not equal' %} + {% endif %} + test: + module.run: + - name: test.echo + - text: {{ result }} + """, + ), ], ids=_filter_id, ) def filter(request): - return request.param + _filter = request.param + if platform.is_fips_enabled(): + if _filter.name in ("md5", "random_hash"): + pytest.skip("Test cannot run on a FIPS enabled platform") + return _filter def 
test_filter(state, state_tree, filter, grains): @@ -940,7 +1234,11 @@ def test_filter(state, state_tree, filter, grains): with filter(state_tree): ret = state.sls("filter") log.debug("state.sls returned: %s", ret) - assert not ret.failed - for state_result in ret: - assert state_result.result is True - filter.assert_result(state_result.changes) + if filter.name == "raise": + assert ret.failed + assert "TemplateError" in ret.errors[0] + else: + assert not ret.failed + for state_result in ret: + assert state_result.result is True + filter.assert_result(state_result.changes) diff --git a/tests/pytests/functional/modules/state/test_jinja_renderer.py b/tests/pytests/functional/modules/state/test_jinja_renderer.py index dbd7b9232663..a29020327235 100644 --- a/tests/pytests/functional/modules/state/test_jinja_renderer.py +++ b/tests/pytests/functional/modules/state/test_jinja_renderer.py @@ -2,6 +2,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] diff --git a/tests/pytests/functional/modules/state/test_mako_renderer.py b/tests/pytests/functional/modules/state/test_mako_renderer.py new file mode 100644 index 000000000000..83a11df751c8 --- /dev/null +++ b/tests/pytests/functional/modules/state/test_mako_renderer.py @@ -0,0 +1,23 @@ +import pytest + +pytestmark = [ + pytest.mark.windows_whitelisted, +] + + +def test_mako_renderer(state, state_tree): + """ + Test mako renderer when running state.sls + """ + sls_contents = """ + #!mako|yaml + %for a in [1,2,3]: + echo ${a}: + cmd.run + %endfor + """ + with pytest.helpers.temp_file("issue-55124.sls", sls_contents, state_tree): + ret = state.sls("issue-55124") + for state_return in ret: + assert state_return.result is True + assert "echo" in state_return.id diff --git a/tests/pytests/functional/modules/state/test_pyobjects_renderer.py b/tests/pytests/functional/modules/state/test_pyobjects_renderer.py new file mode 100644 index 000000000000..a1afa06b65ba --- /dev/null +++ 
b/tests/pytests/functional/modules/state/test_pyobjects_renderer.py @@ -0,0 +1,37 @@ +import pytest + +pytestmark = [ + pytest.mark.windows_whitelisted, +] + + +def test_pyobjects_renderer(state, state_tree, tmp_path): + """ + Test pyobjects renderer when running state.sls + """ + file_path = str(tmp_path).replace("\\", "/") + sls1_contents = f""" + #!pyobjects + import pathlib + import salt://test_pyobjects2.sls + test_file = pathlib.Path("{file_path}", "test") + File.managed(str(test_file)) + """ + sls2_contents = f""" + #!pyobjects + import pathlib + test_file = pathlib.Path("{file_path}", "test2") + File.managed(str(test_file)) + """ + + with pytest.helpers.temp_file( + "test_pyobjects.sls", sls1_contents, state_tree + ) as state1: + with pytest.helpers.temp_file( + "test_pyobjects2.sls", sls2_contents, state_tree + ) as state2: + ret = state.sls("test_pyobjects") + assert not ret.errors + for state_return in ret: + assert state_return.result is True + assert str(tmp_path) in state_return.name diff --git a/tests/pytests/functional/modules/state/test_state.py b/tests/pytests/functional/modules/state/test_state.py index 3b88bfaeadc1..37f60b07dba5 100644 --- a/tests/pytests/functional/modules/state/test_state.py +++ b/tests/pytests/functional/modules/state/test_state.py @@ -7,10 +7,16 @@ import pytest import salt.loader +import salt.modules.cmdmod as cmd +import salt.modules.config as config +import salt.modules.grains as grains +import salt.modules.saltutil as saltutil +import salt.modules.state as state_mod import salt.utils.atomicfile import salt.utils.files import salt.utils.path import salt.utils.platform +import salt.utils.state as state_util import salt.utils.stringutils log = logging.getLogger(__name__) @@ -18,9 +24,36 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.core_test, ] +@pytest.fixture +def configure_loader_modules(minion_opts): + return { + state_mod: { + "__opts__": minion_opts, + "__salt__": { + "config.option": config.option, 
+ "config.get": config.get, + "saltutil.is_running": saltutil.is_running, + "grains.get": grains.get, + "cmd.run": cmd.run, + }, + "__utils__": {"state.check_result": state_util.check_result}, + }, + config: { + "__opts__": minion_opts, + }, + saltutil: { + "__opts__": minion_opts, + }, + grains: { + "__opts__": minion_opts, + }, + } + + def _check_skip(grains): if grains["os"] == "SUSE": return True @@ -649,7 +682,6 @@ def test_issues_7905_and_8174_sls_syntax_error(state, state_tree): assert ret.errors == ["State 'C' in SLS 'badlist2' is not formed as a list"] -@pytest.mark.slow_test def test_retry_option(state, state_tree): """ test the retry option on a simple state with defaults @@ -751,7 +783,6 @@ def test_retry_option_success_parallel(state, state_tree, tmp_path): assert "Attempt 2" not in state_return.comment -@pytest.mark.slow_test def test_retry_option_eventual_success(state, state_tree, tmp_path): """ test a state with the retry option that should return True, eventually @@ -799,7 +830,6 @@ def create_testfile(testfile1, testfile2): @pytest.mark.skip_on_windows( reason="Skipped until parallel states can be fixed on Windows" ) -@pytest.mark.slow_test def test_retry_option_eventual_success_parallel(state, state_tree, tmp_path): """ test a state with the retry option that should return True, eventually @@ -848,7 +878,6 @@ def create_testfile(testfile1, testfile2): assert "Attempt 5" not in state_return.comment -@pytest.mark.slow_test def test_state_non_base_environment(state, state_tree_prod, tmp_path): """ test state.sls with saltenv using a nonbase environment @@ -1009,3 +1038,48 @@ def test_issue_62264_requisite_not_found(state, state_tree): for state_return in ret: assert state_return.result is True assert "The following requisites were not found" not in state_return.comment + + +def test_state_sls_defaults(state, state_tree): + """ """ + json_contents = """ + { + "users": { + "root": 1 + } + } + """ + sls_contents = """ + {% set test = 
salt['defaults.get']('core:users:root') %} + + echo {{ test }}: + cmd.run + """ + state_id = "cmd_|-echo 1_|-echo 1_|-run" + core_dir = state_tree / "core" + with pytest.helpers.temp_file( + core_dir / "defaults.json", json_contents, state_tree + ): + with pytest.helpers.temp_file(core_dir / "test.sls", sls_contents, state_tree): + ret = state.sls("core.test") + assert state_id in ret + for state_return in ret: + assert state_return.result is True + assert "echo 1" in state_return.comment + + +def test_state_sls_mock_ret(state_tree): + """ + test state.sls when mock=True is passed + """ + sls_contents = """ + echo1: + cmd.run: + - name: "echo 'This is a test!'" + """ + with pytest.helpers.temp_file("mock.sls", sls_contents, state_tree): + ret = state_mod.sls("mock", mock=True) + assert ( + ret["cmd_|-echo1_|-echo 'This is a test!'_|-run"]["comment"] + == "Not called, mocked" + ) diff --git a/tests/pytests/functional/modules/test_ansiblegate.py b/tests/pytests/functional/modules/test_ansiblegate.py index 08dc32e36898..0c51b6227091 100644 --- a/tests/pytests/functional/modules/test_ansiblegate.py +++ b/tests/pytests/functional/modules/test_ansiblegate.py @@ -9,6 +9,7 @@ check_all=True, reason="ansible is not installed", ), + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/modules/test_aptpkg.py b/tests/pytests/functional/modules/test_aptpkg.py index 45b748d89492..7e086e498e2f 100644 --- a/tests/pytests/functional/modules/test_aptpkg.py +++ b/tests/pytests/functional/modules/test_aptpkg.py @@ -17,6 +17,7 @@ pytestmark = [ pytest.mark.skip_if_binaries_missing("apt-cache", "grep"), + pytest.mark.slow_test, ] KEY_FILES = ( @@ -97,6 +98,38 @@ def revert_repo_file(tmp_path): aptpkg.refresh_db() +@pytest.fixture +def build_repo_file(): + source_path = "/etc/apt/sources.list.d/source_test_list.list" + try: + test_repos = [ + "deb [signed-by=/etc/apt/keyrings/salt-archive-keyring-2023.gpg arch=amd64] 
https://repo.saltproject.io/salt/py3/ubuntu/22.04/amd64/latest jammy main", + "deb http://dist.list stable/all/", + ] + with salt.utils.files.fopen(source_path, "w+") as fp: + fp.write("\n".join(test_repos)) + yield source_path + finally: + if os.path.exists(source_path): + os.remove(source_path) + + +def get_repos_from_file(source_path): + """ + Get list of repos from repo in source_path + """ + test_repos = [] + try: + with salt.utils.files.fopen(source_path) as fp: + for line in fp: + test_repos.append(line.strip()) + except FileNotFoundError as error: + pytest.skip("Missing {}".format(error.filename)) + if not test_repos: + pytest.skip("Did not detect an APT repo") + return test_repos + + def get_current_repo(multiple_comps=False): """ Get a repo currently in sources.list @@ -194,18 +227,20 @@ def test_get_repos_doesnot_exist(): @pytest.mark.destructive_test -def test_del_repo(revert_repo_file): +@pytest.mark.skip_if_not_root +def test_del_repo(build_repo_file): """ Test aptpkg.del_repo when passing repo that exists. And checking correct error is returned when it no longer exists. """ - test_repo, comps = get_current_repo() - ret = aptpkg.del_repo(repo=test_repo) - assert "Repo '{}' has been removed".format(test_repo) - with pytest.raises(salt.exceptions.CommandExecutionError) as exc: + test_repos = get_repos_from_file(build_repo_file) + for test_repo in test_repos: ret = aptpkg.del_repo(repo=test_repo) - assert "Repo {} doesn't exist".format(test_repo) in exc.value.message + assert "Repo '{}' has been removed".format(test_repo) + with pytest.raises(salt.exceptions.CommandExecutionError) as exc: + ret = aptpkg.del_repo(repo=test_repo) + assert "Repo {} doesn't exist".format(test_repo) in exc.value.message @pytest.mark.skipif( @@ -241,6 +276,7 @@ def test__expand_repo_def(grains): @pytest.mark.destructive_test +@pytest.mark.skip_if_not_root def test_mod_repo(revert_repo_file): """ Test aptpkg.mod_repo when the repo exists. 
@@ -255,6 +291,7 @@ def test_mod_repo(revert_repo_file): @pytest.mark.destructive_test +@pytest.mark.skip_if_not_root def test_mod_repo_no_file(tmp_path, revert_repo_file): """ Test aptpkg.mod_repo when the file does not exist. @@ -283,6 +320,7 @@ def add_key(request, get_key_file): @pytest.mark.parametrize("get_key_file", KEY_FILES, indirect=True) @pytest.mark.parametrize("add_key", [False, True], indirect=True) @pytest.mark.destructive_test +@pytest.mark.skip_if_not_root def test_get_repo_keys(get_key_file, add_key): """ Test aptpkg.get_repo_keys when aptkey is False and True @@ -296,6 +334,7 @@ def test_get_repo_keys(get_key_file, add_key): @pytest.mark.parametrize("key", [False, True]) @pytest.mark.destructive_test +@pytest.mark.skip_if_not_root def test_get_repo_keys_keydir_not_exist(key): """ Test aptpkg.get_repo_keys when aptkey is False and True diff --git a/tests/pytests/functional/modules/test_defaults.py b/tests/pytests/functional/modules/test_defaults.py new file mode 100644 index 000000000000..ce4a57753a1d --- /dev/null +++ b/tests/pytests/functional/modules/test_defaults.py @@ -0,0 +1,40 @@ +import pytest + +pytestmark = [pytest.mark.skip_unless_on_linux] + + +@pytest.fixture(scope="module") +def defaults(modules): + return modules.defaults + + +def test_defaults_get(defaults, state_tree, caplog): + """ + test defaults.get + """ + + json_contents = """ + { + "users": { + "root": 1 + } + } + """ + path = state_tree / "core" + with pytest.helpers.temp_file("defaults.json", json_contents, path): + assert defaults.get("core:users:root") == 1 + + +def test_defaults_merge(defaults): + """ + test defaults.merge + """ + assert defaults.merge({"a": "b"}, {"d": "e"}) == {"a": "b", "d": "e"} + + +def test_defaults_deepcopy(defaults): + """ + test defaults.deepcopy + """ + test_dict = {"1": "one"} + assert defaults.deepcopy(test_dict) == test_dict diff --git a/tests/pytests/functional/modules/test_etcd_mod.py b/tests/pytests/functional/modules/test_etcd_mod.py 
index e7e6a586ac9b..e1e1b552df22 100644 --- a/tests/pytests/functional/modules/test_etcd_mod.py +++ b/tests/pytests/functional/modules/test_etcd_mod.py @@ -14,6 +14,7 @@ pytestmark = [ pytest.mark.skip_if_binaries_missing("docker", "dockerd", check_all=False), + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/modules/test_http.py b/tests/pytests/functional/modules/test_http.py new file mode 100644 index 000000000000..614ca71b0cbb --- /dev/null +++ b/tests/pytests/functional/modules/test_http.py @@ -0,0 +1,34 @@ +import pytest + +import salt.modules.http as http + + +@pytest.fixture +def configure_loader_modules(): + return {http: {}} + + +def test_query_error_tornado(webserver): + """ + Ensure we return expected data when website does not return + a 200 with tornado + """ + url = webserver.url("doesnotexist") + ret = http.query(url, backend="tornado") + assert ( + ret["body"] + == "404: Not Found404: Not Found" + ) + assert ret["error"] == "HTTP 404: Not Found" + assert ret["status"] == 404 + + +@pytest.mark.parametrize("backend", ["requests", "urllib2", "tornado"]) +def test_query_success(webserver, backend): + """ + Ensure we return a success when querying + a website + """ + url = webserver.url("this.txt") + ret = http.query(url, backend=backend) + assert ret == {"body": "test"} diff --git a/tests/pytests/functional/modules/test_mac_assistive.py b/tests/pytests/functional/modules/test_mac_assistive.py new file mode 100644 index 000000000000..d2b6808b56d0 --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_assistive.py @@ -0,0 +1,105 @@ +""" + :codeauthor: Nicole Thomas +""" + +import pytest + +from salt.exceptions import CommandExecutionError + +pytestmark = [ + pytest.mark.destructive_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def assistive(modules): + return modules.assistive + + +@pytest.fixture +def osa_script(assistive): + osa_script_path = "/usr/bin/osascript" 
+ try: + ret = assistive.install(osa_script_path, True) + yield osa_script_path + except CommandExecutionError as exc: + pytest.skip(f"Unable to install {osa_script}: {exc}") + finally: + osa_script_ret = assistive.installed(osa_script_path) + if osa_script_ret: + assistive.remove(osa_script_path) + + +@pytest.fixture +def install_remove_pkg_name(assistive): + smile_bundle = "com.smileonmymac.textexpander" + try: + yield smile_bundle + finally: + smile_bundle_present = assistive.installed(smile_bundle) + if smile_bundle_present: + assistive.remove(smile_bundle) + + +@pytest.mark.slow_test +def test_install_and_remove(assistive, install_remove_pkg_name): + """ + Tests installing and removing a bundled ID or command to use assistive access. + """ + ret = assistive.install(install_remove_pkg_name) + assert ret + ret = assistive.remove(install_remove_pkg_name) + assert ret + + +@pytest.mark.slow_test +def test_installed(assistive, osa_script): + """ + Tests the True and False return of assistive.installed. + """ + # OSA script should have been installed in _setup_teardown_vars function + ret = assistive.installed(osa_script) + assert ret + # Clean up install + assistive.remove(osa_script) + # Installed should now return False + ret = assistive.installed(osa_script) + assert not ret + + +@pytest.mark.slow_test +def test_enable(assistive, osa_script): + """ + Tests setting the enabled status of a bundled ID or command. + """ + # OSA script should have been installed and enabled in _setup_teardown_vars function + # Now let's disable it, which should return True. + ret = assistive.enable(osa_script, False) + assert ret + # Double check the script was disabled, as intended. + ret = assistive.enabled(osa_script) + assert not ret + # Now re-enable + ret = assistive.enable(osa_script) + assert ret + # Double check the script was enabled, as intended. 
+ ret = assistive.enabled(osa_script) + assert ret + + +@pytest.mark.slow_test +def test_enabled(assistive, osa_script): + """ + Tests if a bundled ID or command is listed in assistive access returns True. + """ + # OSA script should have been installed in _setup_teardown_vars function, which sets + # enabled to True by default. + ret = assistive.enabled(osa_script) + assert ret + # Disable OSA Script + assistive.enable(osa_script, False) + # Assert against new disabled status + ret = assistive.enabled(osa_script) + assert not ret diff --git a/tests/pytests/functional/modules/test_mac_brew_pkg.py b/tests/pytests/functional/modules/test_mac_brew_pkg.py new file mode 100644 index 000000000000..ae6fe9971bf6 --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_brew_pkg.py @@ -0,0 +1,132 @@ +""" + :codeauthor: Nicole Thomas + :codeauthor: Gareth J. Greenaway +""" + +import pytest + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.destructive_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, + pytest.mark.skip_if_binaries_missing("brew"), +] + + +@pytest.fixture(scope="module") +def pkg(modules): + return modules.pkg + + +@pytest.fixture +def pkg_1_name(pkg): + pkg_name = "algol68g" + try: + yield pkg_name + finally: + pkg_list = pkg.list_pkgs() + + # Remove package if installed + if pkg_name in pkg_list: + pkg.remove(pkg_name) + + +@pytest.fixture +def pkg_2_name(pkg): + pkg_name = "acme" + try: + pkg.install(pkg_name) + pkg_list = pkg.list_pkgs() + if pkg_name not in pkg_list: + pytest.skip(f"Failed to install the '{pkg_name}' package to delete") + yield pkg_name + finally: + pkg_list = pkg.list_pkgs() + + # Remove package if still installed + if pkg_name in pkg_list: + pkg.remove(pkg_name) + + +def test_brew_install(pkg, pkg_1_name): + """ + Tests the installation of packages + """ + pkg.install(pkg_1_name) + pkg_list = pkg.list_pkgs() + assert pkg_1_name in pkg_list + + +def test_remove(pkg, pkg_2_name): + """ + Tests the 
removal of packages + """ + pkg.remove(pkg_2_name) + pkg_list = pkg.list_pkgs() + assert pkg_2_name not in pkg_list + + +def test_version(pkg, pkg_1_name): + """ + Test pkg.version for mac. Installs a package and then checks we can get + a version for the installed package. + """ + pkg.install(pkg_1_name) + pkg_list = pkg.list_pkgs() + version = pkg.version(pkg_1_name) + assert version + assert pkg_1_name in pkg_list + # make sure the version is accurate and is listed in the pkg_list + assert version in str(pkg_list[pkg_1_name]) + + +def test_latest_version(pkg, pkg_1_name): + """ + Test pkg.latest_version: + - get the latest version available + - install the package + - get the latest version available + - check that the latest version is empty after installing it + """ + pkg.remove(pkg_1_name) + uninstalled_latest = pkg.latest_version(pkg_1_name) + + pkg.install(pkg_1_name) + installed_latest = pkg.latest_version(pkg_1_name) + version = pkg.version(pkg_1_name) + assert isinstance(uninstalled_latest, str) + assert installed_latest == version + + +def test_refresh_db(pkg): + """ + Integration test to ensure pkg.refresh_db works with brew + """ + refresh_brew = pkg.refresh_db() + assert refresh_brew + + +def test_list_upgrades(pkg, pkg_1_name): + """ + Test pkg.list_upgrades: data is in the form {'name1': 'version1', 'name2': 'version2', ... 
} + """ + upgrades = pkg.list_upgrades() + assert isinstance(upgrades, dict) + if upgrades: + for name in upgrades: + assert isinstance(name, str) + assert isinstance(upgrades[name], str) + + +def test_info_installed(pkg, pkg_1_name): + """ + Test pkg.info_installed: info returned has certain fields used by + mac_brew.latest_version + """ + pkg.install(pkg_1_name) + info = pkg.info_installed(pkg_1_name) + assert pkg_1_name in info + assert "versions" in info[pkg_1_name] + assert "revision" in info[pkg_1_name] + assert "stable" in info[pkg_1_name]["versions"] diff --git a/tests/pytests/functional/modules/test_mac_desktop.py b/tests/pytests/functional/modules/test_mac_desktop.py new file mode 100644 index 000000000000..7de6744adc06 --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_desktop.py @@ -0,0 +1,74 @@ +""" +Integration tests for the mac_desktop execution module. +""" + +import pytest + +from salt.exceptions import CommandExecutionError + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.destructive_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def desktop(modules): + return modules.desktop + + +def test_get_output_volume(desktop): + """ + Tests the return of get_output_volume. + """ + ret = desktop.get_output_volume() + assert ret is not None + + +def test_set_output_volume(desktop): + """ + Tests the return of set_output_volume. + """ + current_vol = desktop.get_output_volume() + try: + to_set = 10 + if current_vol == str(to_set): + to_set += 2 + new_vol = desktop.set_output_volume(str(to_set)) + check_vol = desktop.get_output_volume() + assert new_vol == check_vol + finally: + # Set volume back to what it was before + desktop.set_output_volume(current_vol) + + +def test_screensaver(desktop): + """ + Tests the return of the screensaver function. + """ + try: + ret = desktop.screensaver() + except CommandExecutionError as exc: + pytest.skip("Skipping. 
Screensaver unavailable.") + assert ret + + +def test_lock(desktop): + """ + Tests the return of the lock function. + """ + try: + ret = desktop.lock() + except CommandExecutionError as exc: + pytest.skip("Skipping. Unable to lock screen.") + assert ret + + +def test_say(desktop): + """ + Tests the return of the say function. + """ + ret = desktop.say("hello", "world") + assert ret diff --git a/tests/pytests/functional/modules/test_mac_group.py b/tests/pytests/functional/modules/test_mac_group.py new file mode 100644 index 000000000000..2f88943d979f --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_group.py @@ -0,0 +1,176 @@ +""" + :codeauthor: Nicole Thomas +""" + +import pytest +from saltfactories.utils import random_string + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.destructive_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def group(modules): + return modules.group + + +# Create group name strings for tests +@pytest.fixture(scope="module") +def non_existing_group_name(group): + group_name = random_string("group-", lowercase=False) + try: + yield group_name + finally: + # Delete the added group + group_info = group.info(group_name) + if group_info: + group.delete(group_name) + + +@pytest.fixture(scope="module") +def existing_group_name(group): + group_name = random_string("group-", lowercase=False) + try: + ret = group.add(group_name, 4567) + if ret is not True: + pytest.skip("Failed to create a group to delete") + yield group_name + finally: + # Delete the added group + group_info = group.info(group_name) + if group_info: + group.delete(group_name) + + +@pytest.fixture(scope="module") +def non_existing_user(group): + group_name = random_string("user-", lowercase=False) + try: + yield group_name + finally: + # Delete the added group + group_info = group.info(group_name) + if group_info: + group.delete(group_name) + + +@pytest.fixture(scope="module") +def 
existing_user(group, existing_group_name): + group_name = random_string("user-", lowercase=False) + try: + ret = group.adduser(existing_group_name, group_name) + if ret is not True: + pytest.skip("Failed to create an existing group member") + yield group_name + finally: + # Delete the added group + group_info = group.info(group_name) + if group_info: + group.delete(group_name) + + +@pytest.fixture(scope="module") +def rep_user_group(): + yield random_string("RS-", lowercase=False) + + +@pytest.fixture(scope="module") +def non_existing_group_member(group): + group_name = random_string("user-", lowercase=False) + try: + yield group_name + finally: + # Delete the added group + group_info = group.info(group_name) + if group_info: + group.delete(group_name) + + +def test_mac_group_add(group, non_existing_group_name): + """ + Tests the add group function + """ + group.add(non_existing_group_name, 3456) + group_info = group.info(non_existing_group_name) + assert group_info["name"] == non_existing_group_name + + +def test_mac_group_delete(group, existing_group_name): + """ + Tests the delete group function + """ + ret = group.delete(existing_group_name) + assert ret + + +def test_mac_group_chgid(group, existing_group_name): + """ + Tests changing the group id + """ + gid = 6789 + group_info = group.info(existing_group_name) + assert group_info["gid"] != gid + group.chgid(existing_group_name, gid) + group_info = group.info(existing_group_name) + assert group_info["gid"] == gid + + +def test_mac_adduser(group, non_existing_group_name, non_existing_user): + """ + Tests adding user to the group + """ + # Create a group to use for test - If unsuccessful, skip the test + ret = group.add(non_existing_group_name, 5678) + if ret is not True: + group.delete(non_existing_group_name) + pytest.skip("Failed to create a group to manipulate") + + group.adduser(non_existing_group_name, non_existing_user) + group_info = group.info(non_existing_group_name) + assert non_existing_user in 
group_info["members"] + assert group_info["members"] == [non_existing_user] + + +def test_mac_deluser(group, existing_group_name, existing_user): + """ + Test deleting user from a group + """ + delusr = group.deluser(existing_group_name, existing_user) + assert delusr + + group_info = group.info(existing_group_name) + assert existing_user not in group_info["members"] + + +def test_mac_members( + group, existing_group_name, existing_user, non_existing_group_member +): + """ + Test replacing members of a group + """ + group_info = group.info(existing_group_name) + assert non_existing_group_member not in group_info["members"] + assert non_existing_user in group_info["members"] + + # Replace group members + rep_group_mem = group.members(existing_group_name, non_existing_group_member) + assert rep_group_mem + + # ensure new user is added to group and previous user is removed + group_info = group.info(existing_group_name) + assert non_existing_group_member in group_info["members"] + assert non_existing_user not in group_info["members"] + + +def test_mac_getent(group, existing_user, existing_group_name): + """ + Test returning info on all groups + """ + getinfo = group.getent() + assert getinfo + assert existing_group_name in str(getinfo) + assert existing_user in str(getinfo) diff --git a/tests/pytests/functional/modules/test_mac_keychain.py b/tests/pytests/functional/modules/test_mac_keychain.py new file mode 100644 index 000000000000..eb67304487d2 --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_keychain.py @@ -0,0 +1,129 @@ +""" +Validate the mac-keychain module +""" + +import os + +import pytest + +import salt.utils.versions +from tests.support.runtests import RUNTIME_VARS + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.destructive_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def cmd(modules): + return modules.cmd + + +@pytest.fixture(scope="module") +def keychain(modules): + 
return modules.keychain + + +@pytest.fixture(scope="function", autouse=True) +def setup_teardown_vars(keychain, base_env_state_tree_root_dir): + cert = os.path.join(RUNTIME_VARS.FILES, "file", "base", "certs", "salttest.p12") + cert_alias = "Salt Test" + passwd = "salttest" + + try: + yield cert, cert_alias, passwd + finally: + certs_list = keychain.list_certs() + if cert_alias in certs_list: + keychain.uninstall(cert_alias) + + +def test_mac_keychain_install(keychain, setup_teardown_vars): + """ + Tests that attempts to install a certificate + """ + + cert = setup_teardown_vars[0] + cert_alias = setup_teardown_vars[1] + passwd = setup_teardown_vars[2] + + install_cert = keychain.install(cert, passwd) + assert install_cert + assert install_cert == "1 identity imported." + + # check to ensure the cert was installed + certs_list = keychain.list_certs() + assert cert_alias in certs_list + + +def test_mac_keychain_uninstall(keychain, setup_teardown_vars): + """ + Tests that attempts to uninstall a certificate + """ + + cert = setup_teardown_vars[0] + cert_alias = setup_teardown_vars[1] + passwd = setup_teardown_vars[2] + + keychain.install(cert, passwd) + certs_list = keychain.list_certs() + + if cert_alias not in certs_list: + keychain.uninstall(cert_alias) + pytest.skip("Failed to install keychain") + + # uninstall cert + keychain.uninstall(cert_alias) + certs_list = keychain.list_certs() + + # check to ensure the cert was uninstalled + assert cert_alias not in str(certs_list) + + +@pytest.mark.skip_if_binaries_missing("openssl") +def test_mac_keychain_get_friendly_name(keychain, shell, setup_teardown_vars): + """ + Test that attempts to get friendly name of a cert + """ + cert = setup_teardown_vars[0] + cert_alias = setup_teardown_vars[1] + passwd = setup_teardown_vars[2] + + keychain.install(cert, passwd) + certs_list = keychain.list_certs() + if cert_alias not in certs_list: + keychain.uninstall(cert_alias) + pytest.skip("Failed to install keychain") + + ret = 
shell.run("openssl", "version") + assert ret.stdout + openssl_version = ret.stdout.split()[1] + + # openssl versions under 3.0.0 do not include legacy flag + if salt.utils.versions.compare(ver1=openssl_version, oper="<", ver2="3.0.0"): + get_name = keychain.get_friendly_name(cert, passwd, legacy=False) + else: + get_name = keychain.get_friendly_name(cert, passwd, legacy=True) + + assert get_name == cert_alias + + +def test_mac_keychain_get_default_keychain(keychain, cmd, setup_teardown_vars): + """ + Test that attempts to get the default keychain + """ + sys_get_keychain = keychain.get_default_keychain() + salt_get_keychain = cmd.run("security default-keychain -d user") + assert salt_get_keychain == sys_get_keychain + + +def test_mac_keychain_list_certs(keychain, setup_teardown_vars): + """ + Test that attempts to list certs + """ + cert_default = "com.apple.systemdefault" + certs = keychain.list_certs() + assert cert_default in certs diff --git a/tests/pytests/functional/modules/test_mac_pkgutil.py b/tests/pytests/functional/modules/test_mac_pkgutil.py index 5ce546d776c1..02bb2e5641f8 100644 --- a/tests/pytests/functional/modules/test_mac_pkgutil.py +++ b/tests/pytests/functional/modules/test_mac_pkgutil.py @@ -56,6 +56,10 @@ def macports_package_url(macports_package_filename): @pytest.fixture(scope="module") def pkg_name(grains): + if grains["osrelease_info"][0] >= 13: + return "com.apple.pkg.CLTools_SDK_macOS13" + if grains["osrelease_info"][0] >= 12: + return "com.apple.pkg.XcodeSystemResources" if grains["osrelease_info"][0] >= 11: return "com.apple.pkg.InstallAssistant.macOSBigSur" if grains["osrelease_info"][:2] == (10, 15): diff --git a/tests/pytests/functional/modules/test_mac_portspkg.py b/tests/pytests/functional/modules/test_mac_portspkg.py new file mode 100644 index 000000000000..419af432c529 --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_portspkg.py @@ -0,0 +1,118 @@ +""" +integration tests for mac_ports +""" + +import pytest + 
+pytestmark = [ + pytest.mark.slow_test, + pytest.mark.destructive_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, + pytest.mark.skip_if_binaries_missing("port"), +] + + +@pytest.fixture(scope="module") +def pkg(modules): + return modules.pkg + + +@pytest.fixture +def uninstalled_pkg_name(pkg): + pkgname = installed_pkg_name + try: + pkg.refresh_db() + yield pkgname + finally: + if pkgname in pkg.list_pkgs(): + pkg.remove(pkgname) + + +@pytest.fixture +def installed_pkg_name(uninstalled_pkg_name): + pkg.install(uninstalled_pkg_name) + return uninstalled_pkg_name + + +@pytest.fixture(scope="function", autouse=True) +def _setup_teardown_vars(pkg): + AGREE_INSTALLED = False + try: + ret = pkg.list_pkgs() + AGREE_INSTALLED = installed_pkg_name in ret + pkg.refresh_db() + yield + finally: + if AGREE_INSTALLED: + pkg.remove(installed_pkg_name) + + +def test_list_pkgs(pkg, installed_pkg_name): + """ + Test pkg.list_pkgs + """ + pkg_list_ret = pkg.list_pkgs() + assert isinstance(pkg_list_ret, dict) + assert installed_pkg_name in pkg_list_ret + + +def test_latest_version(pkg, installed_pkg_name): + """ + Test pkg.latest_version + """ + result = pkg.latest_version(installed_pkg_name, refresh=False) + assert isinstance(result, dict) + assert installed_pkg_name in result.data + + +def test_remove(pkg, installed_pkg_name): + """ + Test pkg.remove + """ + ret = pkg.remove(installed_pkg_name) + assert isinstance(ret, dict) + assert installed_pkg_name in ret + + +@pytest.mark.destructive_test +def test_install(pkg, uninstalled_pkg_name): + """ + Test pkg.install + """ + ret = pkg.install(uninstalled_pkg_name) + assert isinstance(ret, dict) + assert uninstalled_pkg_name in ret + + +def test_list_upgrades_type(pkg): + """ + Test pkg.list_upgrades return type + """ + ret = pkg.list_upgrades(refresh=False) + assert isinstance(ret, dict) + + +def test_upgrade_available(pkg, installed_pkg_name): + """ + Test pkg.upgrade_available + """ + ret = 
pkg.upgrade_available(installed_pkg_name, refresh=False) + assert not ret.data + + +def test_refresh_db(pkg): + """ + Test pkg.refresh_db + """ + ret = pkg.refresh_db() + assert ret + + +def test_upgrade(pkg): + """ + Test pkg.upgrade + """ + ret = pkg.upgrade(refresh=False) + assert isinstance(ret, dict) + assert ret.data["result"] diff --git a/tests/pytests/functional/modules/test_mac_power.py b/tests/pytests/functional/modules/test_mac_power.py new file mode 100644 index 000000000000..fc444c222326 --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_power.py @@ -0,0 +1,339 @@ +""" +integration tests for mac_power +""" + +import pytest + +from salt.exceptions import CommandExecutionError, SaltInvocationError + +pytestmark = [ + pytest.mark.skip_if_binaries_missing("systemsetup"), + pytest.mark.slow_test, + pytest.mark.destructive_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def power(modules): + return modules.power + + +@pytest.fixture +def _reset_computer_sleep(power): + ret = power.get_computer_sleep() + try: + yield + finally: + power.set_computer_sleep(ret) + + +@pytest.fixture +def _reset_display_sleep(power): + ret = power.get_display_sleep() + try: + yield + finally: + power.set_display_sleep(ret) + + +@pytest.fixture +def _reset_harddisk_sleep(power): + ret = power.get_harddisk_sleep() + try: + yield + finally: + power.set_harddisk_sleep(ret) + + +@pytest.fixture +def _reset_restart_power_failure(power): + try: + ret = power.get_restart_power_failure() + if not isinstance(ret, bool): + assert "Error" in ret + pytest.skip(f"Error while calling `get_restart_power_failure()`: {ret}") + except CommandExecutionError as exc: + if "Not supported on this machine" in str(exc): + pytest.skip("Restart After Power Failure: Not supported on this machine.") + try: + yield + finally: + if isinstance(ret, bool): + if ret: + ret = power.set_restart_power_failure("On") + assert ret + else: + 
ret = power.set_restart_power_failure("Off") + assert ret + + +@pytest.fixture +def _reset_sleep_on_power_button(power): + try: + ret = power.get_sleep_on_power_button() + if not isinstance(ret, bool): + functionality_available = False + else: + functionality_available = True + except CommandExecutionError as exc: + functionality_available = False + + if functionality_available is False: + pytest.skip("Skipping. sleep_on_power_button unavailable.") + + try: + yield + finally: + power.set_sleep_on_power_button(ret) + + +@pytest.fixture +def _reset_wake_on_modem(power): + try: + ret = power.get_wake_on_modem() + if not isinstance(ret, bool): + functionality_available = False + else: + functionality_available = True + except CommandExecutionError as exc: + functionality_available = False + + if functionality_available is False: + pytest.skip("Skipping. wake_on_modem unavailable.") + + try: + yield + finally: + power.set_wake_on_modem(ret) + + +@pytest.fixture +def _reset_wake_on_network(power): + try: + ret = power.get_wake_on_network() + if not isinstance(ret, bool): + assert "Error" in ret + pytest.skip(f"Error while calling `get_wake_on_network()`: {ret}") + except CommandExecutionError as exc: + if "Not supported on this machine" in str(exc): + pytest.skip("Wake On Network Access: Not supported on this machine") + try: + yield + finally: + if isinstance(ret, bool): + ret = power.set_wake_on_network(ret) + assert ret + + +@pytest.mark.usefixtures("_reset_computer_sleep") +def test_computer_sleep(power): + """ + Test power.get_computer_sleep + Test power.set_computer_sleep + """ + + # Normal Functionality + ret = power.set_computer_sleep(90) + assert ret + + ret = power.get_computer_sleep() + assert ret == "after 90 minutes" + + ret = power.set_computer_sleep("Off") + assert ret + + ret = power.get_computer_sleep() + assert ret == "Never" + + # Test invalid input + with pytest.raises(SaltInvocationError) as exc: + power.set_computer_sleep("spongebob") + assert 
"Invalid String Value for Minutes" in str(exc.value) + + with pytest.raises(SaltInvocationError) as exc: + power.set_computer_sleep(0) + assert "Invalid Integer Value for Minutes" in str(exc.value) + + with pytest.raises(SaltInvocationError) as exc: + power.set_computer_sleep(181) + assert "Invalid Integer Value for Minutes" in str(exc.value) + + with pytest.raises(SaltInvocationError) as exc: + power.set_computer_sleep(True) + assert "Invalid Boolean Value for Minutes" in str(exc.value) + + +@pytest.mark.usefixtures("_reset_display_sleep") +def test_display_sleep(power): + """ + Test power.get_display_sleep + Test power.set_display_sleep + """ + + # Normal Functionality + ret = power.set_display_sleep(90) + assert ret + + ret = power.get_display_sleep() + assert ret == "after 90 minutes" + + ret = power.set_display_sleep("Off") + assert ret + + ret = power.get_display_sleep() + assert ret == "Never" + + # Test invalid input + with pytest.raises(SaltInvocationError) as exc: + power.set_display_sleep("spongebob") + assert "Invalid String Value for Minutes" in str(exc.value) + + with pytest.raises(SaltInvocationError) as exc: + power.set_display_sleep(0) + assert "Invalid Integer Value for Minutes" in str(exc.value) + + with pytest.raises(SaltInvocationError) as exc: + power.set_display_sleep(181) + assert "Invalid Integer Value for Minutes" in str(exc.value) + + with pytest.raises(SaltInvocationError) as exc: + power.set_display_sleep(True) + assert "Invalid Boolean Value for Minutes" in str(exc.value) + + +@pytest.mark.usefixtures("_reset_harddisk_sleep") +def test_harddisk_sleep(power): + """ + Test power.get_harddisk_sleep + Test power.set_harddisk_sleep + """ + + # Normal Functionality + ret = power.set_harddisk_sleep(90) + assert ret + + ret = power.get_harddisk_sleep() + assert ret == "after 90 minutes" + + ret = power.set_harddisk_sleep("Off") + assert ret + + ret = power.get_harddisk_sleep() + assert ret == "Never" + + # Test invalid input + with 
pytest.raises(SaltInvocationError) as exc: + power.set_harddisk_sleep("spongebob") + assert "Invalid String Value for Minutes" in str(exc.value) + + with pytest.raises(SaltInvocationError) as exc: + power.set_harddisk_sleep(0) + assert "Invalid Integer Value for Minutes" in str(exc.value) + + with pytest.raises(SaltInvocationError) as exc: + power.set_harddisk_sleep(181) + assert "Invalid Integer Value for Minutes" in str(exc.value) + + with pytest.raises(SaltInvocationError) as exc: + power.set_harddisk_sleep(True) + assert "Invalid Boolean Value for Minutes" in str(exc.value) + + +def test_restart_freeze(power): + """ + Test power.get_restart_freeze + Test power.set_restart_freeze + """ + # Normal Functionality + ret = power.set_restart_freeze("on") + assert ret + + ret = power.get_restart_freeze() + assert ret + + # This will return False because mac fails to actually make the change + ret = power.set_restart_freeze("off") + assert not ret + + # Even setting to off returns true, it actually is never set + # This is an apple bug + ret = power.get_restart_freeze() + assert ret + + +@pytest.mark.usefixtures("_reset_restart_power_failure") +def test_restart_power_failure(power): + """ + Test power.get_restart_power_failure + Test power.set_restart_power_failure + """ + ret = power.set_restart_power_failure("On") + assert ret + + ret = power.get_restart_power_failure() + assert ret + + ret = power.set_restart_power_failure("Off") + assert ret + + ret = power.get_restart_power_failure() + assert not ret + + +@pytest.mark.usefixtures("_reset_sleep_on_power_button") +def test_sleep_on_power_button(power): + """ + Test power.get_sleep_on_power_button + Test power.set_sleep_on_power_button + """ + ret = power.set_sleep_on_power_button("on") + assert ret + + ret = power.get_sleep_on_power_button() + assert ret + + ret = power.set_sleep_on_power_button("off") + assert ret + + ret = power.get_sleep_on_power_button() + assert not ret + + 
+@pytest.mark.usefixtures("_reset_wake_on_modem") +def test_wake_on_modem(power): + """ + Test power.get_wake_on_modem + Test power.set_wake_on_modem + """ + ret = power.set_wake_on_modem("on") + assert ret + + ret = power.get_wake_on_modem() + assert ret + + ret = power.set_wake_on_modem("off") + assert ret + + ret = power.get_wake_on_modem() + assert not ret + + +@pytest.mark.usefixtures("_reset_wake_on_network") +def test_wake_on_network(power): + """ + Test power.get_wake_on_network + Test power.set_wake_on_network + """ + ret = power.set_wake_on_network("on") + assert ret + + ret = power.get_wake_on_network() + assert ret + + ret = power.set_wake_on_network("off") + assert ret + + ret = power.get_wake_on_network() + assert not ret diff --git a/tests/pytests/functional/modules/test_mac_service.py b/tests/pytests/functional/modules/test_mac_service.py new file mode 100644 index 000000000000..0aa38a8ac8ea --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_service.py @@ -0,0 +1,252 @@ +""" +integration tests for mac_service +""" + +import plistlib + +import pytest + +import salt.utils.files +from salt.exceptions import CommandExecutionError + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.skip_if_binaries_missing("launchctl", "plutil"), + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def service(modules): + return modules.service + + +@pytest.fixture(scope="function", autouse=True) +def service_name(service): + + service_name = "com.salt.integration.test" + service_path = "/Library/LaunchDaemons/com.salt.integration.test.plist" + + service_data = { + "KeepAlive": True, + "Label": service_name, + "ProgramArguments": ["/bin/sleep", "1000"], + "RunAtLoad": True, + } + with salt.utils.files.fopen(service_path, "wb") as fp: + plistlib.dump(service_data, fp) + service.enable(service_name) + service.start(service_name) + + try: + yield service_name + finally: + # Try to stop the service if it's running + try: + 
service.stop(service_name) + except CommandExecutionError: + pass + salt.utils.files.safe_rm(service_path) + + +def test_show(service, service_name): + """ + Test service.show + """ + # Existing Service + service_info = service.show(service_name) + assert isinstance(service_info, dict) + assert service_info["plist"]["Label"] == service_name + + # Missing Service + with pytest.raises(CommandExecutionError) as exc: + ret = service.show("spongebob") + assert "Service not found" in str(exc.value) + + +def test_launchctl(service, service_name): + """ + Test service.launchctl + """ + # Expected Functionality + ret = service.launchctl("error", "bootstrap", 64) + assert ret + + ret = service.launchctl("error", "bootstrap", 64, return_stdout=True) + assert ret == "64: unknown error code" + + # Raise an error + with pytest.raises(CommandExecutionError) as exc: + ret = service.launchctl("error", "bootstrap") + assert "Failed to error service" in str(exc.value) + + +def test_list(service, service_name): + """ + Test service.list + """ + # Expected Functionality + ret = service.list() + assert "PID" in ret + ret = service.list(service_name) + assert "{" in ret + + # Service not found + with pytest.raises(CommandExecutionError) as exc: + ret = service.list("spongebob") + assert "Service not found" in str(exc.value) + + +def test_enable(service, service_name): + """ + Test service.enable + """ + ret = service.enable(service_name) + assert ret + + with pytest.raises(CommandExecutionError) as exc: + ret = service.enable("spongebob") + assert "Service not found" in str(exc.value) + + +def test_disable(service, service_name): + """ + Test service.disable + """ + ret = service.disable(service_name) + assert ret + + with pytest.raises(CommandExecutionError) as exc: + ret = service.disable("spongebob") + assert "Service not found" in str(exc.value) + + +def test_start(service, service_name): + """ + Test service.start + Test service.stop + Test service.status + """ + 
service.stop(service_name) + ret = service.start(service_name) + assert ret + + with pytest.raises(CommandExecutionError) as exc: + ret = service.start("spongebob") + assert "Service not found" in str(exc.value) + + +def test_stop(service, service_name): + """ + Test service.stop + """ + ret = service.stop(service_name) + assert ret + + with pytest.raises(CommandExecutionError) as exc: + ret = service.stop("spongebob") + assert "Service not found" in str(exc.value) + + service.start(service_name) + + +def test_status(service, service_name): + """ + Test service.status + """ + # A running service + ret = service.status(service_name) + assert ret + + # A stopped service + service.stop(service_name) + ret = service.status(service_name) + assert not ret + + # Service not found + ret = service.status("spongebob") + assert not ret + + service.start(service_name) + + +def test_available(service, service_name): + """ + Test service.available + """ + ret = service.available(service_name) + assert ret + + ret = service.available("spongebob") + assert not ret + + +def test_missing(service, service_name): + """ + Test service.missing + """ + ret = service.missing(service_name) + assert not ret + + ret = service.missing("spongebob") + assert ret + + +def test_enabled(service, service_name): + """ + Test service.enabled + """ + service.disabled(service_name) + ret = service.enabled(service_name) + assert ret + + with pytest.raises(CommandExecutionError) as exc: + ret = service.enabled("spongebob") + assert "Service not found: spongebob" in str(exc.value) + + +def test_disabled(service, service_name): + """ + Test service.disabled + """ + ret = service.disabled(service_name) + assert not ret + + ret = service.disable(service_name) + assert ret + + ret = service.disabled(service_name) + assert ret + + ret = service.enable(service_name) + assert ret + + with pytest.raises(CommandExecutionError) as exc: + ret = service.disable("spongebob") + assert "Service not found: spongebob" in 
str(exc.value) + + +def test_get_all(service, service_name): + """ + Test service.get_all + """ + services = service.get_all() + assert isinstance(services, list) + assert service_name in services + + +def test_get_enabled(service, service_name): + """ + Test service.get_enabled + """ + services = service.get_enabled() + assert isinstance(services, list) + assert service_name in services + + +def test_service_laoded(service, service_name): + """ + Test service.get_enabled + """ + ret = service.loaded(service_name) + assert ret diff --git a/tests/pytests/functional/modules/test_mac_shadow.py b/tests/pytests/functional/modules/test_mac_shadow.py new file mode 100644 index 000000000000..69ba93fa839f --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_shadow.py @@ -0,0 +1,174 @@ +""" +integration tests for mac_shadow +""" + +import datetime +import types + +import pytest +from saltfactories.utils import random_string + +from salt.exceptions import CommandExecutionError + +pytestmark = [ + pytest.mark.skip_if_binaries_missing("dscl", "pwpolicy"), + pytest.mark.slow_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def shadow(modules): + return modules.shadow + + +@pytest.fixture +def accounts(): + with pytest.helpers.create_account(create_group=True) as _account: + yield types.SimpleNamespace( + existing=_account.username, + non_existing=random_string("account-", lowercase=False), + ) + + +def test_info(shadow, accounts): + """ + Test shadow.info + """ + # Correct Functionality + ret = shadow.info(accounts.existing) + assert ret["name"] == accounts.existing + + # User does not exist + ret = shadow.info(accounts.non_existing) + assert ret["name"] == "" + + +def test_get_last_change(shadow, accounts): + """ + Test shadow.get_last_change + """ + # Correct Functionality + text_date = shadow.get_last_change(accounts.existing) + assert text_date != "Invalid Timestamp" + obj_date = 
datetime.datetime.strptime(text_date, "%Y-%m-%d %H:%M:%S") + assert isinstance(obj_date, datetime.date) + + # User does not exist + with pytest.raises(CommandExecutionError) as exc: + shadow.get_last_change(accounts.non_existing) + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) + + +def test_get_login_failed_last(shadow, accounts): + """ + Test shadow.get_login_failed_last + """ + # Correct Functionality + text_date = shadow.get_login_failed_last(accounts.existing) + assert text_date != "Invalid Timestamp" + obj_date = datetime.datetime.strptime(text_date, "%Y-%m-%d %H:%M:%S") + assert isinstance(obj_date, datetime.date) + + # User does not exist + with pytest.raises(CommandExecutionError) as exc: + shadow.get_login_failed_last(accounts) + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) + + +def test_get_login_failed_count(shadow, accounts): + """ + Test shadow.get_login_failed_count + """ + # Correct Functionality + assert shadow.get_login_failed_count(accounts.existing) == "0" + + # User does not exist + with pytest.raises(CommandExecutionError) as exc: + shadow.get_login_failed_count(accounts.non_existing) + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) + + +def test_get_set_maxdays(shadow, accounts): + """ + Test shadow.get_maxdays + Test shadow.set_maxdays + """ + # Correct Functionality + assert shadow.set_maxdays(accounts.existing, 20) + assert shadow.get_maxdays(accounts.existing) == 20 + + # User does not exist + with pytest.raises(CommandExecutionError) as exc: + shadow.set_maxdays(accounts.non_existing, 7) + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) + + with pytest.raises(CommandExecutionError) as exc: + shadow.get_maxdays(accounts.non_existing) + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) + + +def test_get_set_change(shadow, accounts): + """ + Test shadow.get_change + Test shadow.set_change + """ + # 
Correct Functionality + assert shadow.set_change(accounts.existing, "02/11/2011") + assert shadow.get_change(accounts.existing) == "02/11/2011" + + # User does not exist + with pytest.raises(CommandExecutionError) as exc: + shadow.set_change(accounts.non_existing, "02/11/2012") + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) + + with pytest.raises(CommandExecutionError) as exc: + shadow.get_change(accounts.non_existing) + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) + + +def test_get_set_expire(shadow, accounts): + """ + Test shadow.get_expire + Test shadow.set_expire + """ + # Correct Functionality + assert shadow.set_expire(accounts.existing, "02/11/2011") + assert shadow.get_expire(accounts.existing) == "02/11/2011" + + # User does not exist + with pytest.raises(CommandExecutionError) as exc: + shadow.set_expire(accounts.non_existing, "02/11/2012") + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) + + with pytest.raises(CommandExecutionError) as exc: + shadow.get_expire(accounts.non_existing) + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) + + +def test_del_password(shadow, accounts): + """ + Test shadow.del_password + """ + # Correct Functionality + assert shadow.del_password(accounts.existing) + assert shadow.info(accounts.existing)["passwd"] == "*" + + # User does not exist + with pytest.raises(CommandExecutionError) as exc: + shadow.del_password(accounts.non_existing) + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) + + +def test_set_password(shadow, accounts): + """ + Test shadow.set_password + """ + # Correct Functionality + assert shadow.set_password(accounts.existing, "Pa$$W0rd") + + # User does not exist + with pytest.raises(CommandExecutionError) as exc: + shadow.set_password(accounts.non_existing, "P@SSw0rd") + assert f"ERROR: User not found: {accounts.non_existing}" in str(exc.value) diff --git 
a/tests/pytests/functional/modules/test_mac_softwareupdate.py b/tests/pytests/functional/modules/test_mac_softwareupdate.py new file mode 100644 index 000000000000..8cc839f07968 --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_softwareupdate.py @@ -0,0 +1,193 @@ +""" +integration tests for mac_softwareupdate +""" + +import pytest + +from salt.exceptions import SaltInvocationError + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.skip_if_binaries_missing("softwareupdate"), + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def softwareupdate(modules): + return modules.softwareupdate + + +@pytest.fixture +def _reset_schedule_enabled(softwareupdate): + ret = softwareupdate.schedule_enabled() + try: + yield + finally: + softwareupdate.schedule_enable(ret) + + +@pytest.fixture +def _reset_catalog(softwareupdate): + ret = softwareupdate.get_catalog() + try: + yield + finally: + if ret == "Default": + softwareupdate.reset_catalog() + else: + softwareupdate.set_catalog(ret) + + +@pytest.fixture +def _reset_ignored(softwareupdate): + ret = softwareupdate.list_ignored() or () + try: + yield + finally: + for item in ret: + softwareupdate.ignore(item) + + +def test_list_available(softwareupdate): + """ + Test softwareupdate.list_available + """ + # Can't predict what will be returned, so can only test that the return + # is the correct type, dict + ret = softwareupdate.list_available() + assert isinstance(ret, dict) + + +@pytest.mark.usefixtures("_reset_ignored") +@pytest.mark.skip(reason="Ignore removed from latest OS X.") +def test_ignore(softwareupdate): + """ + Test softwareupdate.ignore + Test softwareupdate.list_ignored + Test softwareupdate.reset_ignored + """ + # Test reset_ignored + ret = softwareupdate.reset_ignored() + assert ret + + ret = softwareupdate.list_ignored() + assert ret == [] + + # Test ignore + ret = softwareupdate.ignore("spongebob") + assert ret + + ret = 
softwareupdate.ignore("squidward") + assert ret + + # Test list_ignored and verify ignore + ret = softwareupdate.list_ignored() + assert "spongebob" in ret + + ret = softwareupdate.list_ignored() + assert "squidward" in ret + + +@pytest.mark.usefixtures("_reset_schedule_enabled") +@pytest.mark.skip(reason="Ignore schedule support removed from latest OS X.") +def test_schedule(softwareupdate): + """ + Test softwareupdate.schedule_enable + Test softwareupdate.schedule_enabled + """ + # Test enable + ret = softwareupdate.schedule_enable(True) + assert ret + + ret = softwareupdate.schedule_enabled() + assert ret + + # Test disable in case it was already enabled + ret = softwareupdate.schedule_enable(False) + assert not ret + + ret = softwareupdate.schedule_enabled() + assert not ret + + +def test_update(softwareupdate): + """ + Test softwareupdate.update_all + Test softwareupdate.update + Test softwareupdate.update_available + + Need to know the names of updates that are available to properly test + the update functions... 
+ """ + # There's no way to know what the dictionary will contain, so all we can + # check is that the return is a dictionary + ret = softwareupdate.update_all() + assert isinstance(ret, dict) + + # Test update_available + ret = softwareupdate.update_available("spongebob") + assert not ret + + # Test update not available + with pytest.raises(SaltInvocationError) as exc: + ret = softwareupdate.update("spongebob") + assert "Update not available" in str(exc.value) + + +def test_list_downloads(softwareupdate): + """ + Test softwareupdate.list_downloads + """ + ret = softwareupdate.list_downloads() + assert isinstance(ret, list) + + +def test_download(softwareupdate): + """ + Test softwareupdate.download + + Need to know the names of updates that are available to properly test + the download function + """ + # Test update not available + with pytest.raises(SaltInvocationError) as exc: + softwareupdate.download("spongebob") + assert "Update not available" in str(exc.value) + + +def test_download_all(softwareupdate): + """ + Test softwareupdate.download_all + """ + ret = softwareupdate.download_all() + assert isinstance(ret, list) + + +@pytest.mark.usefixtures("_reset_catalog") +@pytest.mark.skip(reason="Ignore catalog support removed from latest OS X.") +def test_get_set_reset_catalog(softwareupdate): + """ + Test softwareupdate.download_all + """ + # Reset the catalog + ret = softwareupdate.reset_catalog() + assert ret + + ret = softwareupdate.get_catalog() + assert ret == "Default" + + # Test setting and getting the catalog + ret = softwareupdate.set_catalog("spongebob") + assert ret + + ret = softwareupdate.get_catalog() + assert ret == "spongebob" + + # Test reset the catalog + ret = softwareupdate.reset_catalog() + assert ret + + assert softwareupdate.get_catalog() + assert ret == "Default" diff --git a/tests/pytests/functional/modules/test_mac_system.py b/tests/pytests/functional/modules/test_mac_system.py new file mode 100644 index 000000000000..bf78b8e61756 --- 
/dev/null +++ b/tests/pytests/functional/modules/test_mac_system.py @@ -0,0 +1,338 @@ +""" +integration tests for mac_system +""" + +import logging + +import pytest +from saltfactories.utils import random_string + +from salt.exceptions import CommandExecutionError, SaltInvocationError + +log = logging.getLogger(__name__) + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, + pytest.mark.skip_if_binaries_missing("systemsetup"), +] + + +@pytest.fixture +def service(modules): + return modules.service + + +@pytest.fixture +def system(modules): + return modules.system + + +@pytest.fixture +def _remote_login_cleanup(system, grains): + if grains["osmajorrelease"] >= 13: + pytest.skip("SKipping until we figure out how to have full dist access") + + remote_login_enabled = system.get_remote_login() + try: + yield + finally: + if system.get_remote_login() != remote_login_enabled: + system.set_remote_login(remote_login_enabled) + + +@pytest.fixture +def _remote_events_cleanup(system, grains): + if grains["osmajorrelease"] >= 13: + pytest.skip("SKipping until we figure out how to have full dist access") + + remote_events_enabled = system.get_remote_events() + try: + yield + finally: + if system.get_remote_events() != remote_events_enabled: + system.set_remote_events(remote_events_enabled) + + +@pytest.fixture +def subnet_name(system): + subnet_name = system.get_subnet_name() + try: + yield random_string("subnet-", lowercase=False) + finally: + if system.get_subnet_name() != subnet_name: + system.set_subnet_name(subnet_name) + + +@pytest.fixture +def _keyboard_cleanup(system): + keyboard_disabled = system.get_disable_keyboard_on_lock() + try: + yield + finally: + if system.get_disable_keyboard_on_lock() != keyboard_disabled: + system.set_disable_keyboard_on_lock(keyboard_disabled) + + +@pytest.fixture +def computer_name(system): + computer_name = system.get_computer_name() + try: + yield random_string("cmptr-", 
lowercase=False) + finally: + if system.get_computer_name() != computer_name: + system.set_computer_name(computer_name) + + +@pytest.mark.usefixtures("_remote_login_cleanup") +def test_get_set_remote_login(system): + """ + Test system.get_remote_login + Test system.set_remote_login + """ + # Normal Functionality + ret = system.set_remote_login(True) + assert ret + + ret = system.get_remote_login() + assert ret + + ret = system.set_remote_login(False) + assert ret + + ret = system.get_remote_login() + assert not ret + + # Test valid input + ret = system.set_remote_login(True) + assert ret + + ret = system.set_remote_login(False) + assert ret + + ret = system.set_remote_login("yes") + assert ret + + ret = system.set_remote_login("no") + assert ret + + ret = system.set_remote_login("On") + assert ret + + ret = system.set_remote_login("Off") + assert ret + + ret = system.set_remote_login(1) + assert ret + + ret = system.set_remote_login(0) + assert ret + + # Test invalid input + with pytest.raises(SaltInvocationError) as exc: + system.set_remote_login("spongebob") + assert "Invalid String Value for Enabled" in str(exc.value) + + +@pytest.mark.skip_initial_gh_actions_failure +@pytest.mark.usefixtures("_remote_events_cleanup") +def test_get_set_remote_events(system): + """ + Test system.get_remote_events + Test system.set_remote_events + """ + # Normal Functionality + ret = system.set_remote_events(True) + assert ret + + ret = system.get_remote_events() + assert ret + + ret = system.set_remote_events(False) + assert ret + + ret = not system.get_remote_events() + assert not ret + + # Test valid input + ret = system.set_remote_events(True) + assert ret + + ret = system.set_remote_events(False) + assert ret + + ret = system.set_remote_events("yes") + assert ret + + ret = system.set_remote_events("no") + assert ret + + ret = system.set_remote_events("On") + assert ret + + ret = system.set_remote_events("Off") + assert ret + + ret = system.set_remote_events(1) + assert ret + 
+ ret = system.set_remote_events(0) + assert ret + + # Test invalid input + with pytest.raises(CommandExecutionError) as exc: + system.set_remote_events("spongebob") + assert "Invalid String Value for Enabled" in str(exc.value) + + +def test_get_set_subnet_name(system, subnet_name): + """ + Test system.get_subnet_name + Test system.set_subnet_name + """ + ret = system.set_subnet_name(subnet_name) + assert ret + + ret = system.get_subnet_name() + assert ret == subnet_name + + +@pytest.mark.skip_initial_gh_actions_failure +def test_get_list_startup_disk(system): + """ + Test system.get_startup_disk + Test system.list_startup_disks + Don't know how to test system.set_startup_disk as there's usually only + one startup disk available on a system + """ + # Test list and get + ret = system.list_startup_disks() + assert isinstance(ret, list) + + startup_disk = system.get_startup_disk() + assert startup_disk in ret + + # Test passing set a bad disk + with pytest.raises(SaltInvocationError) as exc: + system.set_startup_disk("spongebob") + assert "Invalid value passed for path." in str(exc.value) + + +@pytest.mark.skip(reason="Skip this test until mac fixes it.") +def test_get_set_restart_delay(system): + """ + Test system.get_restart_delay + Test system.set_restart_delay + system.set_restart_delay does not work due to an apple bug, see docs + may need to disable this test as we can't control the delay value + """ + # Normal Functionality + ret = system.set_restart_delay(90) + assert ret + + ret = system.get_restart_delay() + assert ret == "90 seconds" + + # Pass set bad value for seconds + with pytest.raises(CommandExecutionError) as exc: + system.set_restart_delay(70) + assert "Invalid value passed for seconds." 
in str(exc.value) + + +@pytest.mark.usefixtures("_keyboard_cleanup") +def test_get_set_disable_keyboard_on_lock(system): + """ + Test system.get_disable_keyboard_on_lock + Test system.set_disable_keyboard_on_lock + """ + # Normal Functionality + ret = system.set_disable_keyboard_on_lock(True) + assert ret + + ret = system.get_disable_keyboard_on_lock() + assert ret + + ret = system.set_disable_keyboard_on_lock(False) + assert ret + + ret = system.get_disable_keyboard_on_lock() + assert not ret + + # Test valid input + ret = system.set_disable_keyboard_on_lock(True) + assert ret + + ret = system.set_disable_keyboard_on_lock(False) + assert ret + + ret = system.set_disable_keyboard_on_lock("yes") + assert ret + + ret = system.set_disable_keyboard_on_lock("no") + assert ret + + ret = system.set_disable_keyboard_on_lock("On") + assert ret + + ret = system.set_disable_keyboard_on_lock("Off") + assert ret + + ret = system.set_disable_keyboard_on_lock(1) + assert ret + + ret = system.set_disable_keyboard_on_lock(0) + assert ret + + # Test invalid input + with pytest.raises(SaltInvocationError) as exc: + system.set_disable_keyboard_on_lock("spongebob") + assert "Invalid String Value for Enabled" in str(exc.value) + + +@pytest.mark.skip(reason="Skip this test until mac fixes it.") +def test_get_set_boot_arch(system): + """ + Test system.get_boot_arch + Test system.set_boot_arch + system.set_boot_arch does not work due to an apple bug, see docs + may need to disable this test as we can't set the boot architecture + """ + # Normal Functionality + ret = system.set_boot_arch("i386") + assert ret + + ret = system.get_boot_arch() + assert ret == "i386" + + ret = system.set_boot_arch("default") + assert ret + + ret = system.get_boot_arch() + assert ret == "default" + + # Test invalid input + with pytest.raises(CommandExecutionError) as exc: + system.set_boot_arch("spongebob") + assert "Invalid value passed for arch" in str(exc.value) + + +# A similar test used to be skipped on py3 
due to 'hanging', if we see +# something similar again we may want to skip this gain until we +# investigate +# @pytest.mark.skipif(salt.utils.platform.is_darwin() and six.PY3, reason='This test hangs on OS X on Py3. Skipping until #53566 is merged.') +@pytest.mark.destructive_test +def test_get_set_computer_name(system, computer_name): + """ + Test system.get_computer_name + Test system.set_computer_name + """ + current_computer_name = system.get_computer_name() + assert current_computer_name + assert current_computer_name != computer_name + + ret = system.set_computer_name(computer_name) + assert ret + + ret = system.get_computer_name() + assert ret == computer_name diff --git a/tests/pytests/functional/modules/test_mac_timezone.py b/tests/pytests/functional/modules/test_mac_timezone.py new file mode 100644 index 000000000000..2f153dd9b35a --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_timezone.py @@ -0,0 +1,242 @@ +""" +Integration tests for mac_timezone + +If using parallels, make sure Time sync is turned off. Otherwise, parallels will +keep changing your date/time settings while the tests are running. 
To turn off +Time sync do the following: + - Go to actions -> configure + - Select options at the top and 'More Options' on the left + - Set time to 'Do not sync' +""" + +import datetime + +import pytest + +from salt.exceptions import SaltInvocationError + +pytestmark = [ + pytest.mark.skip_if_binaries_missing("systemsetup"), + pytest.mark.slow_test, + pytest.mark.destructive_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def timezone(modules): + return modules.timezone + + +@pytest.fixture +def _reset_time_server(timezone): + ret = timezone.get_time_server() + try: + yield + finally: + if timezone.get_time_server() != ret: + timezone.set_time_server(ret) + + +@pytest.fixture +def _reset_using_network_time(timezone): + ret = timezone.get_using_network_time() + try: + timezone.set_using_network_time(False) + yield ret + finally: + timezone.set_using_network_time(ret) + + +@pytest.fixture +def _reset_time(timezone, _reset_using_network_time): + ret = timezone.get_time() + try: + yield + finally: + if not _reset_using_network_time: + timezone.set_time(ret) + + +@pytest.fixture +def _reset_date(timezone, _reset_using_network_time): + ret = timezone.get_date() + try: + yield + finally: + if not _reset_using_network_time: + timezone.set_date(ret) + + +@pytest.fixture +def _reset_zone(timezone): + ret = timezone.get_zone() + try: + timezone.set_zone("America/Denver") + yield + finally: + timezone.set_zone(ret) + + +@pytest.mark.usefixtures("_reset_date") +def test_get_set_date(timezone): + """ + Test timezone.get_date + Test timezone.set_date + """ + # Correct Functionality + ret = timezone.set_date("2/20/2011") + assert ret + ret = timezone.get_date() + assert ret == "2/20/2011" + + # Test bad date format + with pytest.raises(SaltInvocationError) as exc: + ret = timezone.set_date("13/12/2014") + assert ( + "ERROR executing 'timezone.set_date': Invalid Date/Time Format: 13/12/2014" + in str(exc.value) + 
) + + +@pytest.mark.slow_test +def test_get_time(timezone): + """ + Test timezone.get_time + """ + text_time = timezone.get_time() + assert text_time != "Invalid Timestamp" + obj_date = datetime.datetime.strptime(text_time, "%H:%M:%S") + assert isinstance(obj_date, datetime.date) + + +@pytest.mark.usefixtures("_reset_time") +def test_set_time(timezone): + """ + Test timezone.set_time + """ + # Correct Functionality + ret = timezone.set_time("3:14") + assert ret + + # Test bad time format + with pytest.raises(SaltInvocationError) as exc: + ret = timezone.set_time("3:71") + assert ( + "ERROR executing 'timezone.set_time': Invalid Date/Time Format: 3:71" + in str(exc.value) + ) + + +@pytest.mark.usefixtures("_reset_zone") +def test_get_set_zone(timezone): + """ + Test timezone.get_zone + Test timezone.set_zone + """ + # Correct Functionality + ret = timezone.set_zone("Pacific/Wake") + assert ret + + ret = timezone.get_zone() + assert ret == "Pacific/Wake" + + # Test bad time zone + with pytest.raises(SaltInvocationError) as exc: + ret = timezone.set_zone("spongebob") + assert ( + "ERROR executing 'timezone.set_zone': Invalid Timezone: spongebob" + in str(exc.value) + ) + + +@pytest.mark.usefixtures("_reset_zone") +def test_get_offset(timezone): + """ + Test timezone.get_offset + """ + ret = timezone.set_zone("Pacific/Wake") + assert ret + ret = timezone.get_offset() + assert isinstance(ret, str) + assert ret == "+1200" + + ret = timezone.set_zone("America/Los_Angeles") + assert ret + ret = timezone.get_offset() + assert isinstance(ret, str) + assert ret == "-0800" + + +@pytest.mark.usefixtures("_reset_zone") +def test_get_set_zonecode(timezone): + """ + Test timezone.get_zonecode + Test timezone.set_zonecode + """ + ret = timezone.set_zone("America/Los_Angeles") + assert ret + ret = timezone.get_zone() + assert isinstance(ret, str) + assert ret == "America/Los_Angeles" + + ret = timezone.set_zone("Pacific/Wake") + assert ret + ret = timezone.get_zone() + assert 
isinstance(ret, str) + assert ret == "Pacific/Wake" + + +@pytest.mark.slow_test +def test_list_zones(timezone): + """ + Test timezone.list_zones + """ + zones = timezone.list_zones() + assert isinstance(zones, list) + assert "America/Denver" in zones + assert "America/Los_Angeles" in zones + + +@pytest.mark.usefixtures("_reset_zone") +def test_zone_compare(timezone): + """ + Test timezone.zone_compare + """ + ret = timezone.zone_compare("America/Denver") + assert ret + ret = timezone.zone_compare("Pacific/Wake") + assert not ret + + +@pytest.mark.usefixtures("_reset_using_network_time") +def test_get_set_using_network_time(timezone): + """ + Test timezone.get_using_network_time + Test timezone.set_using_network_time + """ + ret = timezone.set_using_network_time(True) + assert ret + + ret = timezone.get_using_network_time() + assert ret + + ret = timezone.set_using_network_time(False) + assert ret + + ret = timezone.get_using_network_time() + assert not ret + + +@pytest.mark.usefixtures("_reset_time_server") +def test_get_set_time_server(timezone): + """ + Test timezone.get_time_server + Test timezone.set_time_server + """ + ret = timezone.set_time_server("spongebob.com") + assert ret + + ret = timezone.get_time_server() + assert ret == "spongebob.com" diff --git a/tests/pytests/functional/modules/test_mac_user.py b/tests/pytests/functional/modules/test_mac_user.py new file mode 100644 index 000000000000..a885e500950f --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_user.py @@ -0,0 +1,189 @@ +""" + :codeauthor: Nicole Thomas +""" + +import os + +import pytest +from saltfactories.utils import random_string + +import salt.utils.files + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.destructive_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def user(modules): + return modules.user + + +@pytest.fixture +def _reset_enable_auto_login(user): + # Make sure auto login is disabled before 
we start + if user.get_auto_login(): + pytest.skip("Auto login already enabled") + + try: + yield + finally: + # Make sure auto_login is disabled + ret = user.disable_auto_login() + assert ret + + # Make sure autologin is disabled + ret = user.get_auto_login() + if ret: + pytest.fail("Failed to disable auto login") + + +@pytest.fixture +def existing_user(user): + username = random_string("account-", uppercase=False) + try: + ret = user.add(username) + if ret is not True: + pytest.skip("Failed to create an account to manipulate") + yield username + finally: + user_info = user.info(username) + if user_info: + user.delete(username) + + +@pytest.fixture +def non_existing_user(user): + username = random_string("account-", uppercase=False) + try: + yield username + finally: + user_info = user.info(username) + if user_info: + user.delete(username) + + +def test_mac_user_add(user, non_existing_user): + """ + Tests the add function + """ + user.add(non_existing_user) + user_info = user.info(non_existing_user) + assert user_info["name"] == non_existing_user + + +def test_mac_user_delete(user, existing_user): + """ + Tests the delete function + """ + ret = user.delete(existing_user) + assert ret + + +def test_mac_user_primary_group(user, existing_user): + """ + Tests the primary_group function + """ + primary_group = user.primary_group(existing_user) + uid_info = user.info(existing_user) + assert primary_group in uid_info["groups"] + + +def test_mac_user_changes(user, existing_user): + """ + Tests mac_user functions that change user properties + """ + # Test mac_user.chuid + user.chuid(existing_user, 4376) + uid_info = user.info(existing_user) + assert uid_info["uid"] == 4376 + + # Test mac_user.chgid + user.chgid(existing_user, 4376) + gid_info = user.info(existing_user) + assert gid_info["gid"] == 4376 + + # Test mac.user.chshell + user.chshell(existing_user, "/bin/zsh") + shell_info = user.info(existing_user) + assert shell_info["shell"] == "/bin/zsh" + + # Test 
mac_user.chhome + user.chhome(existing_user, "/Users/foo") + home_info = user.info(existing_user) + assert home_info["home"] == "/Users/foo" + + # Test mac_user.chfullname + user.chfullname(existing_user, "Foo Bar") + fullname_info = user.info(existing_user) + assert fullname_info["fullname"] == "Foo Bar" + + # Test mac_user.chgroups + ret = user.info(existing_user) + pre_info = ret["groups"] + expected = pre_info + ["wheel"] + user.chgroups(existing_user, "wheel") + groups_info = user.info(existing_user) + assert groups_info["groups"] == expected + + +@pytest.mark.usefixtures("_reset_enable_auto_login") +def test_mac_user_enable_auto_login(user): + """ + Tests mac_user functions that enable auto login + """ + # Does enable return True + ret = user.enable_auto_login("Spongebob", "Squarepants") + assert ret + + # Did it set the user entry in the plist file + ret = user.get_auto_login() + assert ret == "Spongebob" + + # Did it generate the `/etc/kcpassword` file + assert os.path.exists("/etc/kcpassword") + + # Are the contents of the file correct + test_data = bytes.fromhex("2e f8 27 42 a0 d9 ad 8b cd cd 6c 7d") + with salt.utils.files.fopen("/etc/kcpassword", "rb") as f: + file_data = f.read() + assert test_data == file_data + + # Does disable return True + ret = user.disable_auto_login() + assert ret + + # Does it remove the user entry in the plist file + ret = user.get_auto_login() + assert not ret + + # Is the `/etc/kcpassword` file removed + assert not os.path.exists("/etc/kcpassword") + + +@pytest.mark.usefixtures("_reset_enable_auto_login") +def test_mac_user_disable_auto_login(user): + """ + Tests mac_user functions that disable auto login + """ + # Enable auto login for the test + user.enable_auto_login("Spongebob", "Squarepants") + + # Make sure auto login got set up + ret = user.get_auto_login() + if not ret == "Spongebob": + raise pytest.fail("Failed to enable auto login") + + # Does disable return True + ret = user.disable_auto_login() + assert ret + + # 
Does it remove the user entry in the plist file + ret = user.get_auto_login() + assert not ret + + # Is the `/etc/kcpassword` file removed + assert not os.path.exists("/etc/kcpassword") diff --git a/tests/pytests/functional/modules/test_mac_xattr.py b/tests/pytests/functional/modules/test_mac_xattr.py new file mode 100644 index 000000000000..9a91576f2858 --- /dev/null +++ b/tests/pytests/functional/modules/test_mac_xattr.py @@ -0,0 +1,176 @@ +""" +integration tests for mac_xattr +""" + +import pytest + +from salt.exceptions import CommandExecutionError + +pytestmark = [ + pytest.mark.skip_if_binaries_missing("xattr"), + pytest.mark.slow_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="module") +def xattr(modules): + return modules.xattr + + +@pytest.fixture +def existing_file(tmp_path): + fpath = tmp_path / "xattr_test_file.txt" + fpath.touch() + return fpath + + +@pytest.fixture +def non_existing_file(tmp_path): + return tmp_path / "xattr_no_file" + + +def test_list_no_xattr(xattr, existing_file, non_existing_file): + """ + Make sure there are no attributes + """ + # Clear existing attributes + ret = xattr.clear(existing_file) + assert ret + + # Test no attributes + ret = xattr.list(existing_file) + assert ret == {} + + # Test file not found + with pytest.raises(CommandExecutionError) as exc: + ret = xattr.list(non_existing_file) + assert f"File not found: {non_existing_file}" in str(exc.value) + + +def test_write(xattr, existing_file, non_existing_file): + """ + Write an attribute + """ + # Clear existing attributes + ret = xattr.clear(existing_file) + assert ret + + # Write some attributes + ret = xattr.write(existing_file, "spongebob", "squarepants") + assert ret + + ret = xattr.write(existing_file, "squidward", "plankton") + assert ret + + ret = xattr.write(existing_file, "crabby", "patty") + assert ret + + # Test that they were actually added + ret = xattr.list(existing_file) + assert ret == { + 
"spongebob": "squarepants", + "squidward": "plankton", + "crabby": "patty", + } + + # Test file not found + with pytest.raises(CommandExecutionError) as exc: + ret = xattr.write(non_existing_file, "patrick", "jellyfish") + assert f"File not found: {non_existing_file}" in str(exc.value) + + +def test_read(xattr, existing_file, non_existing_file): + """ + Test xattr.read + """ + # Clear existing attributes + ret = xattr.clear(existing_file) + assert ret + + # Write an attribute + ret = xattr.write(existing_file, "spongebob", "squarepants") + assert ret + + # Read the attribute + ret = xattr.read(existing_file, "spongebob") + assert ret == "squarepants" + + # Test file not found + with pytest.raises(CommandExecutionError) as exc: + ret = xattr.read(non_existing_file, "spongebob") + assert f"File not found: {non_existing_file}" in str(exc.value) + + # Test attribute not found + with pytest.raises(CommandExecutionError) as exc: + ret = xattr.read(existing_file, "patrick") + assert "Attribute not found: patrick" in str(exc.value) + + +def test_delete(xattr, existing_file, non_existing_file): + """ + Test xattr.delete + """ + # Clear existing attributes + ret = xattr.clear(existing_file) + assert ret + + # Write some attributes + ret = xattr.write(existing_file, "spongebob", "squarepants") + assert ret + + ret = xattr.write(existing_file, "squidward", "plankton") + assert ret + + ret = xattr.write(existing_file, "crabby", "patty") + assert ret + + # Delete an attribute + ret = xattr.delete(existing_file, "squidward") + assert ret + + # Make sure it was actually deleted + ret = xattr.list(existing_file) + assert ret == { + "spongebob": "squarepants", + "crabby": "patty", + } + + # Test file not found + with pytest.raises(CommandExecutionError) as exc: + ret = xattr.delete(non_existing_file, "spongebob") + assert f"File not found: {non_existing_file}" in str(exc.value) + + # Test attribute not found + with pytest.raises(CommandExecutionError) as exc: + ret = 
xattr.delete(existing_file, "patrick") + assert "Attribute not found: patrick" in str(exc.value) + + +def test_clear(xattr, existing_file, non_existing_file): + """ + Test xattr.clear + """ + # Clear existing attributes + ret = xattr.clear(existing_file) + assert ret + + # Write some attributes + ret = xattr.write(existing_file, "spongebob", "squarepants") + assert ret + + ret = xattr.write(existing_file, "squidward", "plankton") + assert ret + + ret = xattr.write(existing_file, "crabby", "patty") + assert ret + + # Test Clear + ret = xattr.clear(existing_file) + assert ret + + # Test file not found + with pytest.raises(CommandExecutionError) as exc: + ret = xattr.clear(non_existing_file) + assert f"File not found: {non_existing_file}" in str(exc.value) diff --git a/tests/pytests/functional/modules/test_mysql.py b/tests/pytests/functional/modules/test_mysql.py index c37a508588b0..d920bbdbc032 100644 --- a/tests/pytests/functional/modules/test_mysql.py +++ b/tests/pytests/functional/modules/test_mysql.py @@ -19,6 +19,7 @@ pytest.mark.skipif( mysqlmod.MySQLdb is None, reason="No python mysql client installed." 
), + pytest.mark.skip_on_fips_enabled_platform, ] diff --git a/tests/pytests/functional/modules/test_pip.py b/tests/pytests/functional/modules/test_pip.py index 5c67fa3f5628..f5040cafbf84 100644 --- a/tests/pytests/functional/modules/test_pip.py +++ b/tests/pytests/functional/modules/test_pip.py @@ -21,6 +21,7 @@ ), ) @pytest.mark.requires_network +@pytest.mark.slow_test def test_list_available_packages(modules, pip_version, tmp_path): with VirtualEnv(venv_dir=tmp_path, pip_requirement=pip_version) as virtualenv: virtualenv.install("-U", pip_version) @@ -29,3 +30,28 @@ def test_list_available_packages(modules, pip_version, tmp_path): package_name, bin_env=str(virtualenv.venv_bin_dir) ) assert available_versions + + +@pytest.mark.parametrize( + "pip_version", + ( + "pip==9.0.3", + "pip<20.0", + "pip<21.0", + "pip>=21.0", + ), +) +def test_list_available_packages_with_index_url(modules, pip_version, tmp_path): + if sys.version_info < (3, 6) and pip_version == "pip>=21.0": + pytest.skip("{} is not available on Py3.5".format(pip_version)) + if sys.version_info >= (3, 10) and pip_version == "pip==9.0.3": + pytest.skip("{} is not available on Py3.10".format(pip_version)) + with VirtualEnv(venv_dir=tmp_path, pip_requirement=pip_version) as virtualenv: + virtualenv.install("-U", pip_version) + package_name = "pep8" + available_versions = modules.pip.list_all_versions( + package_name, + bin_env=str(virtualenv.venv_bin_dir), + index_url="https://pypi.python.org/simple", + ) + assert available_versions diff --git a/tests/pytests/functional/modules/test_pkg.py b/tests/pytests/functional/modules/test_pkg.py index 707361c227ba..8e16fd3fdc4a 100644 --- a/tests/pytests/functional/modules/test_pkg.py +++ b/tests/pytests/functional/modules/test_pkg.py @@ -1,6 +1,7 @@ import configparser import logging import os +import re import shutil import tempfile import time @@ -63,6 +64,8 @@ def test_pkg(grains): elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": _pkg 
= "snoopy" + elif grains["osfinger"] == "Amazon Linux-2023": + return "dnf-utils" else: _pkg = "units" elif grains["os_family"] == "Debian": @@ -535,3 +538,32 @@ def test_list_repos_duplicate_entries(preserve_rhel_yum_conf, grains, modules): with pytest.raises(configparser.DuplicateOptionError) as exc_info: result = modules.pkg.list_repos() assert "{}".format(exc_info.value) == expected + + +@pytest.mark.destructive_test +@pytest.mark.slow_test +def test_pkg_install_port(grains, modules): + """ + test install package with a port in the url + """ + pkgs = modules.pkg.list_pkgs() + nano = pkgs.get("nano") + if nano: + modules.pkg.remove("nano") + + if grains["os_family"] == "Debian": + url = modules.cmd.run("apt download --print-uris nano").split()[-4] + if url.startswith("'mirror+file"): + url = "http://ftp.debian.org/debian/pool/" + url.split("pool")[1].rstrip( + "'" + ) + try: + ret = modules.pkg.install(sources=f'[{{"nano":{url}}}]') + version = re.compile(r"\d\.\d") + assert version.search(url).group(0) in ret["nano"]["new"] + finally: + modules.pkg.remove("nano") + if nano: + # If nano existed on the machine before the test ran + # re-install that version + modules.pkg.install(f"nano={nano}") diff --git a/tests/pytests/functional/modules/test_saltutil.py b/tests/pytests/functional/modules/test_saltutil.py index ca61f70143a4..f9e72a9f73e6 100644 --- a/tests/pytests/functional/modules/test_saltutil.py +++ b/tests/pytests/functional/modules/test_saltutil.py @@ -47,6 +47,7 @@ def get_top(configure_mocks): yield get_top +@pytest.mark.slow_test def test__get_top_file_envs(modules, get_top, destroy): """ Ensure we cleanup objects created by saltutil._get_top_file_envs #60449 diff --git a/tests/pytests/functional/modules/test_service.py b/tests/pytests/functional/modules/test_service.py new file mode 100644 index 000000000000..51ce7f1cc0b5 --- /dev/null +++ b/tests/pytests/functional/modules/test_service.py @@ -0,0 +1,157 @@ +import os + +import pytest + +import 
salt.utils.path +import salt.utils.platform +import salt.utils.systemd +from salt.exceptions import CommandExecutionError + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.destructive_test, + pytest.mark.slow_test, +] + + +@pytest.fixture +def service_name(grains, modules): + # For local testing purposes + env_name = os.environ.get("SALT_FUNCTIONAL_TEST_SERVICE_NAME") + if env_name is not None: + return env_name + + service_name = "cron" + cmd_name = "crontab" + os_family = grains.get("os_family") + is_systemd = grains.get("systemd") + if os_family == "RedHat": + service_name = "crond" + elif os_family == "Arch": + service_name = "sshd" + cmd_name = "systemctl" + elif os_family == "MacOS": + service_name = "com.apple.AirPlayXPCHelper" + elif os_family == "Windows": + service_name = "Spooler" + + if os_family != "Windows" and salt.utils.path.which(cmd_name) is None: + pytest.skip("{} is not installed".format(cmd_name)) + + if is_systemd and modules.service.offline(): + pytest.skip("systemd is OFFLINE") + + return service_name + + +@pytest.fixture(autouse=True) +def setup_service(service_name, modules): + pre_srv_status = modules.service.status(service_name) + pre_srv_enabled = service_name in modules.service.get_enabled() + + try: + yield pre_srv_status + finally: + post_srv_status = modules.service.status(service_name) + post_srv_enabled = service_name in modules.service.get_enabled() + + if post_srv_status != pre_srv_status: + if pre_srv_status: + modules.service.start(service_name) + else: + modules.service.stop(service_name) + + if post_srv_enabled != pre_srv_enabled: + if pre_srv_enabled: + modules.service.enable(service_name) + else: + modules.service.disable(service_name) + + +def test_service_status_running(modules, service_name): + """ + test service.status execution module + when service is running + """ + modules.service.start(service_name) + check_service = modules.service.status(service_name) + assert check_service + + +def 
test_service_status_dead(modules, service_name): + """ + test service.status execution module + when service is dead + """ + modules.service.stop(service_name) + check_service = modules.service.status(service_name) + assert not check_service + + +def test_service_restart(modules, service_name): + """ + test service.restart + """ + assert modules.service.stop(service_name) + + +def test_service_enable(modules, service_name): + """ + test service.get_enabled and service.enable module + """ + # disable service before test + assert modules.service.disable(service_name) + + assert modules.service.enable(service_name) + assert service_name in modules.service.get_enabled() + + +def test_service_disable(modules, service_name): + """ + test service.get_disabled and service.disable module + """ + # enable service before test + assert modules.service.enable(service_name) + + assert modules.service.disable(service_name) + if salt.utils.platform.is_darwin(): + assert modules.service.disabled(service_name) + else: + assert service_name in modules.service.get_disabled() + + +def test_service_disable_doesnot_exist(modules): + """ + test service.get_disabled and service.disable module + when service name does not exist + """ + # enable service before test + srv_name = "doesnotexist" + try: + enable = modules.service.enable(srv_name) + assert not enable + except CommandExecutionError as exc: + assert srv_name in exc.error or "no such file or directory" in exc.error.lower() + + try: + disable = modules.service.disable(srv_name) + assert not disable + except CommandExecutionError as exc: + assert srv_name in exc.error or "no such file or directory" in exc.error.lower() + + if salt.utils.platform.is_darwin(): + with pytest.raises( + CommandExecutionError, match=f"Service not found: {srv_name}" + ): + modules.service.disabled(srv_name) + else: + assert srv_name not in modules.service.get_disabled() + + +@pytest.mark.skip_unless_on_windows +def test_service_get_service_name(modules, 
service_name): + """ + test service.get_service_name + """ + ret = modules.service.get_service_name() + assert service_name in ret.values() diff --git a/tests/pytests/functional/modules/test_system.py b/tests/pytests/functional/modules/test_system.py index 0249ba71269b..2dabaaebfadb 100644 --- a/tests/pytests/functional/modules/test_system.py +++ b/tests/pytests/functional/modules/test_system.py @@ -9,7 +9,10 @@ import salt.utils.files -pytestmark = [pytest.mark.skip_unless_on_linux] +pytestmark = [ + pytest.mark.skip_unless_on_linux, + pytest.mark.slow_test, +] log = logging.getLogger(__name__) diff --git a/tests/pytests/functional/modules/test_test.py b/tests/pytests/functional/modules/test_test.py new file mode 100644 index 000000000000..afe4bc13a19e --- /dev/null +++ b/tests/pytests/functional/modules/test_test.py @@ -0,0 +1,13 @@ +import pytest + +import salt.modules.test as test + + +def test_raise_exception(): + """ + Add test raising an exception in test module. + """ + msg = "message" + with pytest.raises(Exception) as err: + test.exception(message=msg) + assert err.match(msg) diff --git a/tests/pytests/functional/modules/test_win_certutil.py b/tests/pytests/functional/modules/test_win_certutil.py index ca2e2a219cba..2e8a7984caee 100644 --- a/tests/pytests/functional/modules/test_win_certutil.py +++ b/tests/pytests/functional/modules/test_win_certutil.py @@ -10,6 +10,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/modules/test_win_dsc.py b/tests/pytests/functional/modules/test_win_dsc.py index ab1b2f8a0619..76255968631d 100644 --- a/tests/pytests/functional/modules/test_win_dsc.py +++ b/tests/pytests/functional/modules/test_win_dsc.py @@ -10,6 +10,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/modules/test_win_shortcut.py 
b/tests/pytests/functional/modules/test_win_shortcut.py index 1afa0217bc7f..292f439536b8 100644 --- a/tests/pytests/functional/modules/test_win_shortcut.py +++ b/tests/pytests/functional/modules/test_win_shortcut.py @@ -21,6 +21,7 @@ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, pytest.mark.skipif(not HAS_WIN32, reason="Requires Win32 libraries"), + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/modules/test_x509_v2.py b/tests/pytests/functional/modules/test_x509_v2.py index 8da31bed9d09..dfb973af1089 100644 --- a/tests/pytests/functional/modules/test_x509_v2.py +++ b/tests/pytests/functional/modules/test_x509_v2.py @@ -23,7 +23,8 @@ CRYPTOGRAPHY_VERSION = tuple(int(x) for x in cryptography.__version__.split(".")) pytestmark = [ - pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library") + pytest.mark.skip_on_fips_enabled_platform, + pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library"), ] @@ -1359,6 +1360,14 @@ def test_create_csr_with_extensions(x509, rsa_privkey): assert res.startswith("-----BEGIN CERTIFICATE REQUEST-----") +def test_create_csr_with_wildcard_san(x509, rsa_privkey): + """ + Test that wildcards in SAN extension are supported. 
Issue #65072 + """ + res = x509.create_csr(private_key=rsa_privkey, subjectAltName="DNS:*.salt.ca") + assert res.startswith("-----BEGIN CERTIFICATE REQUEST-----") + + @pytest.mark.parametrize("encoding", ["pem", "der"]) def test_create_csr_write_to_path(x509, encoding, rsa_privkey, tmp_path): tgt = tmp_path / "csr" @@ -1416,13 +1425,20 @@ def test_create_private_key_pkcs12(x509, passphrase): @pytest.mark.parametrize("encoding", ["pem", "der"]) def test_create_private_key_write_to_path(x509, encoding, tmp_path): - tgt = tmp_path / "csr" + tgt = tmp_path / "pk" x509.create_private_key(encoding=encoding, path=str(tgt)) assert tgt.exists() if encoding == "pem": assert tgt.read_text().startswith("-----BEGIN PRIVATE KEY-----") +def test_create_private_key_write_to_path_encrypted(x509, tmp_path): + tgt = tmp_path / "pk" + x509.create_private_key(path=str(tgt), passphrase="hunter1") + assert tgt.exists() + assert tgt.read_text().startswith("-----BEGIN ENCRYPTED PRIVATE KEY-----") + + @pytest.mark.parametrize("encoding", ["pem", "der"]) def test_create_private_key_write_to_path_overwrite(x509, encoding, tmp_path): tgt = tmp_path / "cert" @@ -1581,8 +1597,32 @@ def test_verify_crl(x509, crl, ca_cert): assert x509.verify_crl(crl, ca_cert) is True -def test_verify_private_key(x509, ca_key, ca_cert): - assert x509.verify_private_key(ca_key, ca_cert) is True +def test_encode_private_key(x509, rsa_privkey): + pk = x509.create_private_key() + res = x509.encode_private_key(pk) + assert res.strip() == pk.strip() + + +def test_encode_private_key_encrypted(x509, ca_key, ca_key_enc): + pk = x509.create_private_key() + pk_enc = x509.encode_private_key(pk, passphrase="hunter1") + res = x509.encode_private_key(pk_enc, private_key_passphrase="hunter1") + assert res.strip() == pk.strip() + + +@pytest.mark.parametrize("privkey,expected", [("ca_key", True), ("rsa_privkey", False)]) +def test_verify_private_key(x509, request, privkey, expected, ca_cert): + pk = request.getfixturevalue(privkey) 
+ assert x509.verify_private_key(pk, ca_cert) is expected + + +def test_verify_private_key_with_passphrase(x509, ca_key_enc, ca_cert): + assert ( + x509.verify_private_key( + ca_key_enc, ca_cert, passphrase="correct horse battery staple" + ) + is True + ) @pytest.mark.parametrize("algo", ["rsa", "ec", "ed25519", "ed448"]) diff --git a/tests/pytests/functional/modules/test_yumpkg.py b/tests/pytests/functional/modules/test_yumpkg.py new file mode 100644 index 000000000000..36b357a61705 --- /dev/null +++ b/tests/pytests/functional/modules/test_yumpkg.py @@ -0,0 +1,41 @@ +import pytest + +import salt.modules.cmdmod +import salt.modules.pkg_resource +import salt.modules.yumpkg +import salt.utils.pkg.rpm + + +@pytest.fixture +def configure_loader_modules(minion_opts): + return { + salt.modules.yumpkg: { + "__salt__": { + "cmd.run": salt.modules.cmdmod.run, + "pkg_resource.add_pkg": salt.modules.pkg_resource.add_pkg, + "pkg_resource.format_pkg_list": salt.modules.pkg_resource.format_pkg_list, + }, + "__grains__": {"osarch": salt.utils.pkg.rpm.get_osarch()}, + }, + } + + +@pytest.mark.slow_test +def test_yum_list_pkgs(grains): + """ + compare the output of rpm -qa vs the return of yumpkg.list_pkgs, + make sure that any changes to ympkg.list_pkgs still returns. 
+ """ + + if grains["os_family"] != "RedHat": + pytest.skip("Skip if not RedHat") + cmd = [ + "rpm", + "-qa", + "--queryformat", + "%{NAME}\n", + ] + known_pkgs = salt.modules.cmdmod.run(cmd, python_shell=False) + listed_pkgs = salt.modules.yumpkg.list_pkgs() + for line in known_pkgs.splitlines(): + assert any(line in d for d in listed_pkgs) diff --git a/tests/pytests/functional/modules/win_file/test_check_perms.py b/tests/pytests/functional/modules/win_file/test_check_perms.py index 53e656eab18d..7b829b35eed0 100644 --- a/tests/pytests/functional/modules/win_file/test_check_perms.py +++ b/tests/pytests/functional/modules/win_file/test_check_perms.py @@ -21,9 +21,9 @@ def configure_loader_modules(): "__utils__": { "dacl.check_perms": win_dacl.check_perms, "dacl.set_perms": win_dacl.set_perms, - } + }, + "__opts__": {"test": False}, }, - win_dacl: {"__opts__": {"test": False}}, } @@ -43,7 +43,7 @@ def test_check_perms_set_owner_test_true(test_file): "name": str(test_file), "result": None, } - with patch.dict(win_dacl.__opts__, {"test": True}): + with patch.dict(win_file.__opts__, {"test": True}): result = win_file.check_perms( path=str(test_file), owner="Backup Operators", inheritance=None ) @@ -76,7 +76,7 @@ def test_check_perms_deny_test_true(test_file): "name": str(test_file), "result": None, } - with patch.dict(win_dacl.__opts__, {"test": True}): + with patch.dict(win_file.__opts__, {"test": True}): result = win_file.check_perms( path=str(test_file), deny_perms={"Users": {"perms": "read_execute"}}, @@ -113,7 +113,7 @@ def test_check_perms_grant_test_true(test_file): "name": str(test_file), "result": None, } - with patch.dict(win_dacl.__opts__, {"test": True}): + with patch.dict(win_file.__opts__, {"test": True}): result = win_file.check_perms( path=str(test_file), grant_perms={"Users": {"perms": "read_execute"}}, @@ -150,7 +150,7 @@ def test_check_perms_inheritance_false_test_true(test_file): "name": str(test_file), "result": None, } - with 
patch.dict(win_dacl.__opts__, {"test": True}): + with patch.dict(win_file.__opts__, {"test": True}): result = win_file.check_perms(path=str(test_file), inheritance=False) assert result == expected @@ -214,7 +214,7 @@ def test_check_perms_reset_test_true(test_file): "name": str(test_file), "result": None, } - with patch.dict(win_dacl.__opts__, {"test": True}): + with patch.dict(win_file.__opts__, {"test": True}): result = win_file.check_perms( path=str(test_file), grant_perms={ diff --git a/tests/pytests/functional/modules/win_lgpo/test_audit_settings_module.py b/tests/pytests/functional/modules/win_lgpo/test_audit_settings_module.py index 3aa1afdb6526..2830c7a7d272 100644 --- a/tests/pytests/functional/modules/win_lgpo/test_audit_settings_module.py +++ b/tests/pytests/functional/modules/win_lgpo/test_audit_settings_module.py @@ -6,6 +6,7 @@ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, pytest.mark.destructive_test, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/modules/win_lgpo/test_get_policy.py b/tests/pytests/functional/modules/win_lgpo/test_get_policy.py index 58651f28c528..ef91a3198fb2 100644 --- a/tests/pytests/functional/modules/win_lgpo/test_get_policy.py +++ b/tests/pytests/functional/modules/win_lgpo/test_get_policy.py @@ -3,6 +3,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/modules/win_lgpo/test_get_policy_info.py b/tests/pytests/functional/modules/win_lgpo/test_get_policy_info.py index 76ce483f1d34..09de8845f236 100644 --- a/tests/pytests/functional/modules/win_lgpo/test_get_policy_info.py +++ b/tests/pytests/functional/modules/win_lgpo/test_get_policy_info.py @@ -10,6 +10,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/modules/win_lgpo/test_registry_policy.py 
b/tests/pytests/functional/modules/win_lgpo/test_registry_policy.py index 543bcc9c945e..8337c90b4db0 100644 --- a/tests/pytests/functional/modules/win_lgpo/test_registry_policy.py +++ b/tests/pytests/functional/modules/win_lgpo/test_registry_policy.py @@ -6,6 +6,7 @@ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, pytest.mark.destructive_test, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py b/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py index d039e75d29b9..7469897a811c 100644 --- a/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py +++ b/tests/pytests/functional/netapi/rest_tornado/test_websockets_handler.py @@ -6,6 +6,7 @@ import salt.netapi.rest_tornado as rest_tornado import salt.utils.json import salt.utils.yaml +from salt.config import DEFAULT_HASH_TYPE from salt.ext.tornado.httpclient import HTTPError, HTTPRequest from salt.ext.tornado.websocket import websocket_connect @@ -51,7 +52,9 @@ async def test_websocket_handler_bad_token(client_config, http_server): A bad token should returns a 401 during a websocket connect """ token = "A" * len( - getattr(hashlib, client_config.get("hash_type", "md5"))().hexdigest() + getattr( + hashlib, client_config.get("hash_type", DEFAULT_HASH_TYPE) + )().hexdigest() ) url = "ws://127.0.0.1:{}/all_events/{}".format(http_server.port, token) diff --git a/tests/pytests/functional/pillar/test_git_pillar.py b/tests/pytests/functional/pillar/test_git_pillar.py new file mode 100644 index 000000000000..6fd3dee431b1 --- /dev/null +++ b/tests/pytests/functional/pillar/test_git_pillar.py @@ -0,0 +1,262 @@ +import pytest + +from salt.pillar.git_pillar import ext_pillar +from salt.utils.immutabletypes import ImmutableDict, ImmutableList +from tests.support.mock import patch + +pytestmark = [ + pytest.mark.slow_test, +] + + +try: + import git # pylint: disable=unused-import + + HAS_GITPYTHON = True +except 
ImportError: + HAS_GITPYTHON = False + + +try: + import pygit2 # pylint: disable=unused-import + + HAS_PYGIT2 = True +except ImportError: + HAS_PYGIT2 = False + + +skipif_no_gitpython = pytest.mark.skipif(not HAS_GITPYTHON, reason="Missing gitpython") +skipif_no_pygit2 = pytest.mark.skipif(not HAS_PYGIT2, reason="Missing pygit2") + + +@pytest.fixture +def git_pillar_opts(salt_master, tmp_path): + opts = dict(salt_master.config) + opts["cachedir"] = str(tmp_path) + for key, item in opts.items(): + if isinstance(item, ImmutableDict): + opts[key] = dict(item) + elif isinstance(item, ImmutableList): + opts[key] = list(item) + return opts + + +@pytest.fixture +def gitpython_pillar_opts(git_pillar_opts): + git_pillar_opts["verified_git_pillar_provider"] = "gitpython" + return git_pillar_opts + + +@pytest.fixture +def pygit2_pillar_opts(git_pillar_opts): + git_pillar_opts["verified_git_pillar_provider"] = "pygit2" + return git_pillar_opts + + +def _get_ext_pillar(minion, pillar_opts, grains, *repos): + with patch("salt.pillar.git_pillar.__opts__", pillar_opts, create=True): + with patch("salt.pillar.git_pillar.__grains__", grains, create=True): + return ext_pillar(minion, None, *repos) + + +def _test_simple(pillar_opts, grains): + data = _get_ext_pillar( + "minion", + pillar_opts, + grains, + "https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + assert data == {"key": "value"} + + +@skipif_no_gitpython +def test_gitpython_simple(gitpython_pillar_opts, grains): + _test_simple(gitpython_pillar_opts, grains) + + +@skipif_no_pygit2 +def test_pygit2_simple(pygit2_pillar_opts, grains): + _test_simple(pygit2_pillar_opts, grains) + + +def _test_missing_env(pillar_opts, grains): + data = _get_ext_pillar( + "minion", + pillar_opts, + grains, + { + "https://github.com/saltstack/salt-test-pillar-gitfs.git": [ + {"env": "misssing"} + ] + }, + ) + assert data == {} + + +@skipif_no_gitpython +def test_gitpython_missing_env(gitpython_pillar_opts, grains): + 
_test_missing_env(gitpython_pillar_opts, grains) + + +@skipif_no_pygit2 +def test_pygit2_missing_env(pygit2_pillar_opts, grains): + _test_missing_env(pygit2_pillar_opts, grains) + + +def _test_env(pillar_opts, grains): + data = _get_ext_pillar( + "minion", + pillar_opts, + grains, + { + "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git": [ + {"env": "other_env"} + ] + }, + ) + assert data == {"other": "env"} + + +@skipif_no_gitpython +def test_gitpython_env(gitpython_pillar_opts, grains): + _test_env(gitpython_pillar_opts, grains) + + +@skipif_no_pygit2 +def test_pygit2_env(pygit2_pillar_opts, grains): + _test_env(pygit2_pillar_opts, grains) + + +def _test_branch(pillar_opts, grains): + data = _get_ext_pillar( + "minion", + pillar_opts, + grains, + "branch https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + assert data == {"key": "data"} + + +@skipif_no_gitpython +def test_gitpython_branch(gitpython_pillar_opts, grains): + _test_branch(gitpython_pillar_opts, grains) + + +@skipif_no_pygit2 +def test_pygit2_branch(pygit2_pillar_opts, grains): + _test_branch(pygit2_pillar_opts, grains) + + +def _test_simple_dynamic(pillar_opts, grains): + data = _get_ext_pillar( + "minion", + pillar_opts, + grains, + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + assert data == {"key": "value"} + + +@skipif_no_gitpython +def test_gitpython_simple_dynamic(gitpython_pillar_opts, grains): + _test_simple_dynamic(gitpython_pillar_opts, grains) + + +@skipif_no_pygit2 +def test_pygit2_simple_dynamic(pygit2_pillar_opts, grains): + _test_simple_dynamic(pygit2_pillar_opts, grains) + + +def _test_missing_env_dynamic(pillar_opts, grains): + data = _get_ext_pillar( + "minion", + pillar_opts, + grains, + { + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git": [ + {"env": "misssing"} + ] + }, + ) + assert data == {} + + +@skipif_no_gitpython +def test_gitpython_missing_env_dynamic(gitpython_pillar_opts, grains): + 
_test_missing_env_dynamic(gitpython_pillar_opts, grains) + + +@skipif_no_pygit2 +def test_pygit2_missing_env_dynamic(pygit2_pillar_opts, grains): + _test_missing_env_dynamic(pygit2_pillar_opts, grains) + + +def _test_pillarenv_dynamic(pillar_opts, grains): + pillar_opts["pillarenv"] = "branch" + data = _get_ext_pillar( + "minion", + pillar_opts, + grains, + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + assert data == {"key": "data"} + + +@skipif_no_gitpython +def test_gitpython_pillarenv_dynamic(gitpython_pillar_opts, grains): + _test_pillarenv_dynamic(gitpython_pillar_opts, grains) + + +@skipif_no_pygit2 +def test_pygit2_pillarenv_dynamic(pygit2_pillar_opts, grains): + _test_pillarenv_dynamic(pygit2_pillar_opts, grains) + + +def _test_multiple(pillar_opts, grains): + pillar_opts["pillarenv"] = "branch" + data = _get_ext_pillar( + "minion", + pillar_opts, + grains, + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git", + "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git", + ) + assert data == {"key": "data"} + + +@skipif_no_gitpython +def test_gitpython_multiple(gitpython_pillar_opts, grains): + _test_multiple(gitpython_pillar_opts, grains) + + +@skipif_no_pygit2 +def test_pygit2_multiple(pygit2_pillar_opts, grains): + _test_multiple(pygit2_pillar_opts, grains) + + +def _test_multiple_2(pillar_opts, grains): + data = _get_ext_pillar( + "minion", + pillar_opts, + grains, + "https://github.com/saltstack/salt-test-pillar-gitfs.git", + "https://github.com/saltstack/salt-test-pillar-gitfs-2.git", + ) + assert data == { + "key": "value", + "key1": "value1", + "key2": "value2", + "key4": "value4", + "data1": "d", + "data2": "d2", + } + + +@skipif_no_gitpython +def test_gitpython_multiple_2(gitpython_pillar_opts, grains): + _test_multiple_2(gitpython_pillar_opts, grains) + + +@skipif_no_pygit2 +def test_pygit2_multiple_2(pygit2_pillar_opts, grains): + _test_multiple_2(pygit2_pillar_opts, grains) diff --git 
a/tests/pytests/functional/states/file/test__check_directory_win.py b/tests/pytests/functional/states/file/test__check_directory_win.py index 1ce94d064257..6568be82304a 100644 --- a/tests/pytests/functional/states/file/test__check_directory_win.py +++ b/tests/pytests/functional/states/file/test__check_directory_win.py @@ -6,6 +6,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/states/file/test_append.py b/tests/pytests/functional/states/file/test_append.py index 32654edc896e..874d5d207145 100644 --- a/tests/pytests/functional/states/file/test_append.py +++ b/tests/pytests/functional/states/file/test_append.py @@ -138,3 +138,25 @@ def test_issue_1896_file_append_source(file, tmp_path, state_tree): testfile_contents = testfile.read_text() assert testfile_contents == FIRST_IF_CONTENTS + SECOND_IF_CONTENTS + + +def test_file_append_check_cmd(modules, state_tree, tmp_path): + """ + Test that check_cmd works for file.append + and those states do not run. 
+ """ + sls_contents = f""" +append_in_file: + file.append: + - name: /tmp/test + - text: "appended text" + - check_cmd: + - "djasjahj" + """ + with pytest.helpers.temp_file( + "file-append-check-cmd.sls", sls_contents, state_tree + ): + ret = modules.state.sls("file-append-check-cmd") + for state_run in ret: + assert state_run.result is False + assert state_run.comment == "check_cmd determined the state failed" diff --git a/tests/pytests/functional/states/file/test_cached.py b/tests/pytests/functional/states/file/test_cached.py new file mode 100644 index 000000000000..1b0523820712 --- /dev/null +++ b/tests/pytests/functional/states/file/test_cached.py @@ -0,0 +1,96 @@ +import secrets + +import pytest + +import salt.states.file as file +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.windows_whitelisted, +] + + +@pytest.fixture +def configure_loader_modules(): + return { + file: {"__opts__": {"test": False}}, + } + + +def test_cached_test_true(): + name = "salt://test/file.exe" + source_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "File will be cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value=""), + "file.get_source_sum": MagicMock(return_value={"hsum": source_hash}), + } + opts = {"test": True} + with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name, source_hash=source_hash) + assert result == expected + + +def test_cached_present_test_true(): + name = "salt://test/file.exe" + source_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "File already cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value="path/to/file"), + "file.get_hash": MagicMock(return_value=source_hash), + "file.get_source_sum": MagicMock(return_value={"hsum": source_hash}), + } + opts = {"test": True, "hash_type": "sha256"} 
+ with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name, source_hash=source_hash) + assert result == expected + + +def test_cached_present_different_hash_test_true(): + name = "salt://test/file.exe" + source_hash = secrets.token_hex(nbytes=32) + existing_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "Hashes don't match.\nFile will be cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value="path/to/file"), + "file.get_hash": MagicMock(return_value=existing_hash), + "file.get_source_sum": MagicMock(return_value={"hsum": source_hash}), + } + opts = {"test": True, "hash_type": "sha256"} + with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name, source_hash=source_hash) + assert result == expected + + +def test_cached_present_no_source_hash_test_true(): + name = "salt://test/file.exe" + existing_hash = secrets.token_hex(nbytes=32) + expected = { + "changes": {}, + "comment": "No hash found. 
File will be cached: {}".format(name), + "name": name, + "result": None, + } + salt = { + "cp.is_cached": MagicMock(return_value="path/to/file"), + "file.get_hash": MagicMock(return_value=existing_hash), + } + opts = {"test": True, "hash_type": "sha256"} + with patch.dict(file.__salt__, salt), patch.dict(file.__opts__, opts): + result = file.cached(name=name) + assert result == expected diff --git a/tests/pytests/functional/states/file/test_comment.py b/tests/pytests/functional/states/file/test_comment.py index 5b439b022984..b7a7c8a7c951 100644 --- a/tests/pytests/functional/states/file/test_comment.py +++ b/tests/pytests/functional/states/file/test_comment.py @@ -10,6 +10,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.slow_test, ] @@ -105,7 +106,7 @@ def test_issue_2401_file_comment(modules, tmp_path): tmp_file.write_text("hello\nworld\n") # create the sls template template_lines = [ - "{}:".format(tmp_file), + f"{tmp_file}:", " file.comment:", " - regex: ^world", ] @@ -121,3 +122,16 @@ def test_issue_2401_file_comment(modules, tmp_path): for state_run in ret: assert state_run.result is True assert "Pattern already commented" in state_run.comment + + +def test_issue_65501(file, tmp_path): + tmp_file = tmp_path / "issue-65501.txt" + tmp_file.write_text("first\n#PermitRootLogin prohibit-password\nlast") + ret = file.comment( + name=str(tmp_file), + regex="^PermitRootLogin[ \t]+.*$", + char="# NEXT LINE COMMENT SALTSTACK openssh-server_comment_permitrootlogin_sshd_config\n# ", + ignore_missing=True, + ) + assert ret.result is True + assert ret.comment == "Pattern not found and ignore_missing set to True" diff --git a/tests/pytests/functional/states/file/test_directory.py b/tests/pytests/functional/states/file/test_directory.py index bb56f5416f23..82a3f7f154c6 100644 --- a/tests/pytests/functional/states/file/test_directory.py +++ b/tests/pytests/functional/states/file/test_directory.py @@ -113,6 +113,48 @@ def _get_oct_mode(name): assert _mode == 
_get_oct_mode(untouched_dir) +@pytest.mark.skip_on_windows +def test_directory_children_only(file, tmp_path): + """ + file.directory with children_only=True + """ + + name = tmp_path / "directory_children_only_dir" + name.mkdir(0o0700) + + strayfile = name / "strayfile" + strayfile.touch() + os.chmod(strayfile, 0o700) + + straydir = name / "straydir" + straydir.mkdir(0o0700) + + # none of the children nor parent are currently set to the correct mode + ret = file.directory( + name=str(name), + file_mode="0644", + dir_mode="0755", + recurse=["mode"], + children_only=True, + ) + assert ret.result is True + + # Assert parent directory's mode remains unchanged + assert ( + oct(name.stat().st_mode)[-3:] == "700" + ), f"Expected mode 700 for {name}, got {oct(name.stat().st_mode)[-3:]}" + + # Assert child file's mode is changed + assert ( + oct(strayfile.stat().st_mode)[-3:] == "644" + ), f"Expected mode 644 for {strayfile}, got {oct(strayfile.stat().st_mode)[-3:]}" + + # Assert child directory's mode is changed + assert ( + oct(straydir.stat().st_mode)[-3:] == "755" + ), f"Expected mode 755 for {straydir}, got {oct(straydir.stat().st_mode)[-3:]}" + + def test_directory_clean(file, tmp_path): """ file.directory with clean=True diff --git a/tests/pytests/functional/states/file/test_directory_win.py b/tests/pytests/functional/states/file/test_directory_win.py index 0b622be6ff03..685f48195c37 100644 --- a/tests/pytests/functional/states/file/test_directory_win.py +++ b/tests/pytests/functional/states/file/test_directory_win.py @@ -2,8 +2,6 @@ import pytest -import salt.modules.win_file as win_file -import salt.states.file as file import salt.utils.win_dacl as win_dacl import salt.utils.win_functions as win_functions @@ -16,31 +14,11 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] -@pytest.fixture(scope="module") -def configure_loader_modules(): - return { - file: { - "__opts__": {"test": False}, - "__salt__": { 
- "file.mkdir": win_file.mkdir, - "file.check_perms": win_file.check_perms, - }, - }, - win_file: { - "__utils__": { - "dacl.check_perms": win_dacl.check_perms, - "dacl.set_owner": win_dacl.set_owner, - "dacl.set_perms": win_dacl.set_perms, - }, - }, - win_dacl: {"__opts__": {"test": False}}, - } - - -def test_directory_new(tmp_path): +def test_directory_new(file, tmp_path): """ Test file.directory when the directory does not exist Should just return "New Dir" @@ -106,7 +84,7 @@ def test_directory_new(tmp_path): assert permissions == expected -def test_directory_new_no_inherit(tmp_path): +def test_directory_new_no_inherit(file, tmp_path): """ Test file.directory when the directory does not exist Should just return "New Dir" @@ -126,7 +104,7 @@ def test_directory_new_no_inherit(tmp_path): assert permissions["Inherited"] == {} -def test_directory_new_reset(tmp_path): +def test_directory_new_reset(file, tmp_path): """ Test file.directory when the directory does not exist Should just return "New Dir" @@ -181,7 +159,7 @@ def test_directory_new_reset(tmp_path): assert permissions == expected -def test_directory_new_reset_no_inherit(tmp_path): +def test_directory_new_reset_no_inherit(file, tmp_path): """ Test file.directory when the directory does not exist Should just return "New Dir" @@ -218,7 +196,7 @@ def test_directory_new_reset_no_inherit(tmp_path): assert permissions == expected -def test_directory_existing(tmp_path): +def test_directory_existing(file, tmp_path): path = str(tmp_path) ret = file.directory( name=path, @@ -292,7 +270,7 @@ def test_directory_existing(tmp_path): assert permissions == expected -def test_directory_existing_existing_user(tmp_path): +def test_directory_existing_existing_user(file, tmp_path): path = str(tmp_path) win_dacl.set_permissions( obj_name=path, @@ -373,7 +351,7 @@ def test_directory_existing_existing_user(tmp_path): assert permissions == expected -def test_directory_existing_no_inherit(tmp_path): +def 
test_directory_existing_no_inherit(file, tmp_path): path = str(tmp_path) ret = file.directory( name=path, @@ -397,7 +375,7 @@ def test_directory_existing_no_inherit(tmp_path): assert permissions["Inherited"] == {} -def test_directory_existing_reset(tmp_path): +def test_directory_existing_reset(file, tmp_path): path = str(tmp_path) win_dacl.set_permissions( obj_name=path, @@ -461,7 +439,7 @@ def test_directory_existing_reset(tmp_path): assert permissions == expected -def test_directory_existing_reset_no_inherit(tmp_path): +def test_directory_existing_reset_no_inherit(file, tmp_path): path = str(tmp_path) ret = file.directory( name=path, diff --git a/tests/pytests/functional/states/file/test_pruned.py b/tests/pytests/functional/states/file/test_pruned.py index 101fa76d2cc6..80d4f94b6c2b 100644 --- a/tests/pytests/functional/states/file/test_pruned.py +++ b/tests/pytests/functional/states/file/test_pruned.py @@ -2,6 +2,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/states/file/test_rename.py b/tests/pytests/functional/states/file/test_rename.py index ffffb76ec8a0..db293d16277d 100644 --- a/tests/pytests/functional/states/file/test_rename.py +++ b/tests/pytests/functional/states/file/test_rename.py @@ -9,6 +9,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/states/file/test_replace.py b/tests/pytests/functional/states/file/test_replace.py index 887f9759c448..22dab8168042 100644 --- a/tests/pytests/functional/states/file/test_replace.py +++ b/tests/pytests/functional/states/file/test_replace.py @@ -376,3 +376,27 @@ def test_file_replace_prerequired_issues_55775(modules, state_tree, tmp_path): assert state_run.result is True assert managed_file.exists() + + +def test_file_replace_check_cmd(modules, state_tree, tmp_path): + """ + Test that check_cmd works for file.replace + and those states do not run. 
+ """ + sls_contents = f""" +replace_in_file: + file.replace: + - name: /tmp/test + - pattern: hi + - repl: "replacement text" + - append_if_not_found: True + - check_cmd: + - "djasjahj" + """ + with pytest.helpers.temp_file( + "file-replace-check-cmd.sls", sls_contents, state_tree + ): + ret = modules.state.sls("file-replace-check-cmd") + for state_run in ret: + assert state_run.result is False + assert state_run.comment == "check_cmd determined the state failed" diff --git a/tests/pytests/functional/states/pkgrepo/test_centos.py b/tests/pytests/functional/states/pkgrepo/test_centos.py index 6a84f96ac986..c02da519d2fb 100644 --- a/tests/pytests/functional/states/pkgrepo/test_centos.py +++ b/tests/pytests/functional/states/pkgrepo/test_centos.py @@ -242,7 +242,11 @@ def copr_pkgrepo_with_comments_name(pkgrepo, grains): or grains["os"] == "VMware Photon OS" ): pytest.skip("copr plugin not installed on {} CI".format(grains["osfinger"])) - if grains["os"] in ("CentOS Stream", "AlmaLinux") and grains["osmajorrelease"] == 9: + if ( + grains["os"] in ("CentOS Stream", "AlmaLinux") + and grains["osmajorrelease"] == 9 + or grains["osfinger"] == "Amazon Linux-2023" + ): pytest.skip("No repo for {} in test COPR yet".format(grains["osfinger"])) pkgrepo_name = "hello-copr" try: diff --git a/tests/pytests/functional/states/pkgrepo/test_debian.py b/tests/pytests/functional/states/pkgrepo/test_debian.py index adbea0089515..b8c8344c095d 100644 --- a/tests/pytests/functional/states/pkgrepo/test_debian.py +++ b/tests/pytests/functional/states/pkgrepo/test_debian.py @@ -10,8 +10,10 @@ import attr import pytest +import salt.modules.aptpkg import salt.utils.files from tests.conftest import CODE_DIR +from tests.support.mock import MagicMock, patch PYTEST_GE_7 = getattr(_pytest._version, "version_tuple", (-1, -1)) >= (7, 0) @@ -21,6 +23,7 @@ pytestmark = [ pytest.mark.destructive_test, pytest.mark.skip_if_not_root, + pytest.mark.slow_test, ] @@ -37,12 +40,12 @@ def pkgrepo(states, 
grains): @pytest.mark.requires_salt_states("pkgrepo.managed") -def test_adding_repo_file(pkgrepo, tmp_path): +def test_adding_repo_file(pkgrepo, repo_uri, tmp_path): """ test adding a repo file using pkgrepo.managed """ repo_file = str(tmp_path / "stable-binary.list") - repo_content = "deb http://www.deb-multimedia.org stable main" + repo_content = f"deb {repo_uri} stable main" ret = pkgrepo.managed(name=repo_content, file=repo_file, clean_file=True) with salt.utils.files.fopen(repo_file, "r") as fp: file_content = fp.read() @@ -50,30 +53,24 @@ def test_adding_repo_file(pkgrepo, tmp_path): @pytest.mark.requires_salt_states("pkgrepo.managed") -def test_adding_repo_file_arch(pkgrepo, tmp_path, subtests): +def test_adding_repo_file_arch(pkgrepo, repo_uri, tmp_path, subtests): """ test adding a repo file using pkgrepo.managed and setting architecture """ repo_file = str(tmp_path / "stable-binary.list") - repo_content = "deb [arch=amd64 ] http://www.deb-multimedia.org stable main" + repo_content = f"deb [arch=amd64 ] {repo_uri} stable main" pkgrepo.managed(name=repo_content, file=repo_file, clean_file=True) with salt.utils.files.fopen(repo_file, "r") as fp: file_content = fp.read() - assert ( - file_content.strip() - == "deb [arch=amd64] http://www.deb-multimedia.org stable main" - ) + assert file_content.strip() == f"deb [arch=amd64] {repo_uri} stable main" with subtests.test("With multiple archs"): - repo_content = ( - "deb [arch=amd64,i386 ] http://www.deb-multimedia.org stable main" - ) + repo_content = f"deb [arch=amd64,i386 ] {repo_uri} stable main" pkgrepo.managed(name=repo_content, file=repo_file, clean_file=True) with salt.utils.files.fopen(repo_file, "r") as fp: file_content = fp.read() assert ( - file_content.strip() - == "deb [arch=amd64,i386] http://www.deb-multimedia.org stable main" + file_content.strip() == f"deb [arch=amd64,i386] {repo_uri} stable main" ) @@ -96,7 +93,7 @@ def test_adding_repo_file_cdrom(pkgrepo, tmp_path): def 
system_aptsources_ids(value): - return "{}(aptsources.sourceslist)".format(value.title()) + return f"{value.title()}(aptsources.sourceslist)" @pytest.fixture( @@ -120,7 +117,7 @@ def system_aptsources(request, grains): raise pytest.skip.Exception( "This test is meant to run without the system aptsources package, but it's " "available from '{}'.".format(sourceslist.__file__), - **exc_kwargs + **exc_kwargs, ) else: # Run the test @@ -376,7 +373,7 @@ def test_pkgrepo_with_architectures(pkgrepo, grains, sources_list_file, subtests ) def _get_arch(arch): - return "[arch={}] ".format(arch) if arch else "" + return f"[arch={arch}] " if arch else "" def _run(arch=None, test=False): return pkgrepo.managed( @@ -496,6 +493,11 @@ def _run(arch=None, test=False): assert ret.result is True +@pytest.fixture(scope="module") +def repo_uri(): + yield "http://www.deb-multimedia.org" + + @pytest.fixture def trailing_slash_repo_file(grains): if grains["os_family"] != "Debian": @@ -515,19 +517,21 @@ def trailing_slash_repo_file(grains): @pytest.mark.requires_salt_states("pkgrepo.managed", "pkgrepo.absent") -def test_repo_present_absent_trailing_slash_uri(pkgrepo, trailing_slash_repo_file): +def test_repo_present_absent_trailing_slash_uri( + pkgrepo, repo_uri, trailing_slash_repo_file +): """ - test adding a repo with a trailing slash in the uri + test adding and then removing a repo with a trailing slash in the uri """ # with the trailing slash - repo_content = "deb http://www.deb-multimedia.org/ stable main" + repo_content = f"deb {repo_uri}/ stable main" # initial creation ret = pkgrepo.managed( name=repo_content, file=trailing_slash_repo_file, refresh=False, clean_file=True ) with salt.utils.files.fopen(trailing_slash_repo_file, "r") as fp: file_content = fp.read() - assert file_content.strip() == "deb http://www.deb-multimedia.org/ stable main" + assert file_content.strip() == f"deb {repo_uri}/ stable main" assert ret.changes # no changes ret = pkgrepo.managed( @@ -540,19 +544,21 @@ 
def test_repo_present_absent_trailing_slash_uri(pkgrepo, trailing_slash_repo_fil @pytest.mark.requires_salt_states("pkgrepo.managed", "pkgrepo.absent") -def test_repo_present_absent_no_trailing_slash_uri(pkgrepo, trailing_slash_repo_file): +def test_repo_present_absent_no_trailing_slash_uri( + pkgrepo, repo_uri, trailing_slash_repo_file +): """ test adding a repo with a trailing slash in the uri """ # without the trailing slash - repo_content = "deb http://www.deb-multimedia.org stable main" + repo_content = f"deb {repo_uri} stable main" # initial creation ret = pkgrepo.managed( name=repo_content, file=trailing_slash_repo_file, refresh=False, clean_file=True ) with salt.utils.files.fopen(trailing_slash_repo_file, "r") as fp: file_content = fp.read() - assert file_content.strip() == "deb http://www.deb-multimedia.org stable main" + assert file_content.strip() == repo_content assert ret.changes # no changes ret = pkgrepo.managed( @@ -566,35 +572,81 @@ def test_repo_present_absent_no_trailing_slash_uri(pkgrepo, trailing_slash_repo_ @pytest.mark.requires_salt_states("pkgrepo.managed", "pkgrepo.absent") def test_repo_present_absent_no_trailing_slash_uri_add_slash( - pkgrepo, trailing_slash_repo_file + pkgrepo, repo_uri, trailing_slash_repo_file ): """ test adding a repo without a trailing slash, and then running it again with a trailing slash. 
""" # without the trailing slash - repo_content = "deb http://www.deb-multimedia.org stable main" + repo_content = f"deb {repo_uri} stable main" # initial creation ret = pkgrepo.managed( name=repo_content, file=trailing_slash_repo_file, refresh=False, clean_file=True ) with salt.utils.files.fopen(trailing_slash_repo_file, "r") as fp: file_content = fp.read() - assert file_content.strip() == "deb http://www.deb-multimedia.org stable main" + assert file_content.strip() == repo_content assert ret.changes # now add a trailing slash in the name - repo_content = "deb http://www.deb-multimedia.org/ stable main" + repo_content = f"deb {repo_uri}/ stable main" ret = pkgrepo.managed( name=repo_content, file=trailing_slash_repo_file, refresh=False ) with salt.utils.files.fopen(trailing_slash_repo_file, "r") as fp: file_content = fp.read() - assert file_content.strip() == "deb http://www.deb-multimedia.org/ stable main" + assert file_content.strip() == repo_content # absent ret = pkgrepo.absent(name=repo_content) assert ret.result +@pytest.mark.requires_salt_states("pkgrepo.absent") +def test_repo_absent_existing_repo_trailing_slash_uri( + pkgrepo, repo_uri, subtests, trailing_slash_repo_file +): + """ + Test pkgrepo.absent with a URI containing a trailing slash + + This test is different from test_repo_present_absent_trailing_slash_uri. + That test first does a pkgrepo.managed with a URI containing a trailing + slash. Since pkgrepo.managed normalizes the URI by removing the trailing + slash, the resulting repo file created by Salt does not contain one. This + tests the case where Salt is asked to remove an existing repo with a + trailing slash in the repo URI. 
+ + See https://github.com/saltstack/salt/issues/64286 + """ + repo_file = pathlib.Path(trailing_slash_repo_file) + repo_content = f"deb [arch=amd64] {repo_uri}/ stable main" + + with subtests.test("Remove repo with trailing slash in URI"): + # Write contents to file with trailing slash in URI + repo_file.write_text(f"{repo_content}\n") + # Perform and validate removal + ret = pkgrepo.absent(name=repo_content) + assert ret.result + assert ret.changes + assert not repo_file.exists() + # A second run of the pkgrepo.absent state should be a no-op (i.e. no changes) + ret = pkgrepo.absent(name=repo_content) + assert ret.result + assert not ret.changes + assert not repo_file.exists() + + with subtests.test("URI match with mismatched arch"): + # Create a repo file that matches the URI but contains no architecture. + # This should not be identified as a match for repo_content, and thus + # the result of a state should be a no-op. + repo_file.write_text(f"deb {repo_uri} stable main\n") + # Since this was a no-op, the state should have succeeded, made no + # changes, and left the repo file in place. + ret = pkgrepo.absent(name=repo_content) + assert ret.result + assert not ret.changes + assert repo_file.exists() + + @attr.s(kw_only=True) class Repo: key_root = attr.ib(default=pathlib.Path("/usr", "share", "keyrings")) @@ -621,6 +673,10 @@ def _default_alt_repo(self): if ( self.grains["osfullname"] == "Ubuntu" and self.grains["osrelease"] == "22.04" + or "Debian" in self.grains["osfullname"] + and self.grains["osrelease"] == "12" + # only need to use alt repo until + # we release Debian 12 salt packages ): return True return False @@ -715,6 +771,38 @@ def _run(test=False): assert ret.changes == {} +def test_adding_repo_file_signedby_invalid_name(pkgrepo, states, repo): + """ + Test adding a repo file using pkgrepo.managed + and setting signedby and the name is invalid. + Ensure we raise an error. 
+ """ + + default_sources = pathlib.Path("/etc", "apt", "sources.list") + with salt.utils.files.fopen(default_sources, "r") as fp: + pre_file_content = fp.read() + + ret = states.pkgrepo.managed( + name=repo.repo_content.strip("deb"), + file=str(repo.repo_file), + clean_file=True, + signedby=str(repo.key_file), + key_url=repo.key_url, + aptkey=False, + test=False, + ) + + assert "Failed to configure repo" in ret.comment + assert "This must be the complete repo entry" in ret.comment + with salt.utils.files.fopen(str(repo.repo_file), "r") as fp: + file_content = fp.read() + assert not file_content + + with salt.utils.files.fopen(default_sources, "r") as fp: + post_file_content = fp.read() + assert pre_file_content == post_file_content + + def test_adding_repo_file_signedby_keyserver(pkgrepo, states, repo): """ Test adding a repo file using pkgrepo.managed @@ -788,3 +876,56 @@ def test_adding_repo_file_signedby_alt_file(pkgrepo, states, repo): assert file_content.endswith("\n") assert key_file.is_file() assert repo_content in ret.comment + + +def test_adding_repo_file_signedby_fail_key_keyid( + pkgrepo, states, repo, subtests, modules +): + """ + Test adding a repo file using pkgrepo.managed + and setting signedby and keyid when adding the key fails + an error is returned + """ + + def _run(test=False): + return states.pkgrepo.managed( + name=repo.repo_content, + file=str(repo.repo_file), + clean_file=True, + signedby=str(repo.key_file), + keyid="10857FFDD3F91EAE577A21D664CBBC8173D76B3F1", + keyserver="keyserver.ubuntu.com", + aptkey=False, + test=test, + keydir="/tmp/test", + ) + + ret = _run() + assert "Failed to configure repo" in ret.comment + assert "Could not add key" in ret.comment + + +def test_adding_repo_file_signedby_fail_key_keyurl( + pkgrepo, states, repo, subtests, modules +): + """ + Test adding a repo file using pkgrepo.managed + and setting signedby and keyurl when adding the key fails + an error is returned + """ + + def _run(test=False): + with 
patch( + "salt.utils.path.which", MagicMock(side_effect=[True, True, False, False]) + ): + return states.pkgrepo.managed( + name=repo.repo_content, + file=str(repo.repo_file), + clean_file=True, + key_url="https://repo.saltproject.io/salt/py3/ubuntu/20.04/amd64/latest/SALT-PROJECT-GPG-PUBKEY-2023.pub", + aptkey=False, + ) + + ret = _run() + assert "Failed to configure repo" in ret.comment + assert "Could not add key" in ret.comment diff --git a/tests/pytests/functional/states/test_archive.py b/tests/pytests/functional/states/test_archive.py index b3bb6c0fc9af..8d1e47558503 100644 --- a/tests/pytests/functional/states/test_archive.py +++ b/tests/pytests/functional/states/test_archive.py @@ -41,7 +41,7 @@ def do_GET(self): ) as reqfp: return_data = reqfp.read() # We're using this checksum as the etag to show file changes - checksum = hashlib.md5(return_data).hexdigest() + checksum = hashlib.sha256(return_data).hexdigest() if none_match == checksum: # Status code 304 Not Modified is returned if the file is unchanged status_code = 304 @@ -113,6 +113,7 @@ def web_root(tmp_path_factory): shutil.rmtree(str(_web_root), ignore_errors=True) +@pytest.mark.slow_test def test_archive_extracted_web_source_etag_operation( modules, states, free_port, web_root, minion_opts ): @@ -146,7 +147,7 @@ def test_archive_extracted_web_source_etag_operation( minion_opts["cachedir"], "extrn_files", "base", - "localhost:{free_port}".format(free_port=free_port), + "localhost{free_port}".format(free_port=free_port), "foo.tar.gz", ) cached_etag = cached_file + ".etag" diff --git a/tests/pytests/functional/states/test_chocolatey_1_2_1.py b/tests/pytests/functional/states/test_chocolatey_1_2_1.py new file mode 100644 index 000000000000..9dcc186636a1 --- /dev/null +++ b/tests/pytests/functional/states/test_chocolatey_1_2_1.py @@ -0,0 +1,137 @@ +""" +Functional tests for chocolatey state +""" +import os +import pathlib + +import pytest + +import salt.utils.path +import salt.utils.win_reg + 
+pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, + pytest.mark.destructive_test, +] + + +@pytest.fixture(scope="module") +def chocolatey(states): + yield states.chocolatey + + +@pytest.fixture(scope="module") +def chocolatey_mod(modules): + + current_path = salt.utils.win_reg.read_value( + hive="HKLM", + key=r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", + vname="PATH", + )["vdata"] + url = "https://packages.chocolatey.org/chocolatey.1.2.1.nupkg" + with pytest.helpers.temp_file(name="choco.nupkg") as nupkg: + choco_pkg = pathlib.Path(str(nupkg)) + choco_dir = choco_pkg.parent / "choco_dir" + choco_script = choco_dir / "tools" / "chocolateyInstall.ps1" + + def install(): + # Install Chocolatey 1.2.1 + + # Download Package + modules.cp.get_url(path=url, dest=str(choco_pkg)) + + # Unzip Package + modules.archive.unzip( + zip_file=str(choco_pkg), + dest=str(choco_dir), + extract_perms=False, + ) + + # Run installer script + assert choco_script.exists() + result = modules.cmd.script( + source=str(choco_script), + cwd=str(choco_script.parent), + shell="powershell", + python_shell=True, + ) + assert result["retcode"] == 0 + + def uninstall(): + choco_dir = os.environ.get("ChocolateyInstall", False) + if choco_dir: + # Remove Chocolatey Directory + modules.file.remove(path=choco_dir, force=True) + # Remove Chocolatey Environment Variables + for env_var in modules.environ.items(): + if env_var.lower().startswith("chocolatey"): + modules.environ.setval( + key=env_var, val=False, false_unsets=True, permanent="HKLM" + ) + modules.environ.setval( + key=env_var, val=False, false_unsets=True, permanent="HKCU" + ) + salt.utils.win_reg.set_value( + hive="HKLM", + key=r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", + vname="PATH", + vdata=current_path, + ) + modules.win_path.rehash() + + # Remove unknown version + if salt.utils.path.which("choco.exe"): + uninstall() + + # Install 
known version + install() + + yield modules.chocolatey + + # Remove + uninstall() + + +@pytest.fixture(scope="function") +def clean(chocolatey_mod): + chocolatey_mod.uninstall(name="vim", force=True) + yield + chocolatey_mod.uninstall(name="vim", force=True) + + +@pytest.fixture(scope="function") +def vim(chocolatey_mod): + chocolatey_mod.install(name="vim", version="9.0.1672") + yield + chocolatey_mod.uninstall(name="vim", force=True) + + +def test_installed_latest(clean, chocolatey, chocolatey_mod): + chocolatey.installed(name="vim") + result = chocolatey_mod.version(name="vim") + assert "vim" in result + + +def test_installed_version(clean, chocolatey, chocolatey_mod): + chocolatey.installed(name="vim", version="9.0.1672") + result = chocolatey_mod.version(name="vim") + assert "vim" in result + assert result["vim"]["installed"][0] == "9.0.1672" + + +def test_uninstalled(vim, chocolatey, chocolatey_mod): + chocolatey.uninstalled(name="vim") + result = chocolatey_mod.version(name="vim") + assert "vim" not in result + + +def test_upgraded(vim, chocolatey, chocolatey_mod): + result = chocolatey_mod.version(name="vim") + assert "vim" in result + assert result["vim"]["installed"][0] == "9.0.1672" + chocolatey.upgraded(name="vim", version="9.0.1677") + result = chocolatey_mod.version(name="vim") + assert "vim" in result + assert result["vim"]["installed"][0] == "9.0.1677" diff --git a/tests/pytests/functional/states/test_chocolatey_latest.py b/tests/pytests/functional/states/test_chocolatey_latest.py new file mode 100644 index 000000000000..9d329d5fc597 --- /dev/null +++ b/tests/pytests/functional/states/test_chocolatey_latest.py @@ -0,0 +1,137 @@ +""" +Functional tests for chocolatey state +""" +import os +import pathlib + +import pytest + +import salt.utils.path +import salt.utils.win_reg + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, + pytest.mark.destructive_test, +] + + 
+@pytest.fixture(scope="module") +def chocolatey(states): + yield states.chocolatey + + +@pytest.fixture(scope="module") +def chocolatey_mod(modules): + + current_path = salt.utils.win_reg.read_value( + hive="HKLM", + key=r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", + vname="PATH", + )["vdata"] + url = "https://community.chocolatey.org/api/v2/package/chocolatey/" + with pytest.helpers.temp_file(name="choco.nupkg") as nupkg: + choco_pkg = pathlib.Path(str(nupkg)) + choco_dir = choco_pkg.parent / "choco_dir" + choco_script = choco_dir / "tools" / "chocolateyInstall.ps1" + + def install(): + # Install Chocolatey 1.2.1 + + # Download Package + modules.cp.get_url(path=url, dest=str(choco_pkg)) + + # Unzip Package + modules.archive.unzip( + zip_file=str(choco_pkg), + dest=str(choco_dir), + extract_perms=False, + ) + + # Run installer script + assert choco_script.exists() + result = modules.cmd.script( + source=str(choco_script), + cwd=str(choco_script.parent), + shell="powershell", + python_shell=True, + ) + assert result["retcode"] == 0 + + def uninstall(): + choco_dir = os.environ.get("ChocolateyInstall", False) + if choco_dir: + # Remove Chocolatey Directory + modules.file.remove(path=choco_dir, force=True) + # Remove Chocolatey Environment Variables + for env_var in modules.environ.items(): + if env_var.lower().startswith("chocolatey"): + modules.environ.setval( + key=env_var, val=False, false_unsets=True, permanent="HKLM" + ) + modules.environ.setval( + key=env_var, val=False, false_unsets=True, permanent="HKCU" + ) + salt.utils.win_reg.set_value( + hive="HKLM", + key=r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", + vname="PATH", + vdata=current_path, + ) + modules.win_path.rehash() + + # Remove unknown version + if salt.utils.path.which("choco.exe"): + uninstall() + + # Install known version + install() + + yield modules.chocolatey + + # Remove + uninstall() + + +@pytest.fixture(scope="function") +def clean(chocolatey_mod): + 
chocolatey_mod.uninstall(name="vim", force=True) + yield + chocolatey_mod.uninstall(name="vim", force=True) + + +@pytest.fixture(scope="function") +def vim(chocolatey_mod): + chocolatey_mod.install(name="vim", version="9.0.1672") + yield + chocolatey_mod.uninstall(name="vim", force=True) + + +def test_installed_latest(clean, chocolatey, chocolatey_mod): + chocolatey.installed(name="vim") + result = chocolatey_mod.version(name="vim") + assert "vim" in result + + +def test_installed_version(clean, chocolatey, chocolatey_mod): + chocolatey.installed(name="vim", version="9.0.1672") + result = chocolatey_mod.version(name="vim") + assert "vim" in result + assert result["vim"]["installed"][0] == "9.0.1672" + + +def test_uninstalled(vim, chocolatey, chocolatey_mod): + chocolatey.uninstalled(name="vim") + result = chocolatey_mod.version(name="vim") + assert "vim" not in result + + +def test_upgraded(vim, chocolatey, chocolatey_mod): + result = chocolatey_mod.version(name="vim") + assert "vim" in result + assert result["vim"]["installed"][0] == "9.0.1672" + chocolatey.upgraded(name="vim", version="9.0.1677") + result = chocolatey_mod.version(name="vim") + assert "vim" in result + assert result["vim"]["installed"][0] == "9.0.1677" diff --git a/tests/pytests/functional/states/test_etcd_mod.py b/tests/pytests/functional/states/test_etcd_mod.py index ede8debd1730..5a862b4c7bca 100644 --- a/tests/pytests/functional/states/test_etcd_mod.py +++ b/tests/pytests/functional/states/test_etcd_mod.py @@ -13,6 +13,7 @@ pytestmark = [ pytest.mark.skip_if_binaries_missing("docker", "dockerd", check_all=False), + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/states/test_file.py b/tests/pytests/functional/states/test_file.py index a3ad275e1418..9de115a01318 100644 --- a/tests/pytests/functional/states/test_file.py +++ b/tests/pytests/functional/states/test_file.py @@ -41,7 +41,7 @@ def do_GET(self): ) as reqfp: return_text = reqfp.read().encode("utf-8") # We're using this 
checksum as the etag to show file changes - checksum = hashlib.md5(return_text).hexdigest() + checksum = hashlib.sha256(return_text).hexdigest() if none_match == checksum: # Status code 304 Not Modified is returned if the file is unchanged status_code = 304 @@ -113,6 +113,7 @@ def web_root(tmp_path_factory): shutil.rmtree(str(_web_root), ignore_errors=True) +@pytest.mark.slow_test def test_file_managed_web_source_etag_operation( states, free_port, web_root, minion_opts ): @@ -138,7 +139,7 @@ def test_file_managed_web_source_etag_operation( minion_opts["cachedir"], "extrn_files", "base", - "localhost:{free_port}".format(free_port=free_port), + "localhost{free_port}".format(free_port=free_port), "foo.txt", ) cached_etag = cached_file + ".etag" @@ -200,3 +201,25 @@ def test_file_managed_web_source_etag_operation( # The modified time of the cached file now changes assert cached_file_mtime != os.path.getmtime(cached_file) + + +def test_file_symlink_replace_existing_link(states, tmp_path): + # symlink name and target for state + name = tmp_path / "foo" + target = tmp_path / "baz" + + # create existing symlink to replace + old_target = tmp_path / "bar" + name.symlink_to(old_target) + + ret = states.file.symlink( + name=str(name), + target=str(target), + ) + + assert ret.filtered == { + "name": str(name), + "changes": {"new": str(name)}, + "comment": f"Created new symlink {str(name)} -> {str(target)}", + "result": True, + } diff --git a/tests/pytests/functional/states/test_module.py b/tests/pytests/functional/states/test_module.py index bf2410ab529d..019c085c87b7 100644 --- a/tests/pytests/functional/states/test_module.py +++ b/tests/pytests/functional/states/test_module.py @@ -7,24 +7,22 @@ log = logging.getLogger(__name__) -@pytest.mark.slow_test +@pytest.mark.core_test def test_issue_58763(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ run_old: module.run: - name: test.random_hash - size: 10 - - 
hash_type: md5 + - hash_type: sha256 run_new: module.run: - test.random_hash: - size: 10 - - hash_type: md5 + - hash_type: sha256 """ ) with pytest.helpers.temp_file("issue-58763.sls", sls_contents, state_tree): @@ -39,17 +37,15 @@ def test_issue_58763(tmp_path, modules, state_tree, caplog): assert "Using new style module.run syntax: run_new" in caplog.messages -@pytest.mark.slow_test +@pytest.mark.core_test def test_issue_58763_a(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test.random_hash: module.run: - size: 10 - - hash_type: md5 + - hash_type: sha256 """ ) with pytest.helpers.temp_file("issue-58763.sls", sls_contents, state_tree): @@ -65,11 +61,9 @@ def test_issue_58763_a(tmp_path, modules, state_tree, caplog): ) -@pytest.mark.slow_test +@pytest.mark.core_test def test_issue_58763_b(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test.ping: @@ -87,11 +81,9 @@ def test_issue_58763_b(tmp_path, modules, state_tree, caplog): assert "Detected legacy module.run syntax: test.ping" in caplog.messages -@pytest.mark.slow_test +@pytest.mark.core_test def test_issue_62988_a(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test_foo: @@ -101,7 +93,7 @@ def test_issue_62988_a(tmp_path, modules, state_tree, caplog): module.wait: - test.random_hash: - size: 10 - - hash_type: md5 + - hash_type: sha256 - watch: - test: test_foo """ @@ -117,11 +109,9 @@ def test_issue_62988_a(tmp_path, modules, state_tree, caplog): assert "Using new style module.run syntax: run_new" in caplog.messages -@pytest.mark.slow_test +@pytest.mark.core_test def test_issue_62988_b(tmp_path, modules, state_tree, caplog): - venv_dir = tmp_path / "issue-2028-pip-installed" - sls_contents = dedent( """ test_foo: @@ -133,7 +123,7 @@ def test_issue_62988_b(tmp_path, modules, state_tree, caplog): 
module.wait: - test.random_hash: - size: 10 - - hash_type: md5 + - hash_type: sha256 """ ) with pytest.helpers.temp_file("issue-62988.sls", sls_contents, state_tree): diff --git a/tests/pytests/functional/states/test_pip_state.py b/tests/pytests/functional/states/test_pip_state.py index 3fc6ac7a1dfc..551c1472feb0 100644 --- a/tests/pytests/functional/states/test_pip_state.py +++ b/tests/pytests/functional/states/test_pip_state.py @@ -25,6 +25,10 @@ log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + def _win_user_where(username, password, program): cmd = "cmd.exe /c where {}".format(program) diff --git a/tests/pytests/functional/states/test_pkg.py b/tests/pytests/functional/states/test_pkg.py index 80df3b7b7251..c63dfb2784fa 100644 --- a/tests/pytests/functional/states/test_pkg.py +++ b/tests/pytests/functional/states/test_pkg.py @@ -43,10 +43,16 @@ def PKG_TARGETS(grains): if grains["os"] == "Windows": _PKG_TARGETS = ["vlc", "putty"] elif grains["os"] == "Amazon": - _PKG_TARGETS = ["lynx", "gnuplot"] + if grains["osfinger"] == "Amazon Linux-2023": + _PKG_TARGETS = ["lynx", "gnuplot-minimal"] + else: + _PKG_TARGETS = ["lynx", "gnuplot"] elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": - _PKG_TARGETS = ["wget", "zsh-html"] + if grains["osmajorrelease"] >= 5: + _PKG_TARGETS = ["wget", "zsh"] + else: + _PKG_TARGETS = ["wget", "zsh-html"] elif ( grains["os"] in ("CentOS Stream", "AlmaLinux") and grains["osmajorrelease"] == 9 @@ -64,7 +70,22 @@ def PKG_CAP_TARGETS(grains): _PKG_CAP_TARGETS = [] if grains["os_family"] == "Suse": if grains["os"] == "SUSE": - _PKG_CAP_TARGETS = [("perl(ZNC)", "znc-perl")] + _PKG_CAP_TARGETS = [("perl(YAML)", "perl-YAML")] + # sudo zypper install 'perl(YAML)' + # Loading repository data... + # Reading installed packages... + # 'perl(YAML)' not found in package names. Trying capabilities. + # Resolving package dependencies... 
+ # + # The following NEW package is going to be installed: + # perl-YAML + # + # 1 new package to install. + # Overall download size: 85.3 KiB. Already cached: 0 B. After the operation, additional 183.3 KiB will be used. + # Continue? [y/n/v/...? shows all options] (y): + + # So, it just doesn't work here? skip it for now + _PKG_CAP_TARGETS.clear() if not _PKG_CAP_TARGETS: pytest.skip("Capability not provided") return _PKG_CAP_TARGETS @@ -73,7 +94,7 @@ def PKG_CAP_TARGETS(grains): @pytest.fixture def PKG_32_TARGETS(grains): _PKG_32_TARGETS = [] - if grains["os_family"] == "RedHat": + if grains["os_family"] == "RedHat" and grains["oscodename"] != "Photon": if grains["os"] == "CentOS": if grains["osmajorrelease"] == 5: _PKG_32_TARGETS = ["xz-devel.i386"] @@ -87,7 +108,7 @@ def PKG_32_TARGETS(grains): @pytest.fixture def PKG_DOT_TARGETS(grains): _PKG_DOT_TARGETS = [] - if grains["os_family"] == "RedHat": + if grains["os_family"] == "RedHat" and grains["oscodename"] != "Photon": if grains["osmajorrelease"] == 5: _PKG_DOT_TARGETS = ["python-migrate0.5"] elif grains["osmajorrelease"] == 6: @@ -106,7 +127,7 @@ def PKG_DOT_TARGETS(grains): @pytest.fixture def PKG_EPOCH_TARGETS(grains): _PKG_EPOCH_TARGETS = [] - if grains["os_family"] == "RedHat": + if grains["os_family"] == "RedHat" and grains["oscodename"] != "Photon": if grains["osmajorrelease"] == 7: _PKG_EPOCH_TARGETS = ["comps-extras"] elif grains["osmajorrelease"] == 8: @@ -174,6 +195,7 @@ def run_command(*names): @pytest.mark.requires_salt_modules("pkg.version") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_001_installed(modules, states, PKG_TARGETS): """ This is a destructive test as it installs and then removes a package @@ -194,6 +216,7 @@ def test_pkg_001_installed(modules, states, PKG_TARGETS): @pytest.mark.usefixtures("VERSION_SPEC_SUPPORTED") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def 
test_pkg_002_installed_with_version(PKG_TARGETS, states, latest_version): """ This is a destructive test as it installs and then removes a package @@ -213,6 +236,7 @@ def test_pkg_002_installed_with_version(PKG_TARGETS, states, latest_version): @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_003_installed_multipkg(PKG_TARGETS, modules, states): """ This is a destructive test as it installs and then removes two packages @@ -236,6 +260,7 @@ def test_pkg_003_installed_multipkg(PKG_TARGETS, modules, states): @pytest.mark.usefixtures("VERSION_SPEC_SUPPORTED") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_004_installed_multipkg_with_version(PKG_TARGETS, latest_version, states): """ This is a destructive test as it installs and then removes two packages @@ -259,6 +284,7 @@ def test_pkg_004_installed_multipkg_with_version(PKG_TARGETS, latest_version, st @pytest.mark.requires_salt_modules("pkg.version") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_005_installed_32bit(PKG_32_TARGETS, modules, states): """ This is a destructive test as it installs and then removes a package @@ -283,6 +309,7 @@ def test_pkg_005_installed_32bit(PKG_32_TARGETS, modules, states): @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_006_installed_32bit_with_version(PKG_32_TARGETS, latest_version, states): """ This is a destructive test as it installs and then removes a package @@ -307,6 +334,7 @@ def test_pkg_006_installed_32bit_with_version(PKG_32_TARGETS, latest_version, st @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_007_with_dot_in_pkgname(PKG_DOT_TARGETS, latest_version, states): """ This tests for the regression found in the following issue: @@ -329,6 +357,7 @@ def test_pkg_007_with_dot_in_pkgname(PKG_DOT_TARGETS, 
latest_version, states): @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_008_epoch_in_version(PKG_EPOCH_TARGETS, latest_version, states): """ This tests for the regression found in the following issue: @@ -455,6 +484,7 @@ def test_pkg_011_latest_only_upgrade( @pytest.mark.usefixtures("WILDCARDS_SUPPORTED") @pytest.mark.requires_salt_modules("pkg.version") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_012_installed_with_wildcard_version(PKG_TARGETS, states, modules): """ This is a destructive test as it installs and then removes a package @@ -502,6 +532,7 @@ def test_pkg_012_installed_with_wildcard_version(PKG_TARGETS, states, modules): @pytest.mark.requires_salt_modules("pkg.version", "pkg.latest_version") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_013_installed_with_comparison_operator( grains, PKG_TARGETS, states, modules ): @@ -540,6 +571,7 @@ def test_pkg_013_installed_with_comparison_operator( @pytest.mark.requires_salt_modules("pkg.version") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_014_installed_missing_release(grains, PKG_TARGETS, states, modules): """ Tests that a version number missing the release portion still resolves @@ -572,6 +604,7 @@ def test_pkg_014_installed_missing_release(grains, PKG_TARGETS, states, modules) "pkg.hold", "pkg.unhold", "pkg.version", "pkg.list_pkgs" ) @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_015_installed_held(grains, modules, states, PKG_TARGETS): """ Tests that a package can be held even when the package is already installed. 
@@ -649,6 +682,7 @@ def test_pkg_015_installed_held(grains, modules, states, PKG_TARGETS): @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_016_conditionally_ignore_epoch(PKG_EPOCH_TARGETS, latest_version, states): """ See @@ -753,6 +787,7 @@ def test_pkg_017_installed_held_equals_false(grains, modules, states, PKG_TARGET @pytest.mark.requires_salt_modules("pkg.version") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_cap_001_installed(PKG_CAP_TARGETS, modules, states): """ This is a destructive test as it installs and then removes a package @@ -788,6 +823,7 @@ def test_pkg_cap_001_installed(PKG_CAP_TARGETS, modules, states): @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_cap_002_already_installed(PKG_CAP_TARGETS, modules, states): """ This is a destructive test as it installs and then removes a package @@ -829,6 +865,7 @@ def test_pkg_cap_002_already_installed(PKG_CAP_TARGETS, modules, states): @pytest.mark.usefixtures("VERSION_SPEC_SUPPORTED") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed") +@pytest.mark.slow_test def test_pkg_cap_003_installed_multipkg_with_version( PKG_CAP_TARGETS, PKG_TARGETS, @@ -891,6 +928,7 @@ def test_pkg_cap_003_installed_multipkg_with_version( @pytest.mark.requires_salt_modules("pkg.version") @pytest.mark.requires_salt_states("pkg.latest", "pkg.removed") +@pytest.mark.slow_test def test_pkg_cap_004_latest(PKG_CAP_TARGETS, modules, states): """ This tests pkg.latest with a package that has no epoch (or a zero @@ -930,6 +968,7 @@ def test_pkg_cap_004_latest(PKG_CAP_TARGETS, modules, states): @pytest.mark.requires_salt_modules("pkg.version") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed", "pkg.downloaded") +@pytest.mark.slow_test def test_pkg_cap_005_downloaded(PKG_CAP_TARGETS, modules, states): """ This is a destructive test as it installs 
and then removes a package @@ -963,6 +1002,7 @@ def test_pkg_cap_005_downloaded(PKG_CAP_TARGETS, modules, states): @pytest.mark.requires_salt_modules("pkg.version") @pytest.mark.requires_salt_states("pkg.installed", "pkg.removed", "pkg.uptodate") +@pytest.mark.slow_test def test_pkg_cap_006_uptodate(PKG_CAP_TARGETS, modules, states): """ This is a destructive test as it installs and then removes a package diff --git a/tests/pytests/functional/states/test_pkgrepo.py b/tests/pytests/functional/states/test_pkgrepo.py index cee5870d8b23..767ded0940d3 100644 --- a/tests/pytests/functional/states/test_pkgrepo.py +++ b/tests/pytests/functional/states/test_pkgrepo.py @@ -5,37 +5,33 @@ import salt.utils.files -@pytest.mark.skipif( - not any([x for x in ["ubuntu", "debian"] if x in platform.platform()]), - reason="Test only for debian based platforms", +@pytest.mark.parametrize( + "options", + [ + "", + " signed-by=/foo/bar ", + " trusted=yes", + "signed-by=/foo/bar arch=amd64,i386", + "signed-by=foo/bar trusted=yes arch=amd64", + ], ) -def test_adding_repo_file(states, tmp_path): - """ - test adding a repo file using pkgrepo.managed - """ - repo_file = str(tmp_path / "stable-binary.list") - repo_content = "deb http://www.deb-multimedia.org stable main" - ret = states.pkgrepo.managed(name=repo_content, file=repo_file, clean_file=True) - with salt.utils.files.fopen(repo_file, "r") as fp: - file_content = fp.read() - assert file_content.strip() == repo_content - - @pytest.mark.skipif( not any([x for x in ["ubuntu", "debian"] if x in platform.platform()]), reason="Test only for debian based platforms", ) -def test_adding_repo_file_arch(states, tmp_path): +def test_adding_repo_file_options(states, tmp_path, options): """ test adding a repo file using pkgrepo.managed - and setting architecture + and maintaining the user-supplied options """ repo_file = str(tmp_path / "stable-binary.list") - repo_content = "deb [arch=amd64 ] http://www.deb-multimedia.org stable main" + option = 
f"[{options}] " if options != "" else "" + expected_option = f"[{options.strip()}] " if options != "" else "" + repo_content = f"deb {option}http://www.deb-multimedia.org stable main" ret = states.pkgrepo.managed(name=repo_content, file=repo_file, clean_file=True) with salt.utils.files.fopen(repo_file, "r") as fp: file_content = fp.read() assert ( file_content.strip() - == "deb [arch=amd64] http://www.deb-multimedia.org stable main" + == f"deb {expected_option}http://www.deb-multimedia.org stable main" ) diff --git a/tests/pytests/functional/states/test_reg.py b/tests/pytests/functional/states/test_reg.py index 10a4d155aa6f..def550f55aca 100644 --- a/tests/pytests/functional/states/test_reg.py +++ b/tests/pytests/functional/states/test_reg.py @@ -49,7 +49,6 @@ def configure_loader_modules(): "dacl.check_perms": win_dacl.check_perms, }, }, - win_dacl: {"__opts__": {"test": False}}, } diff --git a/tests/pytests/functional/states/test_service.py b/tests/pytests/functional/states/test_service.py new file mode 100644 index 000000000000..a1e9673653ba --- /dev/null +++ b/tests/pytests/functional/states/test_service.py @@ -0,0 +1,132 @@ +""" +Tests for the service state +""" + +import os + +import pytest + +import salt.utils.path +import salt.utils.platform + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.destructive_test, + pytest.mark.slow_test, +] + + +INIT_DELAY = 5 +STOPPED = False +RUNNING = True + + +@pytest.fixture +def service_name(grains, modules): + # For local testing purposes + env_name = os.environ.get("SALT_FUNCTIONAL_TEST_SERVICE_NAME") + if env_name is not None: + return env_name + + service_name = "cron" + cmd_name = "crontab" + os_family = grains.get("os_family") + is_systemd = grains.get("systemd") + if os_family == "RedHat": + service_name = "crond" + elif os_family == "Arch": + service_name = "sshd" + cmd_name = "systemctl" + elif os_family == "MacOS": + service_name = "com.apple.AirPlayXPCHelper" + elif os_family == "Windows": + 
service_name = "Spooler" + + if os_family != "Windows" and salt.utils.path.which(cmd_name) is None: + pytest.skip("{} is not installed".format(cmd_name)) + + if is_systemd and modules.service.offline(): + pytest.skip("systemd is OFFLINE") + + return service_name + + +@pytest.fixture(autouse=True) +def setup_service(service_name, modules): + pre_srv_status = modules.service.status(service_name) + pre_srv_enabled = service_name in modules.service.get_enabled() + + try: + yield pre_srv_status + finally: + post_srv_status = modules.service.status(service_name) + post_srv_enabled = service_name in modules.service.get_enabled() + + if post_srv_status != pre_srv_status: + if pre_srv_status: + modules.service.start(service_name) + else: + modules.service.stop(service_name) + + if post_srv_enabled != pre_srv_enabled: + if pre_srv_enabled: + modules.service.enable(service_name) + else: + modules.service.disable(service_name) + + +def check_service_status(exp_return, modules, service_name): + """ + helper method to check status of service + """ + check_status = modules.service.status(service_name) + + if check_status is not exp_return: + pytest.fail("status of service is not returning correctly") + + +@pytest.mark.slow_test +def test_service_running(service_name, modules, states): + """ + test service.running state module + """ + if modules.service.status(service_name): + stop_service = modules.service.stop(service_name) + assert stop_service is True + check_service_status(STOPPED, modules, service_name) + + if salt.utils.platform.is_darwin(): + # make sure the service is enabled on macosx + enable = modules.service.enable(service_name) + + start_service = states.service.running(service_name) + assert start_service.full_return["result"] is True + check_service_status(RUNNING, modules, service_name) + + +@pytest.mark.slow_test +def test_service_dead(service_name, modules, states): + """ + test service.dead state module + """ + start_service = 
states.service.running(service_name) + assert start_service.full_return["result"] is True + check_service_status(RUNNING, modules, service_name) + + ret = states.service.dead(service_name) + assert ret.full_return["result"] is True + check_service_status(STOPPED, modules, service_name) + + +@pytest.mark.slow_test +def test_service_dead_init_delay(service_name, modules, states): + """ + test service.dead state module + """ + start_service = states.service.running(service_name) + assert start_service.full_return["result"] is True + check_service_status(RUNNING, modules, service_name) + + ret = states.service.dead(service_name, init_delay=INIT_DELAY) + assert ret.full_return["result"] is True + check_service_status(STOPPED, modules, service_name) diff --git a/tests/pytests/functional/states/test_ssh_auth.py b/tests/pytests/functional/states/test_ssh_auth.py new file mode 100644 index 000000000000..7ed7c7047fe4 --- /dev/null +++ b/tests/pytests/functional/states/test_ssh_auth.py @@ -0,0 +1,57 @@ +import logging +import pathlib + +import pytest + +import salt.states.ssh_auth as ssh_auth_state +import salt.utils.files + +log = logging.getLogger(__name__) + + +@pytest.fixture +def configure_loader_modules(modules, minion_opts): + loader = {"__salt__": modules, "__opts__": minion_opts, "__env__": "base"} + return {ssh_auth_state: loader} + + +@pytest.fixture(scope="module") +def system_user(): + with pytest.helpers.create_account() as system_account: + yield system_account + + +@pytest.mark.skip_if_not_root +@pytest.mark.destructive_test +@pytest.mark.slow_test +def test_ssh_auth_config(tmp_path, system_user, state_tree): + """ + test running ssh_auth state when + different config is set. Ensure + it does not edit the default config. 
+ """ + userdetails = system_user.info + user_ssh_dir = pathlib.Path(userdetails.home, ".ssh") + ret = ssh_auth_state.manage( + name="test", + user=system_user.username, + ssh_keys=["ssh-dss AAAAB3NzaCL0sQ9fJ5bYTEyY== root@domain"], + ) + with salt.utils.files.fopen(user_ssh_dir / "authorized_keys") as fp: + pre_data = fp.read() + file_contents = "ssh-dss AAAAB3NzaCL0sQ9fJ5bYTEyY== root@domain" + new_auth_file = tmp_path / "authorized_keys3" + with pytest.helpers.temp_file("authorized", file_contents, state_tree): + ssh_auth_state.manage( + name="test", + user=system_user.username, + source=f"salt://authorized", + config=str(new_auth_file), + ssh_keys=[""], + ) + with salt.utils.files.fopen(user_ssh_dir / "authorized_keys") as fp: + post_data = fp.read() + assert pre_data == post_data + with salt.utils.files.fopen(new_auth_file) as fp: + data = fp.read().strip() + assert data == file_contents diff --git a/tests/pytests/functional/states/test_user.py b/tests/pytests/functional/states/test_user.py index ba2d01c0143e..96b1ec55c882 100644 --- a/tests/pytests/functional/states/test_user.py +++ b/tests/pytests/functional/states/test_user.py @@ -76,6 +76,7 @@ def existing_account(): yield _account +@pytest.mark.slow_test def test_user_absent(states): """ Test user.absent with a non existing account @@ -116,7 +117,6 @@ def test_user_present_when_home_dir_does_not_18843(states, existing_account): ret = states.user.present( name=existing_account.username, home=existing_account.info.home, - remove_groups=False, ) assert ret.result is True assert pathlib.Path(existing_account.info.home).is_dir() @@ -227,7 +227,6 @@ def test_user_present_unicode(states, username, subtests): roomnumber="①②③", workphone="١٢٣٤", homephone="६७८", - remove_groups=False, ) assert ret.result is True @@ -428,3 +427,75 @@ def test_user_present_change_optional_groups( user_info = modules.user.info(username) assert user_info assert user_info["groups"] == [group_1.name] + + 
+@pytest.mark.skip_unless_on_linux(reason="underlying functionality only runs on Linux") +def test_user_present_no_groups(modules, states, username): + """ + test user.present when groups arg is not + included by the group is created in another + state. Re-run the states to ensure there are + not changes and it is idempotent. + """ + groups = ["testgroup1", "testgroup2"] + try: + ret = states.group.present(name=username, gid=61121) + assert ret.result is True + + ret = states.user.present( + name=username, + uid=61121, + gid=61121, + ) + assert ret.result is True + assert ret.changes["groups"] == [username] + assert ret.changes["name"] == username + + ret = states.group.present( + name=groups[0], + members=[username], + ) + assert ret.changes["members"] == [username] + + ret = states.group.present( + name=groups[1], + members=[username], + ) + assert ret.changes["members"] == [username] + + user_info = modules.user.info(username) + assert user_info + assert user_info["groups"] == [username, groups[0], groups[1]] + + # run again, expecting no changes + ret = states.group.present(name=username) + assert ret.result is True + assert ret.changes == {} + + ret = states.user.present( + name=username, + ) + assert ret.result is True + assert ret.changes == {} + + ret = states.group.present( + name=groups[0], + members=[username], + ) + assert ret.result is True + assert ret.changes == {} + + ret = states.group.present( + name=groups[1], + members=[username], + ) + assert ret.result is True + assert ret.changes == {} + + user_info = modules.user.info(username) + assert user_info + assert user_info["groups"] == [username, groups[0], groups[1]] + finally: + for group in groups: + ret = states.group.absent(name=group) + assert ret.result is True diff --git a/tests/pytests/functional/states/test_virtualenv_mod.py b/tests/pytests/functional/states/test_virtualenv_mod.py index 7432152acedc..af08c5dec219 100644 --- a/tests/pytests/functional/states/test_virtualenv_mod.py +++ 
b/tests/pytests/functional/states/test_virtualenv_mod.py @@ -9,6 +9,7 @@ pytestmark = [ pytest.mark.slow_test, + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False), ] diff --git a/tests/pytests/functional/states/test_x509_v2.py b/tests/pytests/functional/states/test_x509_v2.py index fe7cea6ba840..3cd09d7d8400 100644 --- a/tests/pytests/functional/states/test_x509_v2.py +++ b/tests/pytests/functional/states/test_x509_v2.py @@ -1,5 +1,5 @@ import base64 -from pathlib import Path +import pathlib import pytest @@ -26,6 +26,7 @@ pytestmark = [ pytest.mark.slow_test, pytest.mark.skipif(HAS_LIBS is False, reason="Needs cryptography library"), + pytest.mark.skip_on_fips_enabled_platform, ] @@ -703,7 +704,7 @@ def existing_pk(x509, pk_args, request): @pytest.fixture(params=["existing_cert"]) def existing_symlink(request): existing = request.getfixturevalue(request.param) - test_file = Path(existing).with_name("symlink") + test_file = pathlib.Path(existing).with_name("symlink") test_file.symlink_to(existing) yield test_file # cleanup is done by tmp_path @@ -884,7 +885,7 @@ def test_certificate_managed_test_true(x509, cert_args, rsa_privkey, ca_key): ret = x509.certificate_managed(**cert_args) assert ret.result is None assert ret.changes - assert not Path(cert_args["name"]).exists() + assert not pathlib.Path(cert_args["name"]).exists() @pytest.mark.usefixtures("existing_cert") @@ -1205,7 +1206,7 @@ def test_certificate_managed_chain_change( if cert_args["encoding"].startswith("pkcs7"): cert = cert[0] elif cert_args["encoding"] == "pkcs12": - if CRYPTOGRAPHY_VERSION[0] == 36: + if CRYPTOGRAPHY_VERSION[0] >= 36: # it seems (serial number) parsing of pkcs12 certificates is broken (?) 
in that release return cert = cert.cert.certificate @@ -1324,7 +1325,7 @@ def test_certificate_managed_file_managed_create_false( ret = x509.certificate_managed(**cert_args) assert ret.result is True assert not ret.changes - assert not Path(cert_args["name"]).exists() + assert not pathlib.Path(cert_args["name"]).exists() @pytest.mark.usefixtures("existing_cert") @@ -1397,7 +1398,7 @@ def test_certificate_managed_follow_symlinks( """ cert_args["name"] = str(existing_symlink) cert_args["encoding"] = encoding - assert Path(cert_args["name"]).is_symlink() + assert pathlib.Path(cert_args["name"]).is_symlink() cert_args["follow_symlinks"] = follow ret = x509.certificate_managed(**cert_args) assert bool(ret.changes) == (not follow) @@ -1417,13 +1418,13 @@ def test_certificate_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ cert_args["name"] = str(existing_symlink) - assert Path(cert_args["name"]).is_symlink() + assert pathlib.Path(cert_args["name"]).is_symlink() cert_args["follow_symlinks"] = follow cert_args["encoding"] = encoding cert_args["CN"] = "new" ret = x509.certificate_managed(**cert_args) assert ret.changes - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize("encoding", ["pem", "der"]) @@ -1436,7 +1437,7 @@ def test_certificate_managed_file_managed_error( cert_args["private_key"] = rsa_privkey cert_args["makedirs"] = False cert_args["encoding"] = encoding - cert_args["name"] = str(Path(cert_args["name"]).parent / "missing" / "cert") + cert_args["name"] = str(pathlib.Path(cert_args["name"]).parent / "missing" / "cert") ret = x509.certificate_managed(**cert_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -1504,7 +1505,7 @@ def test_crl_managed_test_true(x509, crl_args, crl_revoked): assert ret.result is None assert ret.changes assert ret.result is None - assert not 
Path(crl_args["name"]).exists() + assert not pathlib.Path(crl_args["name"]).exists() @pytest.mark.usefixtures("existing_crl") @@ -1708,7 +1709,7 @@ def test_crl_managed_file_managed_create_false(x509, crl_args): ret = x509.crl_managed(**crl_args) assert ret.result is True assert not ret.changes - assert not Path(crl_args["name"]).exists() + assert not pathlib.Path(crl_args["name"]).exists() @pytest.mark.usefixtures("existing_crl") @@ -1782,7 +1783,7 @@ def test_crl_managed_follow_symlinks( """ crl_args["name"] = str(existing_symlink) crl_args["encoding"] = encoding - assert Path(crl_args["name"]).is_symlink() + assert pathlib.Path(crl_args["name"]).is_symlink() crl_args["follow_symlinks"] = follow ret = x509.crl_managed(**crl_args) assert bool(ret.changes) == (not follow) @@ -1802,13 +1803,13 @@ def test_crl_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ crl_args["name"] = str(existing_symlink) - assert Path(crl_args["name"]).is_symlink() + assert pathlib.Path(crl_args["name"]).is_symlink() crl_args["follow_symlinks"] = follow crl_args["encoding"] = encoding crl_args["revoked"] = crl_revoked ret = x509.crl_managed(**crl_args) assert ret.changes - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize("encoding", ["pem", "der"]) @@ -1818,7 +1819,7 @@ def test_crl_managed_file_managed_error(x509, crl_args, encoding): """ crl_args["makedirs"] = False crl_args["encoding"] = encoding - crl_args["name"] = str(Path(crl_args["name"]).parent / "missing" / "crl") + crl_args["name"] = str(pathlib.Path(crl_args["name"]).parent / "missing" / "crl") ret = x509.crl_managed(**crl_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -1866,7 +1867,7 @@ def test_csr_managed_test_true(x509, csr_args, rsa_privkey): ret = x509.csr_managed(**csr_args) assert ret.result is None assert ret.changes - assert not 
Path(csr_args["name"]).exists() + assert not pathlib.Path(csr_args["name"]).exists() @pytest.mark.usefixtures("existing_csr") @@ -2002,7 +2003,7 @@ def test_csr_managed_file_managed_create_false(x509, csr_args): ret = x509.csr_managed(**csr_args) assert ret.result is True assert not ret.changes - assert not Path(csr_args["name"]).exists() + assert not pathlib.Path(csr_args["name"]).exists() @pytest.mark.usefixtures("existing_csr") @@ -2066,12 +2067,12 @@ def test_csr_managed_follow_symlinks( the checking of the existing file is performed by the x509 module """ csr_args["name"] = str(existing_symlink) - assert Path(csr_args["name"]).is_symlink() + assert pathlib.Path(csr_args["name"]).is_symlink() csr_args["follow_symlinks"] = follow csr_args["encoding"] = encoding ret = x509.csr_managed(**csr_args) assert bool(ret.changes) == (not follow) - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize( @@ -2088,14 +2089,14 @@ def test_csr_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ csr_args["name"] = str(existing_symlink) - assert Path(csr_args["name"]).is_symlink() + assert pathlib.Path(csr_args["name"]).is_symlink() csr_args["follow_symlinks"] = follow csr_args["encoding"] = encoding csr_args["CN"] = "new" ret = x509.csr_managed(**csr_args) assert ret.result assert ret.changes - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.parametrize("encoding", ["pem", "der"]) @@ -2105,7 +2106,7 @@ def test_csr_managed_file_managed_error(x509, csr_args, encoding): """ csr_args["makedirs"] = False csr_args["encoding"] = encoding - csr_args["name"] = str(Path(csr_args["name"]).parent / "missing" / "csr") + csr_args["name"] = str(pathlib.Path(csr_args["name"]).parent / "missing" / "csr") ret = x509.csr_managed(**csr_args) assert ret.result is False assert "Could not create file, see 
file.managed output" in ret.comment @@ -2170,6 +2171,21 @@ def test_private_key_managed_existing(x509, pk_args): _assert_not_changed(ret) +@pytest.mark.usefixtures("existing_pk") +@pytest.mark.parametrize( + "existing_pk", + [ + {"algo": "rsa", "keysize": 3072}, + ], + indirect=True, +) +def test_private_key_managed_existing_keysize_change_to_default(x509, pk_args): + pk_args.pop("keysize") + ret = x509.private_key_managed(**pk_args) + assert ret.changes + assert ret.changes["keysize"] == 2048 + + @pytest.mark.usefixtures("existing_pk") def test_private_key_managed_existing_new(x509, pk_args): cur = _get_privkey(pk_args["name"]) @@ -2297,7 +2313,7 @@ def test_private_key_managed_file_managed_create_false(x509, pk_args): ret = x509.private_key_managed(**pk_args) assert ret.result is True assert not ret.changes - assert not Path(pk_args["name"]).exists() + assert not pathlib.Path(pk_args["name"]).exists() @pytest.mark.usefixtures("existing_pk") @@ -2346,7 +2362,7 @@ def test_private_key_managed_follow_symlinks( """ pk_args["name"] = str(existing_symlink) pk_args["encoding"] = encoding - assert Path(pk_args["name"]).is_symlink() + assert pathlib.Path(pk_args["name"]).is_symlink() pk_args["follow_symlinks"] = follow ret = x509.private_key_managed(**pk_args) assert bool(ret.changes) == (not follow) @@ -2366,13 +2382,13 @@ def test_private_key_managed_follow_symlinks_changes( the checking of the existing file is performed by the x509 module """ pk_args["name"] = str(existing_symlink) - assert Path(pk_args["name"]).is_symlink() + assert pathlib.Path(pk_args["name"]).is_symlink() pk_args["follow_symlinks"] = follow pk_args["encoding"] = encoding pk_args["algo"] = "ec" ret = x509.private_key_managed(**pk_args) assert ret.changes - assert Path(ret.name).is_symlink() == follow + assert pathlib.Path(ret.name).is_symlink() == follow @pytest.mark.usefixtures("existing_pk") @@ -2400,7 +2416,7 @@ def test_private_key_managed_file_managed_error(x509, pk_args, encoding): """ 
pk_args["makedirs"] = False pk_args["encoding"] = encoding - pk_args["name"] = str(Path(pk_args["name"]).parent / "missing" / "pk") + pk_args["name"] = str(pathlib.Path(pk_args["name"]).parent / "missing" / "pk") ret = x509.private_key_managed(**pk_args) assert ret.result is False assert "Could not create file, see file.managed output" in ret.comment @@ -2456,7 +2472,7 @@ def test_certificate_managed_should_not_fail_with_removed_args( cert_args["days_valid"] = 30 cert_args["days_remaining"] = 7 cert_args["private_key"] = rsa_privkey - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.certificate_managed(**cert_args, **arg) assert ret.result is True cert = _get_cert(cert_args["name"]) @@ -2469,7 +2485,7 @@ def test_certificate_managed_warns_about_algorithm_renaming( cert_args["days_valid"] = 30 cert_args["days_remaining"] = 7 cert_args["private_key"] = rsa_privkey - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.certificate_managed(**cert_args, algorithm="sha512") assert ret.result is True cert = _get_cert(cert_args["name"]) @@ -2483,7 +2499,7 @@ def test_certificate_managed_warns_about_long_name_attributes( cert_args["days_remaining"] = 7 cert_args["commonName"] = "success" cert_args["private_key"] = rsa_privkey - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.certificate_managed(**cert_args) assert ret.result is True cert = _get_cert(cert_args["name"]) @@ -2495,7 +2511,7 @@ def test_certificate_managed_warns_about_long_extensions(x509, cert_args, rsa_pr cert_args["days_valid"] = 30 cert_args["days_remaining"] = 7 cert_args["private_key"] = rsa_privkey - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.certificate_managed(**cert_args) assert ret.result is True cert = _get_cert(cert_args["name"]) @@ -2508,7 +2524,7 @@ def test_certificate_managed_warns_about_long_extensions(x509, cert_args, rsa_pr @pytest.mark.parametrize("arg", 
[{"version": 1}, {"text": True}]) def test_csr_managed_should_not_fail_with_removed_args(x509, arg, csr_args): - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.csr_managed(**csr_args, **arg) assert ret.result is True csr = _get_csr(csr_args["name"]) @@ -2516,7 +2532,7 @@ def test_csr_managed_should_not_fail_with_removed_args(x509, arg, csr_args): def test_csr_managed_warns_about_algorithm_renaming(x509, csr_args): - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.csr_managed(**csr_args, algorithm="sha512") assert ret.result is True csr = _get_csr(csr_args["name"]) @@ -2525,7 +2541,7 @@ def test_csr_managed_warns_about_algorithm_renaming(x509, csr_args): def test_csr_managed_warns_about_long_name_attributes(x509, csr_args): csr_args.pop("CN", None) - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.csr_managed(**csr_args, commonName="deprecated_yo") assert ret.result is True csr = _get_csr(csr_args["name"]) @@ -2534,7 +2550,7 @@ def test_csr_managed_warns_about_long_name_attributes(x509, csr_args): def test_csr_managed_warns_about_long_extensions(x509, csr_args): csr_args["X509v3 Basic Constraints"] = "critical CA:FALSE" - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.csr_managed(**csr_args) assert ret.result is True csr = _get_csr(csr_args["name"]) @@ -2549,7 +2565,7 @@ def test_csr_managed_warns_about_long_extensions(x509, csr_args): def test_crl_managed_should_not_fail_with_removed_args(x509, arg, crl_args): crl_args["days_remaining"] = 3 crl_args["days_valid"] = 7 - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.crl_managed(**crl_args, **arg) assert ret.result is True crl = _get_crl(crl_args["name"]) @@ -2564,7 +2580,7 @@ def test_crl_managed_should_recognize_old_style_revoked(x509, crl_args, crl_revo crl_args["revoked"] = revoked crl_args["days_remaining"] = 3 crl_args["days_valid"] = 7 
- with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.crl_managed(**crl_args) assert ret.result is True crl = _get_crl(crl_args["name"]) @@ -2587,7 +2603,7 @@ def test_crl_managed_should_recognize_old_style_revoked_for_change_detection( crl_args["revoked"] = revoked crl_args["days_remaining"] = 3 crl_args["days_valid"] = 7 - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.crl_managed(**crl_args) assert ret.result is True assert not ret.changes @@ -2598,7 +2614,7 @@ def test_crl_managed_should_recognize_old_style_reason(x509, crl_args): crl_args["revoked"] = revoked crl_args["days_remaining"] = 3 crl_args["days_valid"] = 7 - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.crl_managed(**crl_args) assert ret.result is True crl = _get_crl(crl_args["name"]) @@ -2614,14 +2630,14 @@ def test_crl_managed_should_recognize_old_style_reason(x509, crl_args): "arg", [{"cipher": "aes_256_cbc"}, {"verbose": True}, {"text": True}] ) def test_private_key_managed_should_not_fail_with_removed_args(x509, arg, pk_args): - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.private_key_managed(**pk_args, **arg) assert ret.result is True assert _get_privkey(pk_args["name"]) def test_private_key_managed_warns_about_bits_renaming(x509, pk_args): - with pytest.deprecated_call(): + with pytest.warns(DeprecationWarning): ret = x509.private_key_managed(**pk_args, bits=3072) assert ret.result is True pk = _get_privkey(pk_args["name"]) @@ -2678,7 +2694,7 @@ def _assert_cert_basic( def _get_cert(cert, encoding="pem", passphrase=None): try: - p = Path(cert) + p = pathlib.Path(cert) if p.exists(): cert = p.read_bytes() except Exception: # pylint: disable=broad-except @@ -2760,7 +2776,7 @@ def _assert_not_changed(ret): def _get_crl(crl, encoding="pem"): try: - p = Path(crl) + p = pathlib.Path(crl) if p.exists(): crl = p.read_bytes() except Exception: # pylint: 
disable=broad-except @@ -2778,7 +2794,7 @@ def _get_crl(crl, encoding="pem"): def _get_csr(csr, encoding="pem"): try: - p = Path(csr) + p = pathlib.Path(csr) if p.exists(): csr = p.read_bytes() except Exception: # pylint: disable=broad-except @@ -2796,7 +2812,7 @@ def _get_csr(csr, encoding="pem"): def _get_privkey(pk, encoding="pem", passphrase=None): try: - p = Path(pk) + p = pathlib.Path(pk) if p.exists(): pk = p.read_bytes() except Exception: # pylint: disable=broad-except diff --git a/tests/pytests/functional/states/test_zookeeper.py b/tests/pytests/functional/states/test_zookeeper.py index d33f7e47eabd..e735fdd1cf08 100644 --- a/tests/pytests/functional/states/test_zookeeper.py +++ b/tests/pytests/functional/states/test_zookeeper.py @@ -15,6 +15,7 @@ pytestmark = [ pytest.mark.slow_test, pytest.mark.skip_if_binaries_missing("dockerd"), + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/states/win_lgpo/test_admx_policy.py b/tests/pytests/functional/states/win_lgpo/test_admx_policy.py index b8a40261bd05..bb2edcbcd2c9 100644 --- a/tests/pytests/functional/states/win_lgpo/test_admx_policy.py +++ b/tests/pytests/functional/states/win_lgpo/test_admx_policy.py @@ -15,6 +15,7 @@ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, pytest.mark.destructive_test, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py b/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py index 9129d82ed674..70b1638a849e 100644 --- a/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py +++ b/tests/pytests/functional/states/win_lgpo/test_adv_audit_settings_state.py @@ -8,6 +8,7 @@ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, pytest.mark.destructive_test, + pytest.mark.slow_test, ] @@ -19,11 +20,13 @@ def configure_loader_modules(minion_opts, modules): "__opts__": minion_opts, "__salt__": modules, "__utils__": utils, + "__context__": {}, 
}, win_lgpo_module: { "__opts__": minion_opts, "__salt__": modules, "__utils__": utils, + "__context__": {}, }, } diff --git a/tests/pytests/functional/states/win_lgpo/test_audit_settings_state.py b/tests/pytests/functional/states/win_lgpo/test_audit_settings_state.py index 400c99f03b76..6f45461618d0 100644 --- a/tests/pytests/functional/states/win_lgpo/test_audit_settings_state.py +++ b/tests/pytests/functional/states/win_lgpo/test_audit_settings_state.py @@ -10,6 +10,7 @@ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, pytest.mark.destructive_test, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/states/win_lgpo/test_rights_assignment.py b/tests/pytests/functional/states/win_lgpo/test_rights_assignment.py index f7cc1300d829..9dded5a0744e 100644 --- a/tests/pytests/functional/states/win_lgpo/test_rights_assignment.py +++ b/tests/pytests/functional/states/win_lgpo/test_rights_assignment.py @@ -9,6 +9,7 @@ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, pytest.mark.destructive_test, + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/test_payload.py b/tests/pytests/functional/test_payload.py index 9d069eb0f4a8..8dcd3fa1a560 100644 --- a/tests/pytests/functional/test_payload.py +++ b/tests/pytests/functional/test_payload.py @@ -138,6 +138,19 @@ def test_destroy(sreq, echo_server): """ Test the __del__ capabilities """ + # ensure we actually have an open socket and not just testing against + # no actual sockets created. + assert sreq.send("clear", "foo") == {"enc": "clear", "load": "foo"} # ensure no exceptions when we go to destroy the sreq, since __del__ # swallows exceptions, we have to call destroy directly sreq.destroy() + + +@pytest.mark.slow_test +def test_clear_socket(sreq, echo_server): + # ensure we actually have an open socket and not just testing against + # no actual sockets created. 
+ assert sreq.send("clear", "foo") == {"enc": "clear", "load": "foo"} + assert hasattr(sreq, "_socket") + sreq.clear_socket() + assert hasattr(sreq, "_socket") is False diff --git a/tests/pytests/functional/transport/ipc/test_pub_server_channel.py b/tests/pytests/functional/transport/ipc/test_pub_server_channel.py index f9360297aa4b..63d7239968df 100644 --- a/tests/pytests/functional/transport/ipc/test_pub_server_channel.py +++ b/tests/pytests/functional/transport/ipc/test_pub_server_channel.py @@ -13,9 +13,10 @@ pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_on_spawning_platform( reason="These tests are currently broken on spawning platforms. Need to be rewritten.", - ) + ), ] diff --git a/tests/pytests/functional/transport/server/test_req_channel.py b/tests/pytests/functional/transport/server/test_req_channel.py index eed86f21c68a..46a3b2fe0e50 100644 --- a/tests/pytests/functional/transport/server/test_req_channel.py +++ b/tests/pytests/functional/transport/server/test_req_channel.py @@ -11,8 +11,6 @@ import salt.exceptions import salt.ext.tornado.gen import salt.master -import salt.transport.client -import salt.transport.server import salt.utils.platform import salt.utils.process import salt.utils.stringutils @@ -23,7 +21,8 @@ pytestmark = [ pytest.mark.skip_on_spawning_platform( reason="These tests are currently broken on spawning platforms. 
Need to be rewritten.", - ) + ), + pytest.mark.slow_test, ] diff --git a/tests/pytests/functional/transport/tcp/test_load_balanced_server.py b/tests/pytests/functional/transport/tcp/test_load_balanced_server.py new file mode 100644 index 000000000000..9ab429b1ff44 --- /dev/null +++ b/tests/pytests/functional/transport/tcp/test_load_balanced_server.py @@ -0,0 +1,56 @@ +import multiprocessing +import socket +import threading +import time + +import pytest + +import salt.transport.tcp + +pytestmark = [ + pytest.mark.core_test, +] + + +@pytest.mark.skip_on_fips_enabled_platform +def test_tcp_load_balancer_server(master_opts, io_loop): + + messages = [] + + def handler(stream, message, header): + messages.append(message) + + queue = multiprocessing.Queue() + server = salt.transport.tcp.LoadBalancerServer(master_opts, queue) + worker = salt.transport.tcp.LoadBalancerWorker(queue, handler, io_loop=io_loop) + + def run_loop(): + io_loop.start() + + loop_thread = threading.Thread(target=run_loop) + loop_thread.start() + + thread = threading.Thread(target=server.run) + thread.start() + + # Wait for bind to happen. 
+ time.sleep(0.5) + + package = {"foo": "bar"} + payload = salt.transport.frame.frame_msg(package) + sock = socket.socket() + sock.connect(("127.0.0.1", master_opts["ret_port"])) + sock.send(payload) + + try: + start = time.monotonic() + while not messages: + time.sleep(0.3) + if time.monotonic() - start > 30: + assert False, "Took longer than 30 seconds to receive message" + assert [package] == messages + finally: + server.close() + thread.join() + io_loop.stop() + worker.close() diff --git a/tests/pytests/functional/transport/tcp/test_pub_server.py b/tests/pytests/functional/transport/tcp/test_pub_server.py new file mode 100644 index 000000000000..a25b53851381 --- /dev/null +++ b/tests/pytests/functional/transport/tcp/test_pub_server.py @@ -0,0 +1,58 @@ +import threading +import time + +import salt.ext.tornado.gen +import salt.transport.tcp + + +async def test_pub_channel(master_opts, minion_opts, io_loop): + def presence_callback(client): + pass + + def remove_presence_callback(client): + pass + + master_opts["transport"] = "tcp" + minion_opts.update(master_ip="127.0.0.1", transport="tcp") + + server = salt.transport.tcp.TCPPublishServer(master_opts) + + client = salt.transport.tcp.TCPPubClient(minion_opts, io_loop) + + payloads = [] + + publishes = [] + + def publish_payload(payload, callback): + server.publish_payload(payload) + payloads.append(payload) + + def on_recv(message): + print("ON RECV") + publishes.append(message) + + thread = threading.Thread( + target=server.publish_daemon, + args=(publish_payload, presence_callback, remove_presence_callback), + ) + thread.start() + + # Wait for socket to bind. 
+ time.sleep(3) + + await client.connect(master_opts["publish_port"]) + client.on_recv(on_recv) + + print("Publish message") + server.publish({"meh": "bah"}) + + start = time.monotonic() + try: + while not publishes: + await salt.ext.tornado.gen.sleep(0.3) + if time.monotonic() - start > 30: + assert False, "Message not published after 30 seconds" + finally: + server.io_loop.stop() + thread.join() + server.io_loop.close(all_fds=True) diff --git a/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py b/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py index 3e46ab1b6a14..2a357c7c5dbf 100644 --- a/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py +++ b/tests/pytests/functional/transport/zeromq/test_pub_server_channel.py @@ -1,7 +1,10 @@ import logging +import threading +import time import pytest +import salt.transport.zeromq from tests.support.mock import MagicMock, patch from tests.support.pytest.transport import PubServerChannelProcess @@ -9,6 +12,7 @@ pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, pytest.mark.skip_on_freebsd(reason="Temporarily skipped on FreeBSD."), pytest.mark.skip_on_spawning_platform( reason="These tests are currently broken on spawning platforms. 
Need to be rewritten.", @@ -51,3 +55,86 @@ def test_zeromq_filtering(salt_master, salt_minion): assert len(results) == send_num, "{} != {}, difference: {}".format( len(results), send_num, set(expect).difference(results) ) + + +def test_pub_channel(master_opts): + server = salt.transport.zeromq.PublishServer(master_opts) + + payloads = [] + + def publish_payload(payload): + server.publish_payload(payload) + payloads.append(payload) + + thread = threading.Thread(target=server.publish_daemon, args=(publish_payload,)) + thread.start() + + server.publish({"meh": "bah"}) + + start = time.monotonic() + try: + while not payloads: + time.sleep(0.3) + if time.monotonic() - start > 30: + assert False, "No message received after 30 seconds" + finally: + server.close() + server.io_loop.stop() + thread.join() + server.io_loop.close(all_fds=True) + + +def test_pub_channel_filtering(master_opts): + master_opts["zmq_filtering"] = True + server = salt.transport.zeromq.PublishServer(master_opts) + + payloads = [] + + def publish_payload(payload): + server.publish_payload(payload) + payloads.append(payload) + + thread = threading.Thread(target=server.publish_daemon, args=(publish_payload,)) + thread.start() + + server.publish({"meh": "bah"}) + + start = time.monotonic() + try: + while not payloads: + time.sleep(0.3) + if time.monotonic() - start > 30: + assert False, "No message received after 30 seconds" + finally: + server.close() + server.io_loop.stop() + thread.join() + server.io_loop.close(all_fds=True) + + +def test_pub_channel_filtering_topic(master_opts): + master_opts["zmq_filtering"] = True + server = salt.transport.zeromq.PublishServer(master_opts) + + payloads = [] + + def publish_payload(payload): + server.publish_payload(payload, topic_list=["meh"]) + payloads.append(payload) + + thread = threading.Thread(target=server.publish_daemon, args=(publish_payload,)) + thread.start() + + server.publish({"meh": "bah"}) + + start = time.monotonic() + try: + while not payloads: + 
time.sleep(0.3) + if time.monotonic() - start > 30: + assert False, "No message received after 30 seconds" + finally: + server.close() + server.io_loop.stop() + thread.join() + server.io_loop.close(all_fds=True) diff --git a/tests/pytests/functional/transport/zeromq/test_request_client.py b/tests/pytests/functional/transport/zeromq/test_request_client.py new file mode 100644 index 000000000000..4ee99f49aa3d --- /dev/null +++ b/tests/pytests/functional/transport/zeromq/test_request_client.py @@ -0,0 +1,39 @@ +import pytest +import pytestshellutils.utils.ports +import zmq +import zmq.eventloop.zmqstream + +import salt.ext.tornado.gen +import salt.transport.zeromq + + +@pytest.fixture +def port(): + return pytestshellutils.utils.ports.get_unused_localhost_port() + + +async def test_request_channel_issue_64627(io_loop, minion_opts, port): + """ + Validate socket is preserved until request channel is explicitly closed. + """ + minion_opts["master_uri"] = f"tcp://127.0.0.1:{port}" + + ctx = zmq.Context() + socket = ctx.socket(zmq.REP) + socket.bind(minion_opts["master_uri"]) + stream = zmq.eventloop.zmqstream.ZMQStream(socket, io_loop=io_loop) + + @salt.ext.tornado.gen.coroutine + def req_handler(stream, msg): + yield stream.send(msg[0]) + + stream.on_recv_stream(req_handler) + + request_client = salt.transport.zeromq.RequestClient(minion_opts, io_loop) + + rep = await request_client.send(b"foo") + req_socket = request_client.message_client.socket + rep = await request_client.send(b"foo") + assert req_socket is request_client.message_client.socket + request_client.close() + assert request_client.message_client.socket is None diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py b/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py deleted file mode 100644 index c45b5730a8e4..000000000000 --- a/tests/pytests/functional/utils/pyinstaller/rthooks/test_salt_utils_vt_terminal.py +++ /dev/null @@ -1,142 
+0,0 @@ -import json -import os -import sys - -import pytest - -import salt.utils.pyinstaller.rthooks._overrides as overrides -from tests.support import mock -from tests.support.helpers import PatchedEnviron - - -@pytest.fixture(params=("LD_LIBRARY_PATH", "LIBPATH")) -def envvar(request): - return request.param - - -@pytest.fixture -def meipass(envvar): - with mock.patch("salt.utils.pyinstaller.rthooks._overrides.sys") as patched_sys: - patched_sys._MEIPASS = "{}_VALUE".format(envvar) - assert overrides.sys._MEIPASS == "{}_VALUE".format(envvar) - yield "{}_VALUE".format(envvar) - assert not hasattr(sys, "_MEIPASS") - assert not hasattr(overrides.sys, "_MEIPASS") - - -def test_vt_terminal_environ_cleanup_original(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - with PatchedEnviron(**{orig_envvar: meipass}): - original_env = dict(os.environ) - assert orig_envvar in original_env - instance = overrides.PyinstallerTerminal( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_vt_terminal_environ_cleanup_original_passed_directly(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - env = { - orig_envvar: meipass, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerTerminal( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if 
stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_vt_terminal_environ_cleanup(envvar, meipass): - with PatchedEnviron(**{envvar: meipass}): - original_env = dict(os.environ) - assert envvar in original_env - instance = overrides.PyinstallerTerminal( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == "" - - -def test_vt_terminal_environ_cleanup_passed_directly_not_removed(envvar, meipass): - env = { - envvar: envvar, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerTerminal( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stream_stdout=False, - stream_stderr=False, - ) - buffer_o = buffer_e = "" - while instance.has_unread_data: - stdout, stderr = instance.recv() - if stdout: - buffer_o += stdout - if stderr: - buffer_e += stderr - instance.terminate() - - assert instance.exitstatus == 0 - returned_env = json.loads(buffer_o) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == envvar diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py b/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py deleted file mode 100644 index 836e392d016a..000000000000 --- 
a/tests/pytests/functional/utils/pyinstaller/rthooks/test_subprocess.py +++ /dev/null @@ -1,111 +0,0 @@ -import json -import os -import subprocess -import sys - -import pytest - -import salt.utils.pyinstaller.rthooks._overrides as overrides -from tests.support import mock -from tests.support.helpers import PatchedEnviron - - -@pytest.fixture(params=("LD_LIBRARY_PATH", "LIBPATH")) -def envvar(request): - return request.param - - -@pytest.fixture -def meipass(envvar): - with mock.patch("salt.utils.pyinstaller.rthooks._overrides.sys") as patched_sys: - patched_sys._MEIPASS = "{}_VALUE".format(envvar) - assert overrides.sys._MEIPASS == "{}_VALUE".format(envvar) - yield "{}_VALUE".format(envvar) - assert not hasattr(sys, "_MEIPASS") - assert not hasattr(overrides.sys, "_MEIPASS") - - -def test_subprocess_popen_environ_cleanup_original(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - with PatchedEnviron(**{orig_envvar: meipass}): - original_env = dict(os.environ) - assert orig_envvar in original_env - instance = overrides.PyinstallerPopen( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_subprocess_popen_environ_cleanup_original_passed_directly(envvar, meipass): - orig_envvar = "{}_ORIG".format(envvar) - env = { - orig_envvar: meipass, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerPopen( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert 
returned_env != original_env - assert envvar in returned_env - assert orig_envvar not in returned_env - assert returned_env[envvar] == meipass - - -def test_subprocess_popen_environ_cleanup(envvar, meipass): - with PatchedEnviron(**{envvar: meipass}): - original_env = dict(os.environ) - assert envvar in original_env - instance = overrides.PyinstallerPopen( - [ - sys.executable, - "-c", - "import os, json; print(json.dumps(dict(os.environ)))", - ], - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == "" - - -def test_subprocess_popen_environ_cleanup_passed_directly_not_removed(envvar, meipass): - env = { - envvar: envvar, - } - original_env = dict(os.environ) - - instance = overrides.PyinstallerPopen( - [sys.executable, "-c", "import os, json; print(json.dumps(dict(os.environ)))"], - env=env.copy(), - stdout=subprocess.PIPE, - universal_newlines=True, - ) - stdout, _ = instance.communicate() - assert instance.returncode == 0 - returned_env = json.loads(stdout) - assert returned_env != original_env - assert envvar in returned_env - assert returned_env[envvar] == envvar diff --git a/tests/pytests/functional/utils/test_cache.py b/tests/pytests/functional/utils/test_cache.py new file mode 100644 index 000000000000..d405b8246fc0 --- /dev/null +++ b/tests/pytests/functional/utils/test_cache.py @@ -0,0 +1,83 @@ +import os + +import pytest + +import salt.utils.cache +import salt.utils.files +import salt.utils.path +import salt.version + +_DUMMY_FILES = ( + "data.txt", + "foo.t2", + "bar.t3", + "nested/test", + "nested/cache.txt", + "n/n1/n2/n3/n4/n5", +) + + +def _make_dummy_files(tmp_path): + for full_path in _DUMMY_FILES: + full_path = salt.utils.path.join(tmp_path, full_path) + path, _ = os.path.split(full_path) + if not os.path.isdir(path): + os.makedirs(path) 
+ with salt.utils.files.fopen(full_path, "w") as file: + file.write("data") + + +def _dummy_files_exists(tmp_path): + """ + True if all files exists + False if all files are missing + None if some files exists and others are missing + """ + ret = None + for full_path in _DUMMY_FILES: + full_path = salt.utils.path.join(tmp_path, full_path) + is_file = os.path.isfile(full_path) + if ret is None: + ret = is_file + elif ret is not is_file: + return None # Some files are found and others are missing + return ret + + +def test_verify_cache_version_bad_path(): + with pytest.raises(ValueError): + # cache version should fail if given bad file python + salt.utils.cache.verify_cache_version("\0/bad/path") + + +def test_verify_cache_version(tmp_path): + # cache version should make dir if it does not exist + tmp_path = str(salt.utils.path.join(str(tmp_path), "work", "salt")) + cache_version = salt.utils.path.join(tmp_path, "cache_version") + + # check that cache clears when no cache_version is present + _make_dummy_files(tmp_path) + assert salt.utils.cache.verify_cache_version(tmp_path) is False + assert _dummy_files_exists(tmp_path) is False + + # check that cache_version has correct salt version + with salt.utils.files.fopen(cache_version, "r") as file: + assert "\n".join(file.readlines()) == salt.version.__version__ + + # check that cache does not get clear when check is called multiple times + _make_dummy_files(tmp_path) + for _ in range(3): + assert salt.utils.cache.verify_cache_version(tmp_path) is True + assert _dummy_files_exists(tmp_path) is True + + # check that cache clears when a different version is present + with salt.utils.files.fopen(cache_version, "w") as file: + file.write("-1") + assert salt.utils.cache.verify_cache_version(tmp_path) is False + assert _dummy_files_exists(tmp_path) is False + + # check that cache does not get clear when check is called multiple times + _make_dummy_files(tmp_path) + for _ in range(3): + assert 
salt.utils.cache.verify_cache_version(tmp_path) is True + assert _dummy_files_exists(tmp_path) is True diff --git a/tests/pytests/functional/utils/test_gitfs.py b/tests/pytests/functional/utils/test_gitfs.py new file mode 100644 index 000000000000..30a5f147faaf --- /dev/null +++ b/tests/pytests/functional/utils/test_gitfs.py @@ -0,0 +1,275 @@ +import os.path + +import pytest + +from salt.fileserver.gitfs import PER_REMOTE_ONLY, PER_REMOTE_OVERRIDES +from salt.utils.gitfs import GitFS, GitPython, Pygit2 +from salt.utils.immutabletypes import ImmutableDict, ImmutableList + +pytestmark = [ + pytest.mark.slow_test, +] + + +try: + import git # pylint: disable=unused-import + + HAS_GITPYTHON = True +except ImportError: + HAS_GITPYTHON = False + + +try: + import pygit2 # pylint: disable=unused-import + + HAS_PYGIT2 = True +except ImportError: + HAS_PYGIT2 = False + + +skipif_no_gitpython = pytest.mark.skipif(not HAS_GITPYTHON, reason="Missing gitpython") +skipif_no_pygit2 = pytest.mark.skipif(not HAS_PYGIT2, reason="Missing pygit2") + + +@pytest.fixture +def gitfs_opts(salt_factories, tmp_path): + config_defaults = {"cachedir": str(tmp_path)} + factory = salt_factories.salt_master_daemon( + "gitfs-functional-master", defaults=config_defaults + ) + config_defaults = dict(factory.config) + for key, item in config_defaults.items(): + if isinstance(item, ImmutableDict): + config_defaults[key] = dict(item) + elif isinstance(item, ImmutableList): + config_defaults[key] = list(item) + return config_defaults + + +@pytest.fixture +def gitpython_gitfs_opts(gitfs_opts): + gitfs_opts["verified_gitfs_provider"] = "gitpython" + GitFS.instance_map.clear() # wipe instance_map object map for clean run + return gitfs_opts + + +@pytest.fixture +def pygit2_gitfs_opts(gitfs_opts): + gitfs_opts["verified_gitfs_provider"] = "pygit2" + GitFS.instance_map.clear() # wipe instance_map object map for clean run + return gitfs_opts + + +def _get_gitfs(opts, *remotes): + return GitFS( + opts, + 
remotes, + per_remote_overrides=PER_REMOTE_OVERRIDES, + per_remote_only=PER_REMOTE_ONLY, + ) + + +def _test_gitfs_simple(gitfs_opts): + g = _get_gitfs( + gitfs_opts, + {"https://github.com/saltstack/salt-test-pillar-gitfs.git": [{"name": "bob"}]}, + ) + g.fetch_remotes() + assert len(g.remotes) == 1 + assert set(g.file_list({"saltenv": "main"})) == {".gitignore", "README.md"} + + +@skipif_no_gitpython +def test_gitpython_gitfs_simple(gitpython_gitfs_opts): + _test_gitfs_simple(gitpython_gitfs_opts) + + +@skipif_no_pygit2 +def test_pygit2_gitfs_simple(pygit2_gitfs_opts): + _test_gitfs_simple(pygit2_gitfs_opts) + + +def _test_gitfs_simple_base(gitfs_opts): + g = _get_gitfs( + gitfs_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + g.fetch_remotes() + assert len(g.remotes) == 1 + assert set(g.file_list({"saltenv": "base"})) == { + ".gitignore", + "README.md", + "file.sls", + "top.sls", + } + + +@skipif_no_gitpython +def test_gitpython_gitfs_simple_base(gitpython_gitfs_opts): + _test_gitfs_simple_base(gitpython_gitfs_opts) + + +@skipif_no_pygit2 +def test_pygit2_gitfs_simple_base(pygit2_gitfs_opts): + _test_gitfs_simple_base(pygit2_gitfs_opts) + + +@skipif_no_gitpython +def test_gitpython_gitfs_provider(gitpython_gitfs_opts): + g = _get_gitfs( + gitpython_gitfs_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + assert len(g.remotes) == 1 + assert g.provider == "gitpython" + assert isinstance(g.remotes[0], GitPython) + + +@skipif_no_pygit2 +def test_pygit2_gitfs_provider(pygit2_gitfs_opts): + g = _get_gitfs( + pygit2_gitfs_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + assert len(g.remotes) == 1 + assert g.provider == "pygit2" + assert isinstance(g.remotes[0], Pygit2) + + +def _test_gitfs_minion(gitfs_opts): + gitfs_opts["__role"] = "minion" + g = _get_gitfs( + gitfs_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + g.fetch_remotes() + assert len(g.remotes) == 1 + assert 
set(g.file_list({"saltenv": "base"})) == { + ".gitignore", + "README.md", + "file.sls", + "top.sls", + } + assert set(g.file_list({"saltenv": "main"})) == {".gitignore", "README.md"} + + +@skipif_no_gitpython +def test_gitpython_gitfs_minion(gitpython_gitfs_opts): + _test_gitfs_minion(gitpython_gitfs_opts) + + +@skipif_no_pygit2 +def test_pygit2_gitfs_minion(pygit2_gitfs_opts): + _test_gitfs_minion(pygit2_gitfs_opts) + + +def _test_fetch_request_with_mountpoint(opts): + mpoint = [{"mountpoint": "salt/m"}] + p = _get_gitfs( + opts, + {"https://github.com/saltstack/salt-test-pillar-gitfs.git": mpoint}, + ) + p.fetch_remotes() + assert len(p.remotes) == 1 + repo = p.remotes[0] + assert repo.mountpoint("testmount") == "salt/m" + assert set(p.file_list({"saltenv": "testmount"})) == { + "salt/m/test_dir1/testfile3", + "salt/m/test_dir1/test_dir2/testfile2", + "salt/m/.gitignore", + "salt/m/README.md", + "salt/m/test_dir1/test_dir2/testfile1", + } + + +@skipif_no_gitpython +def test_gitpython_fetch_request_with_mountpoint(gitpython_gitfs_opts): + _test_fetch_request_with_mountpoint(gitpython_gitfs_opts) + + +@skipif_no_pygit2 +def test_pygit2_fetch_request_with_mountpoint(pygit2_gitfs_opts): + _test_fetch_request_with_mountpoint(pygit2_gitfs_opts) + + +def _test_name(opts): + p = _get_gitfs( + opts, + { + "https://github.com/saltstack/salt-test-pillar-gitfs.git": [ + {"name": "name1"} + ] + }, + { + "https://github.com/saltstack/salt-test-pillar-gitfs.git": [ + {"name": "name2"} + ] + }, + ) + p.fetch_remotes() + assert len(p.remotes) == 2 + repo = p.remotes[0] + repo2 = p.remotes[1] + assert repo.get_cache_basehash() == "name1" + assert repo2.get_cache_basehash() == "name2" + + +@skipif_no_gitpython +def test_gitpython_name(gitpython_gitfs_opts): + _test_name(gitpython_gitfs_opts) + + +@skipif_no_pygit2 +def test_pygit2_name(pygit2_gitfs_opts): + _test_name(pygit2_gitfs_opts) + + +def _test_remote_map(opts): + p = _get_gitfs( + opts, + 
"https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + p.fetch_remotes() + assert len(p.remotes) == 1 + assert os.path.isfile(os.path.join(opts["cachedir"], "gitfs", "remote_map.txt")) + + +@skipif_no_gitpython +def test_gitpython_remote_map(gitpython_gitfs_opts): + _test_remote_map(gitpython_gitfs_opts) + + +@skipif_no_pygit2 +def test_pygit2_remote_map(pygit2_gitfs_opts): + _test_remote_map(pygit2_gitfs_opts) + + +def _test_lock(opts): + g = _get_gitfs( + opts, + "https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + g.fetch_remotes() + assert len(g.remotes) == 1 + repo = g.remotes[0] + assert repo.get_salt_working_dir() in repo._get_lock_file() + assert repo.lock() == ( + [ + "Set update lock for gitfs remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + ], + [], + ) + assert os.path.isfile(repo._get_lock_file()) + assert repo.clear_lock() == ( + [ + "Removed update lock for gitfs remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + ], + [], + ) + assert not os.path.isfile(repo._get_lock_file()) + + +@skipif_no_gitpython +def test_gitpython_lock(gitpython_gitfs_opts): + _test_lock(gitpython_gitfs_opts) + + +@skipif_no_pygit2 +def test_pygit2_lock(pygit2_gitfs_opts): + _test_lock(pygit2_gitfs_opts) diff --git a/tests/pytests/functional/utils/test_http.py b/tests/pytests/functional/utils/test_http.py new file mode 100644 index 000000000000..e6e48d605709 --- /dev/null +++ b/tests/pytests/functional/utils/test_http.py @@ -0,0 +1,154 @@ +import shutil +import tarfile + +import pytest +from pytestshellutils.utils import ports +from saltfactories.utils import random_string + +import salt.utils.http + + +@pytest.mark.parametrize("backend", ["requests", "urllib2", "tornado"]) +def test_decode_body(webserver, integration_files_dir, backend): + with tarfile.open(integration_files_dir / "test.tar.gz", "w:gz") as tar: + tar.add(integration_files_dir / "this.txt") + + ret = salt.utils.http.query( + 
webserver.url("test.tar.gz"), backend=backend, decode_body=False + ) + assert isinstance(ret["body"], bytes) + + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.skip_if_binaries_missing("docker", "dockerd", check_all=False), +] + + +@pytest.fixture(scope="module") +def tinyproxy_port(): + return ports.get_unused_localhost_port() + + +@pytest.fixture(scope="module") +def tinyproxy_user(): + return random_string("tinyproxy-user-") + + +@pytest.fixture(scope="module") +def tinyproxy_pass(): + return random_string("tinyproxy-pass-") + + +@pytest.fixture(params=[True, False], ids=lambda x: "basic-auth" if x else "no-auth") +def tinyproxy_basic_auth(request): + return request.param + + +@pytest.fixture(params=[True, False], ids=lambda x: "no-proxy" if x else "with-proxy") +def no_proxy(request): + return request.param + + +@pytest.fixture(params=["POST", "GET"], ids=lambda x: x) +def http_method(request): + return request.param + + +@pytest.fixture(scope="module") +def tinyproxy_dir(tmp_path_factory): + try: + dirname = tmp_path_factory.mktemp("tinyproxy") + yield dirname + finally: + shutil.rmtree(dirname, ignore_errors=True) + + +@pytest.fixture +def tinyproxy_conf( + tinyproxy_dir, tinyproxy_port, tinyproxy_user, tinyproxy_pass, tinyproxy_basic_auth +): + basic_auth = ( + f"\nBasicAuth {tinyproxy_user} {tinyproxy_pass}" if tinyproxy_basic_auth else "" + ) + conf = """Port {port} +Listen 127.0.0.1 +Timeout 600 +Allow 127.0.0.1 +AddHeader "X-Tinyproxy-Header" "Test custom tinyproxy header"{auth} + """.format( + port=tinyproxy_port, auth=basic_auth + ) + (tinyproxy_dir / "tinyproxy.conf").write_text(conf) + + +@pytest.fixture +def tinyproxy_container( + salt_factories, + tinyproxy_conf, + tinyproxy_dir, +): + container = salt_factories.get_container( + "tinyproxy", + image_name="ghcr.io/saltstack/salt-ci-containers/tinyproxy:latest", + container_run_kwargs={ + "network_mode": "host", + "volumes": {str(tinyproxy_dir): {"bind": "/etc/tinyproxy", "mode": "z"}}, + }, 
+ pull_before_start=True, + skip_on_pull_failure=True, + skip_if_docker_client_not_connectable=True, + ) + with container.started() as factory: + yield factory + + +@pytest.mark.parametrize("backend", ["requests", "tornado", "urllib2"]) +def test_real_proxy( + tinyproxy_container, + httpserver, + tinyproxy_port, + tinyproxy_user, + tinyproxy_pass, + backend, + tinyproxy_basic_auth, + no_proxy, + http_method, +): + data = b"mydatahere" + opts = { + "proxy_host": "localhost", + "proxy_port": tinyproxy_port, + } + if tinyproxy_basic_auth: + opts.update( + { + "proxy_username": tinyproxy_user, + "proxy_password": tinyproxy_pass, + } + ) + + # Expecting the headers allows verification that it went through the proxy without looking at the logs + if no_proxy: + opts["no_proxy"] = ["random.hostname.io", httpserver.host] + httpserver.expect_request( + "/real_proxy_test", + ).respond_with_data(data, content_type="application/octet-stream") + else: + httpserver.expect_request( + "/real_proxy_test", + headers={"X-Tinyproxy-Header": "Test custom tinyproxy header"}, + ).respond_with_data(data, content_type="application/octet-stream") + url = httpserver.url_for("/real_proxy_test").replace("localhost", "127.0.0.1") + + # We just want to be sure that it's using the proxy + ret = salt.utils.http.query( + url, + method=http_method, + data=data, + backend=backend, + opts=opts, + decode_body=False, + ) + body = ret.get("body", "") + assert body == data diff --git a/tests/pytests/functional/utils/test_job.py b/tests/pytests/functional/utils/test_job.py new file mode 100644 index 000000000000..8c269dbcccfa --- /dev/null +++ b/tests/pytests/functional/utils/test_job.py @@ -0,0 +1,38 @@ +import pathlib + +import salt.utils.job + + +def test_store_job_save_load(minion_opts, tmp_path): + """ + Test to ensure we create the correct files when we + store a job in the local cache + """ + opts = minion_opts.copy() + opts["master_job_cache"] = "local_cache" + opts["job_cache"] = True + 
opts["ext_job_cache"] = "" + cache_dir = pathlib.Path(opts["cachedir"], "jobs") + master_minion = "test_master" + load = { + "id": master_minion, + "tgt": master_minion, + "jid": "20230822145508520090", + "return": { + "fun": "runner.test.arg", + "jid": "20230822145508520090", + "user": "sudo_ch3ll", + "fun_args": ["go", "home"], + "_stamp": "2023-08-22T14:55:08.796680", + "return": {"args": ("go", "home"), "kwargs": {}}, + "success": True, + }, + } + salt.utils.job.store_job(opts, load) + job_dir = list(list(cache_dir.iterdir())[0].iterdir())[0] + return_p = job_dir / master_minion / "return.p" + load_p = job_dir / ".load.p" + jid = job_dir / "jid" + assert return_p.is_file() + assert load_p.is_file() + assert jid.is_file() diff --git a/tests/pytests/functional/utils/test_pillar.py b/tests/pytests/functional/utils/test_pillar.py new file mode 100644 index 000000000000..143edbf6ff5f --- /dev/null +++ b/tests/pytests/functional/utils/test_pillar.py @@ -0,0 +1,365 @@ +import os + +import pytest + +from salt.pillar.git_pillar import GLOBAL_ONLY, PER_REMOTE_ONLY, PER_REMOTE_OVERRIDES +from salt.utils.gitfs import GitPillar, GitPython, Pygit2 +from salt.utils.immutabletypes import ImmutableDict, ImmutableList + +pytestmark = [ + pytest.mark.slow_test, +] + + +try: + import git # pylint: disable=unused-import + + HAS_GITPYTHON = True +except ImportError: + HAS_GITPYTHON = False + + +try: + import pygit2 # pylint: disable=unused-import + + HAS_PYGIT2 = True +except ImportError: + HAS_PYGIT2 = False + + +skipif_no_gitpython = pytest.mark.skipif(not HAS_GITPYTHON, reason="Missing gitpython") +skipif_no_pygit2 = pytest.mark.skipif(not HAS_PYGIT2, reason="Missing pygit2") + + +@pytest.fixture +def pillar_opts(salt_factories, tmp_path): + config_defaults = {"cachedir": str(tmp_path)} + factory = salt_factories.salt_master_daemon( + "pillar-functional-master", defaults=config_defaults + ) + config_defaults = dict(factory.config) + for key, item in config_defaults.items(): + if 
isinstance(item, ImmutableDict): + config_defaults[key] = dict(item) + elif isinstance(item, ImmutableList): + config_defaults[key] = list(item) + return config_defaults + + +@pytest.fixture +def gitpython_pillar_opts(pillar_opts): + pillar_opts["verified_git_pillar_provider"] = "gitpython" + return pillar_opts + + +@pytest.fixture +def pygit2_pillar_opts(pillar_opts): + pillar_opts["verified_git_pillar_provider"] = "pygit2" + return pillar_opts + + +def _get_pillar(opts, *remotes): + return GitPillar( + opts, + remotes, + per_remote_overrides=PER_REMOTE_OVERRIDES, + per_remote_only=PER_REMOTE_ONLY, + global_only=GLOBAL_ONLY, + ) + + +@skipif_no_gitpython +def test_gitpython_pillar_provider(gitpython_pillar_opts): + p = _get_pillar( + gitpython_pillar_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + assert len(p.remotes) == 1 + assert p.provider == "gitpython" + assert isinstance(p.remotes[0], GitPython) + + +@skipif_no_pygit2 +def test_pygit2_pillar_provider(pygit2_pillar_opts): + p = _get_pillar( + pygit2_pillar_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + assert len(p.remotes) == 1 + assert p.provider == "pygit2" + assert isinstance(p.remotes[0], Pygit2) + + +def _test_env(opts): + p = _get_pillar( + opts, "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + assert len(p.remotes) == 1 + p.checkout() + repo = p.remotes[0] + # test that two different pillarenvs can exist at the same time + files = set(os.listdir(repo.get_cachedir())) + for f in (".gitignore", "README.md", "file.sls", "top.sls"): + assert f in files + opts["pillarenv"] = "main" + p2 = _get_pillar( + opts, "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + assert len(p.remotes) == 1 + p2.checkout() + repo2 = p2.remotes[0] + files = set(os.listdir(repo2.get_cachedir())) + for f in (".gitignore", "README.md"): + assert f in files + for f in ("file.sls", "top.sls", "back.sls", "rooms.sls"): + assert f not in files + 
assert repo.get_cachedir() != repo2.get_cachedir() + files = set(os.listdir(repo.get_cachedir())) + for f in (".gitignore", "README.md", "file.sls", "top.sls"): + assert f in files + + # double check cache paths + assert ( + repo.get_cache_hash() == repo2.get_cache_hash() + ) # __env__ repos share same hash + assert repo.get_cache_basename() != repo2.get_cache_basename() + assert repo.get_linkdir() != repo2.get_linkdir() + assert repo.get_salt_working_dir() != repo2.get_salt_working_dir() + assert repo.get_cache_basename() == "master" + assert repo2.get_cache_basename() == "main" + + assert repo.get_cache_basename() in repo.get_cachedir() + assert ( + os.path.join(repo.get_cache_basehash(), repo.get_cache_basename()) + == repo.get_cache_full_basename() + ) + assert repo.get_linkdir() not in repo.get_cachedir() + assert repo.get_salt_working_dir() not in repo.get_cachedir() + + +@skipif_no_gitpython +def test_gitpython_env(gitpython_pillar_opts): + _test_env(gitpython_pillar_opts) + + +@skipif_no_pygit2 +def test_pygit2_env(pygit2_pillar_opts): + _test_env(pygit2_pillar_opts) + + +def _test_checkout_fetch_on_fail(opts): + p = _get_pillar(opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git") + p.checkout(fetch_on_fail=False) # TODO write me + + +@skipif_no_gitpython +def test_gitpython_checkout_fetch_on_fail(gitpython_pillar_opts): + _test_checkout_fetch_on_fail(gitpython_pillar_opts) + + +@skipif_no_pygit2 +def test_pygit2_checkout_fetch_on_fail(pygit2_pillar_opts): + _test_checkout_fetch_on_fail(pygit2_pillar_opts) + + +def _test_multiple_repos(opts): + p = _get_pillar( + opts, + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git", + "main https://github.com/saltstack/salt-test-pillar-gitfs.git", + "branch https://github.com/saltstack/salt-test-pillar-gitfs.git", + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs-2.git", + "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git", + ) + p.checkout() + assert 
len(p.remotes) == 5 + # make sure all repos dont share cache and working dir + assert len({r.get_cachedir() for r in p.remotes}) == 5 + assert len({r.get_salt_working_dir() for r in p.remotes}) == 5 + + p2 = _get_pillar( + opts, + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git", + "main https://github.com/saltstack/salt-test-pillar-gitfs.git", + "branch https://github.com/saltstack/salt-test-pillar-gitfs.git", + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs-2.git", + "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git", + ) + p2.checkout() + assert len(p2.remotes) == 5 + # make sure that repos are given same cache dir + for repo, repo2 in zip(p.remotes, p2.remotes): + assert repo.get_cachedir() == repo2.get_cachedir() + assert repo.get_salt_working_dir() == repo2.get_salt_working_dir() + opts["pillarenv"] = "main" + p3 = _get_pillar( + opts, + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git", + "main https://github.com/saltstack/salt-test-pillar-gitfs.git", + "branch https://github.com/saltstack/salt-test-pillar-gitfs.git", + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs-2.git", + "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git", + ) + p3.checkout() + # check that __env__ has different cache with different pillarenv + assert p.remotes[0].get_cachedir() != p3.remotes[0].get_cachedir() + assert p.remotes[1].get_cachedir() == p3.remotes[1].get_cachedir() + assert p.remotes[2].get_cachedir() == p3.remotes[2].get_cachedir() + assert p.remotes[3].get_cachedir() != p3.remotes[3].get_cachedir() + assert p.remotes[4].get_cachedir() == p3.remotes[4].get_cachedir() + + # check that other branch data is in cache + files = set(os.listdir(p.remotes[4].get_cachedir())) + for f in (".gitignore", "README.md", "file.sls", "top.sls", "other_env.sls"): + assert f in files + + +@skipif_no_gitpython +def test_gitpython_multiple_repos(gitpython_pillar_opts): + 
_test_multiple_repos(gitpython_pillar_opts) + + +@skipif_no_pygit2 +def test_pygit2_multiple_repos(pygit2_pillar_opts): + _test_multiple_repos(pygit2_pillar_opts) + + +def _test_fetch_request(opts): + p = _get_pillar( + opts, + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git", + "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git", + ) + frequest = os.path.join(p.remotes[0].get_salt_working_dir(), "fetch_request") + frequest_other = os.path.join(p.remotes[1].get_salt_working_dir(), "fetch_request") + opts["pillarenv"] = "main" + p2 = _get_pillar( + opts, "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + frequest2 = os.path.join(p2.remotes[0].get_salt_working_dir(), "fetch_request") + assert frequest != frequest2 + assert os.path.isfile(frequest) is False + assert os.path.isfile(frequest2) is False + assert os.path.isfile(frequest_other) is False + p.fetch_remotes() + assert os.path.isfile(frequest) is False + # fetch request was placed + assert os.path.isfile(frequest2) is True + p2.checkout() + # fetch request was found + assert os.path.isfile(frequest2) is False + p2.fetch_remotes() + assert os.path.isfile(frequest) is True + assert os.path.isfile(frequest2) is False + assert os.path.isfile(frequest_other) is False + for _ in range(3): + p2.fetch_remotes() + assert os.path.isfile(frequest) is True + assert os.path.isfile(frequest2) is False + assert os.path.isfile(frequest_other) is False + # fetch request should still be processed even on fetch_on_fail=False + p.checkout(fetch_on_fail=False) + assert os.path.isfile(frequest) is False + assert os.path.isfile(frequest2) is False + assert os.path.isfile(frequest_other) is False + + +@skipif_no_gitpython +def test_gitpython_fetch_request(gitpython_pillar_opts): + _test_fetch_request(gitpython_pillar_opts) + + +@skipif_no_pygit2 +def test_pygit2_fetch_request(pygit2_pillar_opts): + _test_fetch_request(pygit2_pillar_opts) + + +def _test_clear_old_remotes(opts): + p = 
_get_pillar( + opts, + "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git", + "other https://github.com/saltstack/salt-test-pillar-gitfs-2.git", + ) + repo = p.remotes[0] + repo2 = p.remotes[1] + opts["pillarenv"] = "main" + p2 = _get_pillar( + opts, "__env__ https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + repo3 = p2.remotes[0] + assert os.path.isdir(repo.get_cachedir()) is True + assert os.path.isdir(repo2.get_cachedir()) is True + assert os.path.isdir(repo3.get_cachedir()) is True + p.clear_old_remotes() + assert os.path.isdir(repo.get_cachedir()) is True + assert os.path.isdir(repo2.get_cachedir()) is True + assert os.path.isdir(repo3.get_cachedir()) is True + p2.clear_old_remotes() + assert os.path.isdir(repo.get_cachedir()) is True + assert os.path.isdir(repo2.get_cachedir()) is False + assert os.path.isdir(repo3.get_cachedir()) is True + + +@skipif_no_gitpython +def test_gitpython_clear_old_remotes(gitpython_pillar_opts): + _test_clear_old_remotes(gitpython_pillar_opts) + + +@skipif_no_pygit2 +def test_pygit2_clear_old_remotes(pygit2_pillar_opts): + _test_clear_old_remotes(pygit2_pillar_opts) + + +def _test_remote_map(opts): + p = _get_pillar( + opts, + "https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + p.fetch_remotes() + assert len(p.remotes) == 1 + assert os.path.isfile( + os.path.join(opts["cachedir"], "git_pillar", "remote_map.txt") + ) + + +@skipif_no_gitpython +def test_gitpython_remote_map(gitpython_pillar_opts): + _test_remote_map(gitpython_pillar_opts) + + +@skipif_no_pygit2 +def test_pygit2_remote_map(pygit2_pillar_opts): + _test_remote_map(pygit2_pillar_opts) + + +def _test_lock(opts): + p = _get_pillar( + opts, + "https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + p.fetch_remotes() + assert len(p.remotes) == 1 + repo = p.remotes[0] + assert repo.get_salt_working_dir() in repo._get_lock_file() + assert repo.lock() == ( + [ + "Set update lock for git_pillar remote 
'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + ], + [], + ) + assert os.path.isfile(repo._get_lock_file()) + assert repo.clear_lock() == ( + [ + "Removed update lock for git_pillar remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + ], + [], + ) + assert not os.path.isfile(repo._get_lock_file()) + + +@skipif_no_gitpython +def test_gitpython_lock(gitpython_pillar_opts): + _test_lock(gitpython_pillar_opts) + + +@skipif_no_pygit2 +def test_pygit2_lock(pygit2_pillar_opts): + _test_lock(pygit2_pillar_opts) diff --git a/tests/pytests/functional/utils/test_winrepo.py b/tests/pytests/functional/utils/test_winrepo.py new file mode 100644 index 000000000000..117d995bba6a --- /dev/null +++ b/tests/pytests/functional/utils/test_winrepo.py @@ -0,0 +1,164 @@ +import os + +import pytest + +from salt.runners.winrepo import GLOBAL_ONLY, PER_REMOTE_ONLY, PER_REMOTE_OVERRIDES +from salt.utils.gitfs import GitPython, Pygit2, WinRepo +from salt.utils.immutabletypes import ImmutableDict, ImmutableList + +pytestmark = [ + pytest.mark.slow_test, +] + + +try: + import git # pylint: disable=unused-import + + HAS_GITPYTHON = True +except ImportError: + HAS_GITPYTHON = False + + +try: + import pygit2 # pylint: disable=unused-import + + HAS_PYGIT2 = True +except ImportError: + HAS_PYGIT2 = False + + +skipif_no_gitpython = pytest.mark.skipif(not HAS_GITPYTHON, reason="Missing gitpython") +skipif_no_pygit2 = pytest.mark.skipif(not HAS_PYGIT2, reason="Missing pygit2") + + +@pytest.fixture +def winrepo_opts(salt_factories, tmp_path): + config_defaults = {"cachedir": str(tmp_path)} + factory = salt_factories.salt_master_daemon( + "winrepo-functional-master", defaults=config_defaults + ) + config_defaults = dict(factory.config) + for key, item in config_defaults.items(): + if isinstance(item, ImmutableDict): + config_defaults[key] = dict(item) + elif isinstance(item, ImmutableList): + config_defaults[key] = list(item) + return config_defaults + + +@pytest.fixture +def 
gitpython_winrepo_opts(winrepo_opts): + winrepo_opts["verified_winrepo_provider"] = "gitpython" + return winrepo_opts + + +@pytest.fixture +def pygit2_winrepo_opts(winrepo_opts): + winrepo_opts["verified_winrepo_provider"] = "pygit2" + return winrepo_opts + + +def _get_winrepo(opts, *remotes): + return WinRepo( + opts, + remotes, + per_remote_overrides=PER_REMOTE_OVERRIDES, + per_remote_only=PER_REMOTE_ONLY, + global_only=GLOBAL_ONLY, + ) + + +@skipif_no_gitpython +def test_gitpython_winrepo_provider(gitpython_winrepo_opts): + w = _get_winrepo( + gitpython_winrepo_opts, + "https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + assert len(w.remotes) == 1 + assert w.provider == "gitpython" + assert isinstance(w.remotes[0], GitPython) + + +@skipif_no_pygit2 +def test_pygit2_winrepo_provider(pygit2_winrepo_opts): + w = _get_winrepo( + pygit2_winrepo_opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git" + ) + assert len(w.remotes) == 1 + assert w.provider == "pygit2" + assert isinstance(w.remotes[0], Pygit2) + + +def _test_winrepo_simple(opts): + w = _get_winrepo(opts, "https://github.com/saltstack/salt-test-pillar-gitfs.git") + assert len(w.remotes) == 1 + w.checkout() + repo = w.remotes[0] + files = set(os.listdir(repo.get_cachedir())) + for f in (".gitignore", "README.md", "file.sls", "top.sls"): + assert f in files + + +@skipif_no_gitpython +def test_gitpython_winrepo_simple(gitpython_winrepo_opts): + _test_winrepo_simple(gitpython_winrepo_opts) + + +@skipif_no_pygit2 +def test_pygit2_winrepo_simple(pygit2_winrepo_opts): + _test_winrepo_simple(pygit2_winrepo_opts) + + +def _test_remote_map(opts): + p = _get_winrepo( + opts, + "https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + p.fetch_remotes() + assert len(p.remotes) == 1 + assert os.path.isfile(os.path.join(opts["cachedir"], "winrepo", "remote_map.txt")) + + +@skipif_no_gitpython +def test_gitpython_remote_map(gitpython_winrepo_opts): + _test_remote_map(gitpython_winrepo_opts) + + 
+@skipif_no_pygit2 +def test_pygit2_remote_map(pygit2_winrepo_opts): + _test_remote_map(pygit2_winrepo_opts) + + +def _test_lock(opts): + w = _get_winrepo( + opts, + "https://github.com/saltstack/salt-test-pillar-gitfs.git", + ) + w.fetch_remotes() + assert len(w.remotes) == 1 + repo = w.remotes[0] + assert repo.get_salt_working_dir() in repo._get_lock_file() + assert repo.lock() == ( + [ + "Set update lock for winrepo remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + ], + [], + ) + assert os.path.isfile(repo._get_lock_file()) + assert repo.clear_lock() == ( + [ + "Removed update lock for winrepo remote 'https://github.com/saltstack/salt-test-pillar-gitfs.git'" + ], + [], + ) + assert not os.path.isfile(repo._get_lock_file()) + + +@skipif_no_gitpython +def test_gitpython_lock(gitpython_winrepo_opts): + _test_lock(gitpython_winrepo_opts) + + +@skipif_no_pygit2 +def test_pygit2_lock(pygit2_winrepo_opts): + _test_lock(pygit2_winrepo_opts) diff --git a/tests/pytests/functional/utils/user/test__getgrall.py b/tests/pytests/functional/utils/user/test__getgrall.py new file mode 100644 index 000000000000..db994019e607 --- /dev/null +++ b/tests/pytests/functional/utils/user/test__getgrall.py @@ -0,0 +1,44 @@ +from textwrap import dedent + +import pytest + +pytest.importorskip("grp") + +import grp + +import salt.utils.user + + +@pytest.fixture(scope="function") +def etc_group(tmp_path): + etcgrp = tmp_path / "etc" / "group" + etcgrp.parent.mkdir() + etcgrp.write_text( + dedent( + """games:x:50: + docker:x:959:debian,salt + salt:x:1000:""" + ) + ) + return etcgrp + + +def test__getgrall(etc_group): + group_lines = [ + ["games", "x", 50, []], + ["docker", "x", 959, ["debian", "salt"]], + ["salt", "x", 1000, []], + ] + expected_grall = [grp.struct_group(comps) for comps in group_lines] + + grall = salt.utils.user._getgrall(root=str(etc_group.parent.parent)) + + assert grall == expected_grall + + +def test__getgrall_bad_format(etc_group): + with 
etc_group.open("a") as _fp: + _fp.write("\n# some comment here\n") + + with pytest.raises(IndexError): + salt.utils.user._getgrall(root=str(etc_group.parent.parent)) diff --git a/tests/pytests/functional/utils/win_dacl/test_file.py b/tests/pytests/functional/utils/win_dacl/test_file.py index bb6bd1238696..7de08f034225 100644 --- a/tests/pytests/functional/utils/win_dacl/test_file.py +++ b/tests/pytests/functional/utils/win_dacl/test_file.py @@ -1,7 +1,6 @@ import pytest import salt.utils.win_dacl as win_dacl -from tests.support.mock import patch pytestmark = [ pytest.mark.windows_whitelisted, @@ -819,22 +818,22 @@ def test_check_perms(test_file): def test_check_perms_test_true(test_file): - with patch.dict(win_dacl.__opts__, {"test": True}): - result = win_dacl.check_perms( - obj_name=str(test_file), - obj_type="file", - ret=None, - owner="Users", - grant_perms={"Backup Operators": {"perms": "read"}}, - deny_perms={ - "NETWORK SERVICE": { - "perms": ["delete", "set_value", "write_dac", "write_owner"] - }, - "Backup Operators": {"perms": ["delete"]}, + result = win_dacl.check_perms( + obj_name=str(test_file), + obj_type="file", + ret=None, + owner="Users", + grant_perms={"Backup Operators": {"perms": "read"}}, + deny_perms={ + "NETWORK SERVICE": { + "perms": ["delete", "set_value", "write_dac", "write_owner"] }, - inheritance=True, - reset=False, - ) + "Backup Operators": {"perms": ["delete"]}, + }, + inheritance=True, + reset=False, + test_mode=True, + ) expected = { "changes": { diff --git a/tests/pytests/functional/utils/win_dacl/test_reg.py b/tests/pytests/functional/utils/win_dacl/test_reg.py index af6de169e6c3..870b9765ad60 100644 --- a/tests/pytests/functional/utils/win_dacl/test_reg.py +++ b/tests/pytests/functional/utils/win_dacl/test_reg.py @@ -3,7 +3,6 @@ import salt.utils.win_dacl as win_dacl import salt.utils.win_reg as win_reg -from tests.support.mock import patch pytestmark = [ pytest.mark.windows_whitelisted, @@ -12,15 +11,6 @@ ] -@pytest.fixture 
-def configure_loader_modules(minion_opts): - return { - win_dacl: { - "__opts__": minion_opts, - }, - } - - @pytest.fixture(scope="module") def fake_key(): return "SOFTWARE\\{}".format(random_string("SaltTesting-", lowercase=False)) @@ -433,22 +423,22 @@ def test_check_perms(reg_key): def test_check_perms_test_true(reg_key): - with patch.dict(win_dacl.__opts__, {"test": True}): - result = win_dacl.check_perms( - obj_name=reg_key, - obj_type="registry", - ret=None, - owner="Users", - grant_perms={"Backup Operators": {"perms": "read"}}, - deny_perms={ - "NETWORK SERVICE": { - "perms": ["delete", "set_value", "write_dac", "write_owner"] - }, - "Backup Operators": {"perms": ["delete"]}, + result = win_dacl.check_perms( + obj_name=reg_key, + obj_type="registry", + ret=None, + owner="Users", + grant_perms={"Backup Operators": {"perms": "read"}}, + deny_perms={ + "NETWORK SERVICE": { + "perms": ["delete", "set_value", "write_dac", "write_owner"] }, - inheritance=True, - reset=False, - ) + "Backup Operators": {"perms": ["delete"]}, + }, + inheritance=True, + reset=False, + test_mode=True, + ) expected = { "changes": { diff --git a/tests/pytests/integration/_logging/test_logging.py b/tests/pytests/integration/_logging/test_logging.py new file mode 100644 index 000000000000..a0fa779308b7 --- /dev/null +++ b/tests/pytests/integration/_logging/test_logging.py @@ -0,0 +1,106 @@ +import logging +import os + +import pytest + +import salt._logging.impl as log_impl +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.skip_on_windows(reason="Temporarily skipped on the newer golden images") +] + + +log = logging.getLogger(__name__) + + +@pytest.fixture +def configure_loader_modules(): + return {log_impl: {}} + + +def log_nameToLevel(name): + """ + Return the numeric representation of textual logging level + """ + # log level values + CRITICAL = 50 + FATAL = CRITICAL + ERROR = 40 + WARNING = 30 + WARN = WARNING + INFO = 20 + DEBUG = 10 + NOTSET = 0 + + 
_nameToLevel = { + "CRITICAL": CRITICAL, + "FATAL": FATAL, + "ERROR": ERROR, + "WARN": WARNING, + "WARNING": WARNING, + "INFO": INFO, + "DEBUG": DEBUG, + "NOTSET": NOTSET, + } + return _nameToLevel.get(name, None) + + +def test_lowest_log_level(): + ret = log_impl.get_lowest_log_level() + assert ret is None + + log_impl.set_lowest_log_level(log_nameToLevel("DEBUG")) + ret = log_impl.get_lowest_log_level() + assert ret is log_nameToLevel("DEBUG") + + log_impl.set_lowest_log_level(log_nameToLevel("WARNING")) + ret = log_impl.get_lowest_log_level() + assert ret is log_nameToLevel("WARNING") + + opts = {"log_level": "ERROR", "log_level_logfile": "INFO"} + log_impl.set_lowest_log_level_by_opts(opts) + ret = log_impl.get_lowest_log_level() + assert ret is log_nameToLevel("INFO") + + +def test_get_logging_level_from_string(caplog): + ret = log_impl.get_logging_level_from_string(None) + assert ret is log_nameToLevel("WARNING") + + ret = log_impl.get_logging_level_from_string(log_nameToLevel("DEBUG")) + assert ret is log_nameToLevel("DEBUG") + + ret = log_impl.get_logging_level_from_string("CRITICAL") + assert ret is log_nameToLevel("CRITICAL") + + caplog.clear() + with caplog.at_level(logging.WARNING): + msg = "Could not translate the logging level string 'BADLEVEL' into an actual logging level integer. Returning 'logging.ERROR'." + ret = log_impl.get_logging_level_from_string("BADLEVEL") + assert ret is log_nameToLevel("ERROR") + assert msg in caplog.text + + +def test_logfile_handler(caplog): + caplog.clear() + with caplog.at_level(logging.WARNING): + ret = log_impl.is_logfile_handler_configured() + assert ret is False + + msg = "log_path setting is set to `None`. 
Nothing else to do" + log_path = None + assert log_impl.setup_logfile_handler(log_path) is None + assert msg in caplog.text + + +def test_in_mainprocess(): + ret = log_impl.in_mainprocess() + assert ret is True + + curr_pid = os.getpid() + with patch( + "os.getpid", MagicMock(side_effect=[AttributeError, curr_pid, curr_pid]) + ): + ret = log_impl.in_mainprocess() + assert ret is True diff --git a/tests/pytests/integration/cli/conftest.py b/tests/pytests/integration/cli/conftest.py index 48554dda4d8c..1f87f17e58a9 100644 --- a/tests/pytests/integration/cli/conftest.py +++ b/tests/pytests/integration/cli/conftest.py @@ -2,6 +2,7 @@ @pytest.fixture(scope="package") +@pytest.mark.core_test def salt_eauth_account(salt_eauth_account_factory): with salt_eauth_account_factory as account: yield account diff --git a/tests/pytests/integration/cli/test_batch.py b/tests/pytests/integration/cli/test_batch.py index 81175f62a464..70d66d99598b 100644 --- a/tests/pytests/integration/cli/test_batch.py +++ b/tests/pytests/integration/cli/test_batch.py @@ -8,7 +8,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, - # pytest.mark.slow_test, + pytest.mark.core_test, ] diff --git a/tests/pytests/integration/cli/test_matcher.py b/tests/pytests/integration/cli/test_matcher.py index f64d5b9014a0..2e91eba7ad41 100644 --- a/tests/pytests/integration/cli/test_matcher.py +++ b/tests/pytests/integration/cli/test_matcher.py @@ -5,7 +5,7 @@ import salt.defaults.exitcodes pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.windows_whitelisted, ] @@ -401,7 +401,7 @@ def test_grains_targeting_minion_id_disconnected(salt_master, salt_minion, salt_ "-G", "test.ping", minion_tgt="id:{}".format(disconnected_minion_id), - _timeout=15, + _timeout=30, ) assert ret.returncode == 1 assert disconnected_minion_id in ret.data diff --git a/tests/pytests/integration/cli/test_salt.py b/tests/pytests/integration/cli/test_salt.py index 2f9ff0e541ee..7f026845843c 100644 --- 
a/tests/pytests/integration/cli/test_salt.py +++ b/tests/pytests/integration/cli/test_salt.py @@ -19,11 +19,24 @@ log = logging.getLogger(__name__) pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.windows_whitelisted, ] +@pytest.fixture +def salt_minion_2(salt_master): + """ + A running salt-minion fixture + """ + factory = salt_master.salt_minion_daemon( + "minion-2", + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + ) + with factory.started(start_timeout=120): + yield factory + + def test_context_retcode_salt(salt_cli, salt_minion): """ Test that a nonzero retcode set in the context dunder will cause the @@ -132,7 +145,6 @@ def test_exit_status_correct_usage(salt_cli, salt_minion): assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret -@pytest.mark.slow_test @pytest.mark.skip_on_windows(reason="Windows does not support SIGINT") @pytest.mark.skip_initial_onedir_failure def test_interrupt_on_long_running_job(salt_cli, salt_master, salt_minion): @@ -235,3 +247,25 @@ def test_interrupt_on_long_running_job(salt_cli, salt_master, salt_minion): assert "Exiting gracefully on Ctrl-c" in ret.stderr assert "Exception ignored in" not in ret.stderr assert "This job's jid is" in ret.stderr + + +def test_minion_65400(salt_cli, salt_minion, salt_minion_2, salt_master): + """ + Ensure correct exit status when salt CLI starts correctly. 
+ + """ + state = f""" + custom_test_state: + test.configurable_test_state: + - name: example + - changes: True + - result: False + - comment: 65400 regression test + """ + with salt_master.state_tree.base.temp_file("test_65400.sls", state): + ret = salt_cli.run("state.sls", "test_65400", minion_tgt="*") + assert isinstance(ret.data, dict) + assert len(ret.data.keys()) == 2 + for minion_id in ret.data: + assert ret.data[minion_id] != "Error: test.configurable_test_state" + assert isinstance(ret.data[minion_id], dict) diff --git a/tests/pytests/integration/cli/test_salt_auth.py b/tests/pytests/integration/cli/test_salt_auth.py index 53cc92e6abca..adaf235ae21c 100644 --- a/tests/pytests/integration/cli/test_salt_auth.py +++ b/tests/pytests/integration/cli/test_salt_auth.py @@ -5,7 +5,7 @@ log = logging.getLogger(__name__) pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.skip_if_not_root, pytest.mark.destructive_test, pytest.mark.skip_on_windows, diff --git a/tests/pytests/integration/cli/test_salt_call.py b/tests/pytests/integration/cli/test_salt_call.py index b663372ec59e..b1af43050e1d 100644 --- a/tests/pytests/integration/cli/test_salt_call.py +++ b/tests/pytests/integration/cli/test_salt_call.py @@ -15,7 +15,7 @@ from tests.support.helpers import PRE_PYTEST_SKIP, PRE_PYTEST_SKIP_REASON pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.windows_whitelisted, ] @@ -429,3 +429,74 @@ def test_local_salt_call_no_function_no_retcode(salt_call_cli): assert "test" in ret.data assert ret.data["test"] == "'test' is not available." 
assert "test.echo" in ret.data + + +def test_state_highstate_custom_grains(salt_master, salt_minion_factory): + """ + This test ensure that custom grains in salt://_grains are loaded before pillar compilation + to ensure that any use of custom grains in pillar files are available, this implies that + a sync of grains occurs before loading the regular /etc/salt/grains or configuration file + grains, as well as the usual grains. + + Note: cannot use salt_minion and salt_call_cli, since these will be loaded before + the pillar and custom_grains files are written, hence using salt_minion_factory. + """ + pillar_top_sls = """ + base: + '*': + - defaults + """ + + pillar_defaults_sls = """ + mypillar: "{{ grains['custom_grain'] }}" + """ + + salt_top_sls = """ + base: + '*': + - test + """ + + salt_test_sls = """ + "donothing": + test.nop: [] + """ + + salt_custom_grains_py = """ + def main(): + return {'custom_grain': 'test_value'} + """ + assert salt_master.is_running() + with salt_minion_factory.started(): + salt_minion = salt_minion_factory + salt_call_cli = salt_minion_factory.salt_call_cli() + with salt_minion.pillar_tree.base.temp_file( + "top.sls", pillar_top_sls + ), salt_minion.pillar_tree.base.temp_file( + "defaults.sls", pillar_defaults_sls + ), salt_minion.state_tree.base.temp_file( + "top.sls", salt_top_sls + ), salt_minion.state_tree.base.temp_file( + "test.sls", salt_test_sls + ), salt_minion.state_tree.base.temp_file( + "_grains/custom_grain.py", salt_custom_grains_py + ): + ret = salt_call_cli.run("--local", "state.highstate") + assert ret.returncode == 0 + ret = salt_call_cli.run("--local", "pillar.items") + assert ret.returncode == 0 + assert ret.data + pillar_items = ret.data + assert "mypillar" in pillar_items + assert pillar_items["mypillar"] == "test_value" + + +def test_salt_call_versions(salt_call_cli, caplog): + """ + Call test.versions without '--local' to test grains + are sync'd without any missing keys in opts + """ + with 
caplog.at_level(logging.DEBUG): + ret = salt_call_cli.run("test.versions") + assert ret.returncode == 0 + assert "Failed to sync grains module: 'master_uri'" not in caplog.messages diff --git a/tests/pytests/integration/cli/test_salt_cp.py b/tests/pytests/integration/cli/test_salt_cp.py index ebc6afc92331..9c303e7c9b49 100644 --- a/tests/pytests/integration/cli/test_salt_cp.py +++ b/tests/pytests/integration/cli/test_salt_cp.py @@ -32,8 +32,8 @@ def dest_testfile(): dst.unlink() -@pytest.mark.slow_test @pytest.mark.windows_whitelisted +@pytest.mark.core_test def test_cp_testfile(salt_minion, salt_cp_cli, source_testfile, dest_testfile): """ test salt-cp diff --git a/tests/pytests/integration/cli/test_salt_deltaproxy.py b/tests/pytests/integration/cli/test_salt_deltaproxy.py index ac96b38c8483..59071a2d6faa 100644 --- a/tests/pytests/integration/cli/test_salt_deltaproxy.py +++ b/tests/pytests/integration/cli/test_salt_deltaproxy.py @@ -18,7 +18,8 @@ pytestmark = [ pytest.mark.skip_on_spawning_platform( reason="Deltaproxy minions do not currently work on spawning platforms.", - ) + ), + pytest.mark.core_test, ] @@ -50,7 +51,6 @@ def clear_proxy_minions(salt_master, proxy_minion_id): os.unlink(cachefile) -@pytest.mark.slow_test def test_exit_status_no_proxyid(salt_master, proxy_minion_id): """ Ensure correct exit status when --proxyid argument is missing. @@ -92,7 +92,6 @@ def test_exit_status_unknown_user(salt_master, proxy_minion_id): assert "The user is not available." 
in exc.value.process_result.stderr -@pytest.mark.slow_test def test_exit_status_unknown_argument(salt_master, proxy_minion_id): """ Ensure correct exit status when an unknown argument is passed to @@ -586,3 +585,137 @@ def ping(): # Terminate the proxy minion ret = factory.terminate() assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret + + +@pytest.mark.skip_on_windows(reason=PRE_PYTEST_SKIP_REASON) +@pytest.mark.parametrize( + "parallel_startup", + [True, False], + ids=["parallel_startup=True", "parallel_startup=False"], +) +def test_custom_proxy_module_raise_exception( + salt_master, + salt_cli, + proxy_minion_id, + parallel_startup, + integration_files_dir, +): + """ + Ensure the salt-proxy control proxy starts and + is able to respond to test.ping, additionally ensure that + the proxies being controlled also respond to test.ping. + + Finally ensure correct exit status when salt-proxy exits correctly. + + Skip on Windows because daemonization not supported + """ + + config_defaults = { + "metaproxy": "deltaproxy", + } + proxy_one = "custom_dummy_proxy_one" + proxy_two = "custom_dummy_proxy_two" + + top_file = """ + base: + {control}: + - controlproxy + {one}: + - {one} + {two}: + - {two} + """.format( + control=proxy_minion_id, + one=proxy_one, + two=proxy_two, + ) + controlproxy_pillar_file = """ + proxy: + proxytype: deltaproxy + parallel_startup: {} + ids: + - {} + - {} + """.format( + parallel_startup, proxy_one, proxy_two + ) + + dummy_proxy_one_pillar_file = """ + proxy: + proxytype: custom_dummy + """ + + dummy_proxy_two_pillar_file = """ + proxy: + proxytype: dummy + """ + + module_contents = """ +__proxyenabled__ = ["custom_dummy"] + +def __virtual__(): + return True + +def init(opts): + raise Exception("Something has gone horribly wrong.") + +def ping(): + return True + """ + + top_tempfile = salt_master.pillar_tree.base.temp_file("top.sls", top_file) + controlproxy_tempfile = salt_master.pillar_tree.base.temp_file( + "controlproxy.sls", 
controlproxy_pillar_file + ) + dummy_proxy_one_tempfile = salt_master.pillar_tree.base.temp_file( + "{}.sls".format(proxy_one), + dummy_proxy_one_pillar_file, + ) + dummy_proxy_two_tempfile = salt_master.pillar_tree.base.temp_file( + "{}.sls".format(proxy_two), + dummy_proxy_two_pillar_file, + ) + + custom_proxy_module = salt_master.state_tree.base.temp_file( + "_proxy/custom_dummy.py", module_contents + ) + with top_tempfile, controlproxy_tempfile, dummy_proxy_one_tempfile, dummy_proxy_two_tempfile, custom_proxy_module: + factory = salt_master.salt_proxy_minion_daemon( + proxy_minion_id, + defaults=config_defaults, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + start_timeout=240, + ) + + for minion_id in (proxy_minion_id, proxy_one, proxy_two): + factory.before_start( + pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id + ) + factory.after_terminate( + pytest.helpers.remove_stale_minion_key, salt_master, minion_id + ) + factory.after_terminate( + pytest.helpers.remove_stale_proxy_minion_cache_file, factory, minion_id + ) + + with factory.started(): + assert factory.is_running() + + # Let's issue a ping the control proxy + ret = salt_cli.run("test.ping", minion_tgt=proxy_minion_id) + assert ret.returncode == 0 + assert ret.data is True + + # Let's issue a ping to one of the controlled proxies + ret = salt_cli.run("test.ping", minion_tgt=proxy_one) + assert ret.returncode == 1 + assert "Minion did not return" in ret.data + + # Let's issue a ping to one of the controlled proxies + ret = salt_cli.run("test.ping", minion_tgt=proxy_two) + assert ret.returncode == 0 + assert ret.data is True + + # Terminate the proxy minion + ret = factory.terminate() + assert ret.returncode == salt.defaults.exitcodes.EX_OK, ret diff --git a/tests/pytests/integration/cli/test_salt_key.py b/tests/pytests/integration/cli/test_salt_key.py index c4bd5cce16bb..23f9020ae7ef 100644 --- a/tests/pytests/integration/cli/test_salt_key.py +++ 
b/tests/pytests/integration/cli/test_salt_key.py @@ -12,7 +12,7 @@ import salt.utils.yaml pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.windows_whitelisted, ] @@ -292,6 +292,13 @@ def test_keys_generation(salt_key_cli, tmp_path): filename.chmod(0o700) +def test_gen_keys_dir_without_gen_keys(salt_key_cli, tmp_path): + gen_keys_path = tmp_path / "temp-gen-keys-path" + ret = salt_key_cli.run("--gen-keys-dir", str(gen_keys_path)) + assert ret.returncode == 0 + assert not gen_keys_path.exists() + + def test_keys_generation_keysize_min(salt_key_cli, tmp_path): ret = salt_key_cli.run( "--gen-keys", "minibar", "--gen-keys-dir", str(tmp_path), "--keysize", "1024" diff --git a/tests/pytests/integration/cli/test_salt_master.py b/tests/pytests/integration/cli/test_salt_master.py index 6db26d0fdaef..555a7f1d97f9 100644 --- a/tests/pytests/integration/cli/test_salt_master.py +++ b/tests/pytests/integration/cli/test_salt_master.py @@ -8,7 +8,7 @@ from tests.support.helpers import PRE_PYTEST_SKIP_REASON pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.windows_whitelisted, ] diff --git a/tests/pytests/integration/cli/test_salt_minion.py b/tests/pytests/integration/cli/test_salt_minion.py index 8cb73916d2fc..c0d60134746a 100644 --- a/tests/pytests/integration/cli/test_salt_minion.py +++ b/tests/pytests/integration/cli/test_salt_minion.py @@ -9,7 +9,7 @@ from tests.support.helpers import PRE_PYTEST_SKIP_REASON pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.windows_whitelisted, ] diff --git a/tests/pytests/integration/cli/test_salt_proxy.py b/tests/pytests/integration/cli/test_salt_proxy.py index 69d8547c1143..fbf39e304389 100644 --- a/tests/pytests/integration/cli/test_salt_proxy.py +++ b/tests/pytests/integration/cli/test_salt_proxy.py @@ -25,7 +25,7 @@ def proxy_minion_id(salt_master): pytest.helpers.remove_stale_minion_key(salt_master, _proxy_minion_id) -@pytest.mark.slow_test 
+@pytest.mark.core_test def test_exit_status_no_proxyid(salt_master, proxy_minion_id): """ Ensure correct exit status when --proxyid argument is missing. @@ -42,6 +42,7 @@ def test_exit_status_no_proxyid(salt_master, proxy_minion_id): @pytest.mark.skip_on_windows(reason="Windows does not do user checks") +@pytest.mark.core_test def test_exit_status_unknown_user(salt_master, proxy_minion_id): """ Ensure correct exit status when the proxy is configured to run as an @@ -57,7 +58,7 @@ def test_exit_status_unknown_user(salt_master, proxy_minion_id): assert "The user is not available." in exc.value.process_result.stderr -@pytest.mark.slow_test +@pytest.mark.core_test def test_exit_status_unknown_argument(salt_master, proxy_minion_id): """ Ensure correct exit status when an unknown argument is passed to diff --git a/tests/pytests/integration/cli/test_salt_run.py b/tests/pytests/integration/cli/test_salt_run.py index 7581927a3a6d..9a051f55d923 100644 --- a/tests/pytests/integration/cli/test_salt_run.py +++ b/tests/pytests/integration/cli/test_salt_run.py @@ -9,7 +9,7 @@ import salt.utils.yaml pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.windows_whitelisted, ] diff --git a/tests/pytests/integration/cli/test_salt_syndic.py b/tests/pytests/integration/cli/test_salt_syndic.py index 349a5aacf6e6..c547df2c2eb6 100644 --- a/tests/pytests/integration/cli/test_salt_syndic.py +++ b/tests/pytests/integration/cli/test_salt_syndic.py @@ -10,7 +10,7 @@ from tests.support.helpers import PRE_PYTEST_SKIP, PRE_PYTEST_SKIP_REASON pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.windows_whitelisted, ] diff --git a/tests/pytests/integration/cli/test_syndic_eauth.py b/tests/pytests/integration/cli/test_syndic_eauth.py index 1577acf7b7dd..3fa4033ec5a8 100644 --- a/tests/pytests/integration/cli/test_syndic_eauth.py +++ b/tests/pytests/integration/cli/test_syndic_eauth.py @@ -5,9 +5,16 @@ import pytest +from tests.conftest import CODE_DIR 
+ docker = pytest.importorskip("docker") +pytestmark = [ + pytest.mark.core_test, +] + + def json_output_to_dict(output): """ Convert ``salt ... --out=json`` Syndic return to a dictionary. Since the @@ -65,10 +72,19 @@ def syndic_network(): network.remove() -@pytest.fixture(scope="session") +@pytest.fixture(scope="module") def source_path(): - x = pathlib.Path(__file__).parent.parent.parent.parent.parent / "salt" - return str(x) + return str(CODE_DIR / "salt") + + +@pytest.fixture(scope="module") +def container_image_name(): + return "ghcr.io/saltstack/salt-ci-containers/salt:3005" + + +@pytest.fixture(scope="module") +def container_python_version(): + return "3.7" @pytest.fixture(scope="module") @@ -182,11 +198,18 @@ def config(source_path): @pytest.fixture(scope="module") -def docker_master(salt_factories, syndic_network, config, source_path): +def docker_master( + salt_factories, + syndic_network, + config, + source_path, + container_image_name, + container_python_version, +): config_dir = str(config["master_dir"]) container = salt_factories.get_container( "master", - image_name="saltstack/salt:3005", + image_name=container_image_name, container_run_kwargs={ # "entrypoint": "salt-master -ldebug", "entrypoint": "python -m http.server", @@ -194,7 +217,7 @@ def docker_master(salt_factories, syndic_network, config, source_path): "volumes": { config_dir: {"bind": "/etc/salt", "mode": "z"}, source_path: { - "bind": "/usr/local/lib/python3.7/site-packages/salt/", + "bind": f"/usr/local/lib/python{container_python_version}/site-packages/salt/", "mode": "z", }, }, @@ -206,18 +229,26 @@ def docker_master(salt_factories, syndic_network, config, source_path): # container.container_start_check(confirm_container_started, container) with container.started() as factory: for user in ("bob", "fnord"): - container.run(f"adduser -D {user}") - container.run(f"passwd -d {user}") - container.run("apk add linux-pam-dev") + ret = container.run(f"adduser {user}") + assert ret.returncode 
== 0 + ret = container.run(f"passwd -d {user}") + assert ret.returncode == 0 yield factory @pytest.fixture(scope="module") -def docker_minion(salt_factories, syndic_network, config, source_path): +def docker_minion( + salt_factories, + syndic_network, + config, + source_path, + container_image_name, + container_python_version, +): config_dir = str(config["minion_dir"]) container = salt_factories.get_container( "minion", - image_name="saltstack/salt:3005", + image_name=container_image_name, container_run_kwargs={ # "entrypoint": "salt-minion", "entrypoint": "python -m http.server", @@ -225,7 +256,7 @@ def docker_minion(salt_factories, syndic_network, config, source_path): "volumes": { config_dir: {"bind": "/etc/salt", "mode": "z"}, source_path: { - "bind": "/usr/local/lib/python3.7/site-packages/salt/", + "bind": f"/usr/local/lib/python{container_python_version}/site-packages/salt/", "mode": "z", }, }, @@ -240,11 +271,18 @@ def docker_minion(salt_factories, syndic_network, config, source_path): @pytest.fixture(scope="module") -def docker_syndic_a(salt_factories, config, syndic_network, source_path): +def docker_syndic_a( + salt_factories, + config, + syndic_network, + source_path, + container_image_name, + container_python_version, +): config_dir = str(config["syndic_a_dir"]) container = salt_factories.get_container( "syndic_a", - image_name="saltstack/salt:3005", + image_name=container_image_name, container_run_kwargs={ # "entrypoint": "salt-master -ldebug", "entrypoint": "python -m http.server", @@ -252,7 +290,7 @@ def docker_syndic_a(salt_factories, config, syndic_network, source_path): "volumes": { config_dir: {"bind": "/etc/salt", "mode": "z"}, source_path: { - "bind": "/usr/local/lib/python3.7/site-packages/salt/", + "bind": f"/usr/local/lib/python{container_python_version}/site-packages/salt/", "mode": "z", }, }, @@ -267,11 +305,18 @@ def docker_syndic_a(salt_factories, config, syndic_network, source_path): @pytest.fixture(scope="module") -def 
docker_syndic_b(salt_factories, config, syndic_network, source_path): +def docker_syndic_b( + salt_factories, + config, + syndic_network, + source_path, + container_image_name, + container_python_version, +): config_dir = str(config["syndic_b_dir"]) container = salt_factories.get_container( "syndic_b", - image_name="saltstack/salt:3005", + image_name=container_image_name, container_run_kwargs={ # "entrypoint": "salt-master -ldebug", "entrypoint": "python -m http.server", @@ -279,7 +324,7 @@ def docker_syndic_b(salt_factories, config, syndic_network, source_path): "volumes": { config_dir: {"bind": "/etc/salt", "mode": "z"}, source_path: { - "bind": "/usr/local/lib/python3.7/site-packages/salt/", + "bind": f"/usr/local/lib/python{container_python_version}/site-packages/salt/", "mode": "z", }, }, @@ -294,11 +339,18 @@ def docker_syndic_b(salt_factories, config, syndic_network, source_path): @pytest.fixture(scope="module") -def docker_minion_a1(salt_factories, config, syndic_network, source_path): +def docker_minion_a1( + salt_factories, + config, + syndic_network, + source_path, + container_image_name, + container_python_version, +): config_dir = str(config["minion_a1_dir"]) container = salt_factories.get_container( "minion_a1", - image_name="saltstack/salt:3005", + image_name=container_image_name, container_run_kwargs={ "network": syndic_network, # "entrypoint": "salt-minion -ldebug", @@ -306,7 +358,7 @@ def docker_minion_a1(salt_factories, config, syndic_network, source_path): "volumes": { config_dir: {"bind": "/etc/salt", "mode": "z"}, source_path: { - "bind": "/usr/local/lib/python3.7/site-packages/salt/", + "bind": f"/usr/local/lib/python{container_python_version}/site-packages/salt/", "mode": "z", }, }, @@ -321,11 +373,18 @@ def docker_minion_a1(salt_factories, config, syndic_network, source_path): @pytest.fixture(scope="module") -def docker_minion_a2(salt_factories, config, syndic_network, source_path): +def docker_minion_a2( + salt_factories, + config, + 
syndic_network, + source_path, + container_image_name, + container_python_version, +): config_dir = str(config["minion_a2_dir"]) container = salt_factories.get_container( "minion_a2", - image_name="saltstack/salt:3005", + image_name=container_image_name, container_run_kwargs={ "network": syndic_network, # "entrypoint": "salt-minion", @@ -333,7 +392,7 @@ def docker_minion_a2(salt_factories, config, syndic_network, source_path): "volumes": { config_dir: {"bind": "/etc/salt", "mode": "z"}, source_path: { - "bind": "/usr/local/lib/python3.7/site-packages/salt/", + "bind": f"/usr/local/lib/python{container_python_version}/site-packages/salt/", "mode": "z", }, }, @@ -348,11 +407,18 @@ def docker_minion_a2(salt_factories, config, syndic_network, source_path): @pytest.fixture(scope="module") -def docker_minion_b1(salt_factories, config, syndic_network, source_path): +def docker_minion_b1( + salt_factories, + config, + syndic_network, + source_path, + container_image_name, + container_python_version, +): config_dir = str(config["minion_b1_dir"]) container = salt_factories.get_container( "minion_b1", - image_name="saltstack/salt:3005", + image_name=container_image_name, container_run_kwargs={ "network": syndic_network, # "entrypoint": "salt-minion", @@ -360,7 +426,7 @@ def docker_minion_b1(salt_factories, config, syndic_network, source_path): "volumes": { config_dir: {"bind": "/etc/salt", "mode": "z"}, source_path: { - "bind": "/usr/local/lib/python3.7/site-packages/salt/", + "bind": f"/usr/local/lib/python{container_python_version}/site-packages/salt/", "mode": "z", }, }, @@ -375,11 +441,18 @@ def docker_minion_b1(salt_factories, config, syndic_network, source_path): @pytest.fixture(scope="module") -def docker_minion_b2(salt_factories, config, syndic_network, source_path): +def docker_minion_b2( + salt_factories, + config, + syndic_network, + source_path, + container_image_name, + container_python_version, +): config_dir = str(config["minion_b2_dir"]) container = 
salt_factories.get_container( "minion_b2", - image_name="saltstack/salt:3005", + image_name=container_image_name, container_run_kwargs={ "network": syndic_network, # "entrypoint": "salt-minion", @@ -387,7 +460,7 @@ def docker_minion_b2(salt_factories, config, syndic_network, source_path): "volumes": { config_dir: {"bind": "/etc/salt", "mode": "z"}, source_path: { - "bind": "/usr/local/lib/python3.7/site-packages/salt/", + "bind": f"/usr/local/lib/python{container_python_version}/site-packages/salt/", "mode": "z", }, }, diff --git a/tests/pytests/integration/client/test_runner.py b/tests/pytests/integration/client/test_runner.py index 3303277da810..74a25cb1353c 100644 --- a/tests/pytests/integration/client/test_runner.py +++ b/tests/pytests/integration/client/test_runner.py @@ -150,3 +150,14 @@ def test_invalid_kwargs_are_ignored(client, auth_creds): ret = client.cmd_sync(low.copy()) assert ret assert ret[0] == "foo" + + +def test_get_docs(client): + ret = client.get_docs(arg="*") + assert "auth.del_token" in ret + assert "auth.mk_token" in ret + assert "cache.clear_pillar" in ret + assert "cache.grains" in ret + assert "state.soft_kill" in ret + assert "virt.start" in ret + assert "test.arg" in ret diff --git a/tests/pytests/integration/daemons/test_masterapi.py b/tests/pytests/integration/daemons/test_masterapi.py index d95d3781f7fb..d57eeeab0922 100644 --- a/tests/pytests/integration/daemons/test_masterapi.py +++ b/tests/pytests/integration/daemons/test_masterapi.py @@ -5,6 +5,7 @@ import os import shutil import stat +import subprocess import pytest @@ -12,7 +13,9 @@ import salt.utils.stringutils from tests.support.runtests import RUNTIME_VARS -pytestmark = [pytest.mark.slow_test] +pytestmark = [ + pytest.mark.slow_test, +] @pytest.fixture @@ -118,3 +121,23 @@ def test_autosign_grains_fail( ) # get minion to try to authenticate itself again assert salt_minion.id not in salt_key_cli.run("-l", "acc") assert salt_minion.id in salt_key_cli.run("-l", "un") + + 
+@pytest.mark.skip_unless_on_linux +@pytest.mark.slow_test +def test_ufw_allow(salt_master, grains): + if grains["os_family"] != "Debian": + pytest.skip("Only runs on Debian family.") + + expected_output = """Skipping adding existing rule +Skipping adding existing rule (v6) + """ + + proc = subprocess.Popen( + "ufw allow salt\n", shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE + ) + out, err = proc.communicate() + out_strg = out.decode() + err_strg = err.decode() + assert out_strg == expected_output + assert err_strg != "ERROR: Could not find a profile matching 'salt'" diff --git a/tests/pytests/integration/daemons/test_memory_leak.py b/tests/pytests/integration/daemons/test_memory_leak.py index 1b782760418f..fb608fc18643 100644 --- a/tests/pytests/integration/daemons/test_memory_leak.py +++ b/tests/pytests/integration/daemons/test_memory_leak.py @@ -44,6 +44,7 @@ def file_add_delete_sls(testfile_path, base_env_state_tree_root_dir): yield sls_name +@pytest.mark.skip_on_fips_enabled_platform @pytest.mark.skip_on_darwin(reason="MacOS is a spawning platform, won't work") @pytest.mark.flaky(max_runs=4) def test_memory_leak(salt_cli, salt_minion, file_add_delete_sls): diff --git a/tests/pytests/integration/master/test_payload.py b/tests/pytests/integration/master/test_payload.py new file mode 100644 index 000000000000..692005b56928 --- /dev/null +++ b/tests/pytests/integration/master/test_payload.py @@ -0,0 +1,37 @@ +""" +Tests for payload +""" +import pytest + + +@pytest.mark.slow_test +@pytest.mark.skip_if_not_root +@pytest.mark.skip_on_windows +@pytest.mark.skip_on_darwin +def test_payload_no_exception(salt_cli, salt_master, salt_minion): + """ + Test to confirm that no exception is thrown with the jinja file + when executed on the minion + """ + test_set_hostname = """ + {%- set host = pillar.get("hostname", "UNKNOWN") %} + {%- if host == 'UNKNOWN' %} + {{ raise("Unsupported UNKNOWN hostname") }} + {%- else %} + hostnamectl set-hostname {{ host }} + {%- 
endif %} + """ + with salt_master.state_tree.base.temp_file("set_hostname.j2", test_set_hostname): + + ret = salt_cli.run("test.ping", minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert ret.data is True + + ret = salt_cli.run( + "cmd.script", + "salt://set_hostname.j2", + "template=jinja", + pillar={"hostname": "test"}, + minion_tgt=salt_minion.id, + ) + assert "AttributeError:" not in ret.stdout diff --git a/tests/pytests/integration/minion/test_job_return.py b/tests/pytests/integration/minion/test_job_return.py new file mode 100644 index 000000000000..19f25e8baa20 --- /dev/null +++ b/tests/pytests/integration/minion/test_job_return.py @@ -0,0 +1,107 @@ +import os +import shutil +import subprocess + +import pytest + +import salt.utils.platform + + +@pytest.fixture +def salt_master_1(request, salt_factories): + config_defaults = { + "open_mode": True, + "transport": request.config.getoption("--transport"), + } + config_overrides = { + "interface": "127.0.0.1", + } + + factory = salt_factories.salt_master_daemon( + "master-1", + defaults=config_defaults, + overrides=config_overrides, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + ) + with factory.started(start_timeout=120): + yield factory + + +@pytest.fixture +def salt_master_2(salt_factories, salt_master_1): + if salt.utils.platform.is_darwin() or salt.utils.platform.is_freebsd(): + subprocess.check_output(["ifconfig", "lo0", "alias", "127.0.0.2", "up"]) + + config_defaults = { + "open_mode": True, + "transport": salt_master_1.config["transport"], + } + config_overrides = { + "interface": "127.0.0.2", + } + + # Use the same ports for both masters, they are binding to different interfaces + for key in ( + "ret_port", + "publish_port", + ): + config_overrides[key] = salt_master_1.config[key] + factory = salt_factories.salt_master_daemon( + "master-2", + defaults=config_defaults, + overrides=config_overrides, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], 
+ ) + + # The secondary salt master depends on the primarily salt master fixture + # because we need to clone the keys + for keyfile in ("master.pem", "master.pub"): + shutil.copyfile( + os.path.join(salt_master_1.config["pki_dir"], keyfile), + os.path.join(factory.config["pki_dir"], keyfile), + ) + with factory.started(start_timeout=120): + yield factory + + +@pytest.fixture +def salt_minion_1(salt_master_1, salt_master_2): + config_defaults = { + "transport": salt_master_1.config["transport"], + } + + master_1_port = salt_master_1.config["ret_port"] + master_1_addr = salt_master_1.config["interface"] + master_2_port = salt_master_2.config["ret_port"] + master_2_addr = salt_master_2.config["interface"] + config_overrides = { + "master": [ + "{}:{}".format(master_1_addr, master_1_port), + "{}:{}".format(master_2_addr, master_2_port), + ], + "test.foo": "baz", + } + factory = salt_master_1.salt_minion_daemon( + "minion-1", + defaults=config_defaults, + overrides=config_overrides, + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + ) + with factory.started(start_timeout=120): + yield factory + + +def test_job_resturn(salt_master_1, salt_master_2, salt_minion_1): + cli = salt_master_1.salt_cli(timeout=120) + ret = cli.run("test.ping", "-v", minion_tgt="minion-1") + for line in ret.stdout.splitlines(): + if "with jid" in line: + jid = line.split("with jid")[1].strip() + + run_1 = salt_master_1.salt_run_cli(timeout=120) + ret = run_1.run("jobs.lookup_jid", jid) + assert ret.data == {"minion-1": True} + + run_2 = salt_master_2.salt_run_cli(timeout=120) + ret = run_2.run("jobs.lookup_jid", jid) + assert ret.data == {} diff --git a/tests/pytests/integration/minion/test_reauth.py b/tests/pytests/integration/minion/test_reauth.py new file mode 100644 index 000000000000..14a1e873551a --- /dev/null +++ b/tests/pytests/integration/minion/test_reauth.py @@ -0,0 +1,49 @@ +import time + + +def test_reauth(salt_master_factory, event_listener): + """ + Validate 
none of our platforms need to re-authenticate when running a job with + multiprocessing=True. + """ + sls_name = "issue-64941" + sls_contents = """ + custom_test_state: + test.configurable_test_state: + - name: example + - changes: True + - result: True + - comment: "Nothing has actually been changed" + """ + events = [] + + def handler(data): + events.append(data) + + event_listener.register_auth_event_handler("test_reauth-master", handler) + master = salt_master_factory.salt_master_daemon( + "test_reauth-master", + overrides={"log_level": "info"}, + ) + sls_tempfile = master.state_tree.base.temp_file( + "{}.sls".format(sls_name), sls_contents + ) + minion = master.salt_minion_daemon( + "test_reauth-minion", + overrides={"log_level": "info"}, + ) + cli = master.salt_cli() + start_time = time.time() + with master.started(), minion.started(): + events = event_listener.get_events( + [(master.id, "salt/auth")], + after_time=start_time, + ) + num_auth = len(events) + proc = cli.run("state.sls", sls_name, minion_tgt="*") + assert proc.returncode == 1 + events = event_listener.get_events( + [(master.id, "salt/auth")], + after_time=start_time, + ) + assert num_auth == len(events) diff --git a/tests/pytests/integration/minion/test_return_retries.py b/tests/pytests/integration/minion/test_return_retries.py index a7f5eaeff16f..8a226d26cd46 100644 --- a/tests/pytests/integration/minion/test_return_retries.py +++ b/tests/pytests/integration/minion/test_return_retries.py @@ -5,20 +5,20 @@ @pytest.fixture(scope="function") -def salt_minion_retry(salt_master_factory, salt_minion_id): +def salt_minion_retry(salt_master, salt_minion_id): # override the defaults for this test config_overrides = { "return_retry_timer_max": 0, "return_retry_timer": 5, "return_retry_tries": 30, } - factory = salt_master_factory.salt_minion_daemon( + factory = salt_master.salt_minion_daemon( random_string("retry-minion-"), overrides=config_overrides, 
extra_cli_arguments_after_first_start_failure=["--log-level=info"], ) factory.after_terminate( - pytest.helpers.remove_stale_minion_key, salt_master_factory, factory.id + pytest.helpers.remove_stale_minion_key, salt_master, factory.id ) with factory.started(): @@ -37,7 +37,7 @@ def test_publish_retry(salt_master, salt_minion_retry, salt_cli, salt_run_cli): # verify we don't yet have the result and sleep assert salt_run_cli.run("jobs.lookup_jid", jid, _timeout=60).data == {} - # the 70s sleep (and 60s timer value) is to reduce flakiness due to slower test runs + # the 5s sleep (and 60s timeout value) is to reduce flakiness due to slower test runs # and should be addresses when number of tries is configurable through minion opts time.sleep(5) @@ -50,3 +50,68 @@ def test_publish_retry(salt_master, salt_minion_retry, salt_cli, salt_run_cli): assert salt_minion_retry.id in data assert data[salt_minion_retry.id] is True + + +@pytest.mark.slow_test +def test_pillar_timeout(salt_master_factory): + cmd = """ + python -c "import time; time.sleep(2.5); print('{\\"foo\\": \\"bar\\"}');\" + """.strip() + master_overrides = { + "ext_pillar": [ + {"cmd_json": cmd}, + ], + "auto_accept": True, + "worker_threads": 2, + "peer": True, + } + minion_overrides = { + "auth_timeout": 20, + "request_channel_timeout": 5, + "request_channel_tries": 1, + } + sls_name = "issue-50221" + sls_contents = """ + custom_test_state: + test.configurable_test_state: + - name: example + - changes: True + - result: True + - comment: "Nothing has actually been changed" + """ + master = salt_master_factory.salt_master_daemon( + "pillar-timeout-master", + overrides=master_overrides, + ) + minion1 = master.salt_minion_daemon( + random_string("pillar-timeout-1-"), + overrides=minion_overrides, + ) + minion2 = master.salt_minion_daemon( + random_string("pillar-timeout-2-"), + overrides=minion_overrides, + ) + minion3 = master.salt_minion_daemon( + random_string("pillar-timeout-3-"), + 
overrides=minion_overrides, + ) + minion4 = master.salt_minion_daemon( + random_string("pillar-timeout-4-"), + overrides=minion_overrides, + ) + cli = master.salt_cli() + sls_tempfile = master.state_tree.base.temp_file( + "{}.sls".format(sls_name), sls_contents + ) + with master.started(), minion1.started(), minion2.started(), minion3.started(), minion4.started(), sls_tempfile: + proc = cli.run("state.sls", sls_name, minion_tgt="*") + # At least one minion should have a Pillar timeout + assert proc.returncode == 1 + minion_timed_out = False + # Find the minion that has a Pillar timeout + for key in proc.data: + if isinstance(proc.data[key], str): + if "Pillar timed out" in proc.data[key]: + minion_timed_out = True + break + assert minion_timed_out is True diff --git a/tests/pytests/integration/modules/state/test_state_pillar_errors.py b/tests/pytests/integration/modules/state/test_state_pillar_errors.py index 99dd5f175d0b..26812cb71b5b 100644 --- a/tests/pytests/integration/modules/state/test_state_pillar_errors.py +++ b/tests/pytests/integration/modules/state/test_state_pillar_errors.py @@ -16,8 +16,8 @@ def reset_pillar(salt_call_cli): finally: # Refresh pillar once all tests are done. 
ret = salt_call_cli.run("saltutil.refresh_pillar", wait=True) - assert ret.exitcode == 0 - assert ret.json is True + assert ret.returncode == 0 + assert ret.data is True @pytest.fixture @@ -77,8 +77,8 @@ def test_state_apply_aborts_on_pillar_error( shell_result = salt_cli.run( "state.apply", "sls-id-test", minion_tgt=salt_minion.id ) - assert shell_result.exitcode == 1 - assert shell_result.json == expected_comment + assert shell_result.returncode == 1 + assert shell_result.data == expected_comment @pytest.mark.usefixtures("testfile_path", "reset_pillar") @@ -117,7 +117,7 @@ def test_state_apply_continues_after_pillar_error_is_fixed( shell_result = salt_cli.run( "saltutil.refresh_pillar", minion_tgt=salt_minion.id ) - assert shell_result.exitcode == 0 + assert shell_result.returncode == 0 # run state.apply with fixed pillar render error with pytest.helpers.temp_file( @@ -128,7 +128,7 @@ def test_state_apply_continues_after_pillar_error_is_fixed( shell_result = salt_cli.run( "state.apply", "sls-id-test", minion_tgt=salt_minion.id ) - assert shell_result.exitcode == 0 - state_result = StateResult(shell_result.json) + assert shell_result.returncode == 0 + state_result = StateResult(shell_result.data) assert state_result.result is True assert state_result.changes == {"diff": "New file", "mode": "0644"} diff --git a/tests/pytests/integration/modules/test_beacons.py b/tests/pytests/integration/modules/test_beacons.py index c15485cb76a0..131ebe5eb476 100644 --- a/tests/pytests/integration/modules/test_beacons.py +++ b/tests/pytests/integration/modules/test_beacons.py @@ -10,7 +10,7 @@ from tests.support.helpers import PRE_PYTEST_SKIP_OR_NOT pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.windows_whitelisted, ] diff --git a/tests/pytests/integration/modules/test_jinja.py b/tests/pytests/integration/modules/test_jinja.py new file mode 100644 index 000000000000..0ae98dbf7dc4 --- /dev/null +++ b/tests/pytests/integration/modules/test_jinja.py @@ 
-0,0 +1,64 @@ +""" +Test the jinja module +""" + +import os + +import salt.utils.files +import salt.utils.json +import salt.utils.yaml +from tests.support.runtests import RUNTIME_VARS + + +def _path(name, absolute=False): + path = os.path.join("modules", "jinja", name) + if absolute: + return os.path.join(RUNTIME_VARS.BASE_FILES, path) + else: + return path + + +def test_import_json(salt_cli, salt_minion): + json_file = "osarchmap.json" + ret = salt_cli.run("jinja.import_json", _path(json_file), minion_tgt=salt_minion.id) + with salt.utils.files.fopen(_path(json_file, absolute=True)) as fh_: + assert salt.utils.json.load(fh_) == ret.data + + +def test_import_yaml(salt_cli, salt_minion): + yaml_file = "defaults.yaml" + ret = salt_cli.run("jinja.import_yaml", _path(yaml_file), minion_tgt=salt_minion.id) + with salt.utils.files.fopen(_path(yaml_file, absolute=True)) as fh_: + assert salt.utils.yaml.safe_load(fh_) == ret.data + + +def test_load_map(grains, salt_cli, salt_minion): + ret = salt_cli.run( + "jinja.load_map", _path("map.jinja"), "template", minion_tgt=salt_minion.id + ) + + assert isinstance( + ret.data, dict + ), "failed to return dictionary from jinja.load_map: {}".format(ret) + + with salt.utils.files.fopen(_path("defaults.yaml", absolute=True)) as fh_: + defaults = salt.utils.yaml.safe_load(fh_) + with salt.utils.files.fopen(_path("osarchmap.json", absolute=True)) as fh_: + osarchmap = salt.utils.json.load(fh_) + with salt.utils.files.fopen(_path("osfamilymap.yaml", absolute=True)) as fh_: + osfamilymap = salt.utils.yaml.safe_load(fh_) + with salt.utils.files.fopen(_path("osmap.yaml", absolute=True)) as fh_: + osmap = salt.utils.yaml.safe_load(fh_) + with salt.utils.files.fopen(_path("osfingermap.yaml", absolute=True)) as fh_: + osfingermap = salt.utils.yaml.safe_load(fh_) + + assert ret.data.get("arch") == osarchmap.get(grains["osarch"], {}).get("arch") + assert ret.data.get("config") == osfingermap.get(grains["osfinger"], {}).get( + "config", + 
osmap.get(grains["os"], {}).get( + "config", + osfamilymap.get(grains["os_family"], {}).get( + "config", defaults.get("template").get("config") + ), + ), + ) diff --git a/tests/pytests/integration/modules/test_mac_sysctl.py b/tests/pytests/integration/modules/test_mac_sysctl.py new file mode 100644 index 000000000000..a71a96f85d6b --- /dev/null +++ b/tests/pytests/integration/modules/test_mac_sysctl.py @@ -0,0 +1,188 @@ +""" + :codeauthor: Nicole Thomas +""" + +import os +import random + +import pytest + +import salt.utils.files +from salt.exceptions import CommandExecutionError + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.destructive_test, + pytest.mark.skip_if_not_root, + pytest.mark.skip_unless_on_darwin, +] + + +@pytest.fixture(scope="function") +def assign_cmd(): + return "net.inet.icmp.timestamp" + + +@pytest.fixture(scope="function") +def config_file(): + return "/etc/sysctl.conf" + + +@pytest.fixture(scope="function") +def setup_teardown_vars(salt_call_cli, assign_cmd, config_file): + has_conf = False + ret = salt_call_cli.run("sysctl.get", assign_cmd, config_file) + val = ret.data + + if val is None: + pytest.skip(f"The call 'sysctl.get {assign_cmd}' returned: None") + + # If sysctl file is present, make a copy + # Remove original file so we can replace it with test files + if os.path.isfile(config_file): + has_conf = True + try: + temp_sysctl_config = __copy_sysctl(config_file) + except CommandExecutionError: + msg = "Could not copy file: {0}" + raise CommandExecutionError(msg.format(config_file)) + os.remove(config_file) + + try: + yield val + finally: + ret = salt_call_cli.run("sysctl.get", assign_cmd) + if ret.data != val: + salt_call_cli.run("sysctl.assign", assign_cmd, val) + + if has_conf is True: + # restore original sysctl file + __restore_sysctl(config_file, temp_sysctl_config) + + if has_conf is False and os.path.isfile(config_file): + # remove sysctl.conf created by tests + os.remove(temp_sysctl_config) + + +def 
test_assign(salt_call_cli, assign_cmd, setup_teardown_vars): + """ + Tests assigning a single sysctl parameter + """ + val = setup_teardown_vars[0] + + try: + rand = random.randint(0, 500) + while rand == val: + rand = random.randint(0, 500) + salt_call_cli.run("sysctl.assign", assign_cmd, rand) + ret = int(salt_call_cli.run("sysctl.get", assign_cmd)) + info = int(ret.data) + try: + assert rand == info + except AssertionError: + salt_call_cli.run("sysctl.assign", assign_cmd, val) + raise + except CommandExecutionError: + salt_call_cli.run("sysctl.assign", assign_cmd, val) + raise + + +def test_persist_new_file(salt_call_cli, assign_cmd, config_file): + """ + Tests assigning a sysctl value to a system without a sysctl.conf file + """ + # Always start with a clean/known sysctl.conf state + if os.path.isfile(config_file): + os.remove(config_file) + try: + salt_call_cli.run("sysctl.persist", assign_cmd, 10) + line = f"{assign_cmd}={10}" + found = __check_string(config_file, line) + assert found + except CommandExecutionError: + os.remove(config_file) + raise + + +def test_persist_already_set(salt_call_cli, config_file, setup_teardown_vars): + """ + Tests assigning a sysctl value that is already set in sysctl.conf file + """ + # Always start with a clean/known sysctl.conf state + if os.path.isfile(config_file): + os.remove(config_file) + try: + salt_call_cli.run("sysctl.persist", assign_cmd, 50) + ret = salt_call_cli.run("sysctl.persist", assign_cmd, 50) + assert ret.data == "Already set" + except CommandExecutionError: + os.remove(config_file) + raise + + +def test_persist_apply_change( + salt_call_cli, assign_cmd, config_file, setup_teardown_vars +): + """ + Tests assigning a sysctl value and applying the change to system + """ + val = setup_teardown_vars[0] + + # Always start with a clean/known sysctl.conf state + if os.path.isfile(config_file): + os.remove(config_file) + try: + rand = random.randint(0, 500) + while rand == val: + rand = random.randint(0, 500) + 
salt_call_cli.run("sysctl.persist", assign_cmd, rand, apply_change=True) + ret = salt_call_cli.run("sysctl.get", assign_cmd) + info = int(ret.data) + assert info == rand + except CommandExecutionError: + os.remove(config_file) + raise + + +def __copy_sysctl(CONFIG): + """ + Copies an existing sysconf file and returns temp file path. Copied + file will be restored in tearDown + """ + # Create new temporary file path and open needed files + temp_path = salt.utils.files.mkstemp() + with salt.utils.files.fopen(CONFIG, "r") as org_conf: + with salt.utils.files.fopen(temp_path, "w") as temp_sysconf: + # write sysctl lines to temp file + for line in org_conf: + temp_sysconf.write(line) + return temp_path + + +def __restore_sysctl(sysctl_config, temp_sysctl_config): + """ + Restores the original sysctl.conf file from temporary copy + """ + # If sysctl testing file exists, delete it + if os.path.isfile(sysctl_config): + os.remove(sysctl_config) + + # write temp lines to sysctl file to restore + with salt.utils.files.fopen(temp_sysctl_config, "r") as temp_sysctl: + with salt.utils.files.fopen(sysctl_config, "w") as sysctl: + for line in temp_sysctl: + sysctl.write(line) + + # delete temporary file + os.remove(temp_sysctl_config) + + +def __check_string(conf_file, to_find): + """ + Returns True if given line is present in file + """ + with salt.utils.files.fopen(conf_file, "r") as f_in: + for line in f_in: + if to_find in salt.utils.stringutils.to_unicode(line): + return True + return False diff --git a/tests/pytests/integration/modules/test_pip.py b/tests/pytests/integration/modules/test_pip.py new file mode 100644 index 000000000000..141acc1806e1 --- /dev/null +++ b/tests/pytests/integration/modules/test_pip.py @@ -0,0 +1,651 @@ +import os +import pprint +import re +import shutil + +import pytest + +import salt.utils.files +import salt.utils.path +import salt.utils.platform +from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES +from tests.support.helpers import 
VirtualEnv, patched_environ + +pytestmark = [ + pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False), + pytest.mark.windows_whitelisted, +] + + +@pytest.fixture(autouse=True) +def patch_environment(): + with patched_environ( + PIP_SOURCE_DIR="", + PIP_BUILD_DIR="", + __cleanup__=[k for k in os.environ if k.startswith("PIP_")], + ): + yield + + +@pytest.fixture +def venv_dir(tmp_path): + return str(tmp_path / "venv_dir") + + +def _check_download_error(ret): + """ + Checks to see if a download error looks transitory + """ + return any(w in ret for w in ["URLError", "Download error"]) + + +def _pip_successful_install( + target, + expect=( + "irc3-plugins-test", + "pep8", + ), +): + """ + isolate regex for extracting `successful install` message from pip + """ + + expect = set(expect) + expect_str = "|".join(expect) + + success = re.search( + r"^.*Successfully installed\s([^\n]+)(?:Clean.*)?", target, re.M | re.S + ) + + success_for = ( + re.findall(r"({})(?:-(?:[\d\.-]))?".format(expect_str), success.groups()[0]) + if success + else [] + ) + + return expect.issubset(set(success_for)) + + +@pytest.mark.slow_test +def test_issue_2087_missing_pip(venv_dir, salt_cli, salt_minion): + # Let's create the testing virtualenv + with VirtualEnv(venv_dir): + + # Let's remove the pip binary + pip_bin = os.path.join(venv_dir, "bin", "pip") + site_dir = salt_cli.run( + "virtualenv.get_distribution_path", + venv_dir, + "pip", + minion_tgt=salt_minion.id, + ).data + if salt.utils.platform.is_windows(): + pip_bin = os.path.join(venv_dir, "Scripts", "pip.exe") + site_dir = os.path.join(venv_dir, "lib", "site-packages") + if not os.path.isfile(pip_bin): + pytest.skip("Failed to find the pip binary to the test virtualenv") + os.remove(pip_bin) + + # Also remove the pip dir from site-packages + # This is needed now that we're using python -m pip instead of the + # pip binary directly. 
python -m pip will still work even if the + # pip binary is missing + shutil.rmtree(os.path.join(site_dir, "pip")) + + # Let's run a pip depending functions + for func in ("pip.freeze", "pip.list"): + ret = salt_cli.run(func, bin_env=venv_dir, minion_tgt=salt_minion.id).data + assert ( + "Command required for '{}' not found: Could not find a `pip` binary".format( + func + ) + in ret + ) + + +@pytest.mark.slow_test +def test_requirements_as_list_of_chains__cwd_set__absolute_file_path( + venv_dir, salt_cli, salt_minion +): + with VirtualEnv(venv_dir): + + # Create a requirements file that depends on another one. + + req1_filename = os.path.join(venv_dir, "requirements1.txt") + req1b_filename = os.path.join(venv_dir, "requirements1b.txt") + req2_filename = os.path.join(venv_dir, "requirements2.txt") + req2b_filename = os.path.join(venv_dir, "requirements2b.txt") + + with salt.utils.files.fopen(req1_filename, "w") as f: + f.write("-r requirements1b.txt\n") + with salt.utils.files.fopen(req1b_filename, "w") as f: + f.write("irc3-plugins-test\n") + with salt.utils.files.fopen(req2_filename, "w") as f: + f.write("-r requirements2b.txt\n") + with salt.utils.files.fopen(req2b_filename, "w") as f: + f.write("pep8\n") + + requirements_list = [req1_filename, req2_filename] + + ret = salt_cli.run( + "pip.install", + requirements=requirements_list, + bin_env=venv_dir, + cwd=venv_dir, + minion_tgt=salt_minion.id, + ) + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + assert ret.returncode == 0 + found = _pip_successful_install(ret.stdout) + assert found + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." 
+ " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.slow_test +def test_requirements_as_list_of_chains__cwd_not_set__absolute_file_path( + venv_dir, salt_cli, salt_minion +): + with VirtualEnv(venv_dir): + + # Create a requirements file that depends on another one. + + req1_filename = os.path.join(venv_dir, "requirements1.txt") + req1b_filename = os.path.join(venv_dir, "requirements1b.txt") + req2_filename = os.path.join(venv_dir, "requirements2.txt") + req2b_filename = os.path.join(venv_dir, "requirements2b.txt") + + with salt.utils.files.fopen(req1_filename, "w") as f: + f.write("-r requirements1b.txt\n") + with salt.utils.files.fopen(req1b_filename, "w") as f: + f.write("irc3-plugins-test\n") + with salt.utils.files.fopen(req2_filename, "w") as f: + f.write("-r requirements2b.txt\n") + with salt.utils.files.fopen(req2b_filename, "w") as f: + f.write("pep8\n") + + requirements_list = [req1_filename, req2_filename] + + ret = salt_cli.run( + "pip.install", + requirements=requirements_list, + bin_env=venv_dir, + minion_tgt=salt_minion.id, + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + assert ret.returncode == 0 + found = _pip_successful_install(ret.stdout) + assert found + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." 
+ " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.slow_test +def test_requirements_as_list__absolute_file_path(venv_dir, salt_cli, salt_minion): + with VirtualEnv(venv_dir): + + req1_filename = os.path.join(venv_dir, "requirements.txt") + req2_filename = os.path.join(venv_dir, "requirements2.txt") + + with salt.utils.files.fopen(req1_filename, "w") as f: + f.write("irc3-plugins-test\n") + with salt.utils.files.fopen(req2_filename, "w") as f: + f.write("pep8\n") + + requirements_list = [req1_filename, req2_filename] + + ret = salt_cli.run( + "pip.install", + requirements=requirements_list, + bin_env=venv_dir, + minion_tgt=salt_minion.id, + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + assert ret.returncode == 0 + found = _pip_successful_install(ret.stdout) + assert found + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." + " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.slow_test +def test_requirements_as_list__non_absolute_file_path(venv_dir, salt_cli, salt_minion): + with VirtualEnv(venv_dir): + + # Create a requirements file that depends on another one. 
+ + req1_filename = "requirements.txt" + req2_filename = "requirements2.txt" + req_cwd = venv_dir + + req1_filepath = os.path.join(req_cwd, req1_filename) + req2_filepath = os.path.join(req_cwd, req2_filename) + + with salt.utils.files.fopen(req1_filepath, "w") as f: + f.write("irc3-plugins-test\n") + with salt.utils.files.fopen(req2_filepath, "w") as f: + f.write("pep8\n") + + requirements_list = [req1_filename, req2_filename] + + ret = salt_cli.run( + "pip.install", + f"cwd={req_cwd}", + requirements=requirements_list, + bin_env=venv_dir, + minion_tgt=salt_minion.id, + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + assert ret.returncode == 0 + found = _pip_successful_install(ret.stdout) + assert found + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." + " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.slow_test +def test_chained_requirements__absolute_file_path(venv_dir, salt_cli, salt_minion): + with VirtualEnv(venv_dir): + + # Create a requirements file that depends on another one. + + req1_filename = os.path.join(venv_dir, "requirements.txt") + req2_filename = os.path.join(venv_dir, "requirements2.txt") + + with salt.utils.files.fopen(req1_filename, "w") as f: + f.write("-r requirements2.txt") + with salt.utils.files.fopen(req2_filename, "w") as f: + f.write("pep8") + + ret = salt_cli.run( + "pip.install", + requirements=req1_filename, + bin_env=venv_dir, + minion_tgt=salt_minion.id, + ) + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + assert ret.returncode == 0 + assert "installed pep8" in ret.stdout + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." 
+ " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.slow_test +def test_chained_requirements__non_absolute_file_path(venv_dir, salt_cli, salt_minion): + with VirtualEnv(venv_dir): + + # Create a requirements file that depends on another one. + req_basepath = venv_dir + + req1_filename = "requirements.txt" + req2_filename = "requirements2.txt" + + req1_file = os.path.join(venv_dir, req1_filename) + req2_file = os.path.join(venv_dir, req2_filename) + + with salt.utils.files.fopen(req1_file, "w") as f: + f.write("-r requirements2.txt") + with salt.utils.files.fopen(req2_file, "w") as f: + f.write("pep8") + + ret = salt_cli.run( + "pip.install", + f"cwd={req_basepath}", + requirements=req1_filename, + bin_env=venv_dir, + minion_tgt=salt_minion.id, + ) + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + assert ret.returncode == 0 + assert "installed pep8" in ret.stdout + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." + " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.slow_test +def test_issue_4805_nested_requirements(venv_dir, salt_cli, salt_minion): + with VirtualEnv(venv_dir): + + # Create a requirements file that depends on another one. + req1_filename = os.path.join(venv_dir, "requirements.txt") + req2_filename = os.path.join(venv_dir, "requirements2.txt") + with salt.utils.files.fopen(req1_filename, "w") as f: + f.write("-r requirements2.txt") + with salt.utils.files.fopen(req2_filename, "w") as f: + f.write("pep8") + + ret = salt_cli.run( + "pip.install", + requirements=req1_filename, + bin_env=venv_dir, + timeout=300, + minion_tgt=salt_minion.id, + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." 
+ " Output:\n{}".format(ret) + ) + + try: + if _check_download_error(ret.stdout): + pytest.skip("Test skipped due to pip download error") + assert ret.returncode == 0 + assert "installed pep8" in ret.stdout + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." + " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.slow_test +def test_pip_uninstall(venv_dir, salt_cli, salt_minion): + # Let's create the testing virtualenv + with VirtualEnv(venv_dir): + ret = salt_cli.run( + "pip.install", ["pep8"], bin_env=venv_dir, minion_tgt=salt_minion.id + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + if _check_download_error(ret.stdout): + pytest.skip("Test skipped due to pip download error") + assert ret.returncode == 0 + assert "installed pep8" in ret.stdout + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." + " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + ret = salt_cli.run( + "pip.uninstall", ["pep8"], bin_env=venv_dir, minion_tgt=salt_minion.id + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.uninstall' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + assert ret.returncode == 0 + assert "uninstalled pep8" in ret.stdout + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." 
+ " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.slow_test +def test_pip_install_upgrade(venv_dir, salt_cli, salt_minion): + # Create the testing virtualenv + with VirtualEnv(venv_dir): + ret = salt_cli.run( + "pip.install", "pep8==1.3.4", bin_env=venv_dir, minion_tgt=salt_minion.id + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + if _check_download_error(ret.stdout): + pytest.skip("Test skipped due to pip download error") + assert ret.returncode == 0 + assert "installed pep8" in ret.stdout + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." + " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + ret = salt_cli.run( + "pip.install", + "pep8", + bin_env=venv_dir, + upgrade=True, + minion_tgt=salt_minion.id, + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + if _check_download_error(ret.stdout): + pytest.skip("Test skipped due to pip download error") + assert ret.returncode == 0 + assert "installed pep8" in ret.stdout + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." + " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + ret = salt_cli.run( + "pip.uninstall", "pep8", bin_env=venv_dir, minion_tgt=salt_minion.id + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.uninstall' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + assert ret.returncode == 0 + assert "uninstalled pep8" in ret.stdout + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." 
+ " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.slow_test +def test_pip_install_multiple_editables(venv_dir, salt_cli, salt_minion): + editables = [ + "git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr", + "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting", + ] + + # Create the testing virtualenv + with VirtualEnv(venv_dir): + ret = salt_cli.run( + "pip.install", + [], + editable="{}".format(",".join(editables)), + bin_env=venv_dir, + minion_tgt=salt_minion.id, + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + try: + if _check_download_error(ret.stdout): + pytest.skip("Test skipped due to pip download error") + assert ret.returncode == 0 + for package in ("iStr", "SaltTesting"): + match = re.search( + r"(?:.*)(Successfully installed)(?:.*)({})(?:.*)".format(package), + ret.stdout, + ) + assert match is not None + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." + " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.slow_test +def test_pip_install_multiple_editables_and_pkgs(venv_dir, salt_cli, salt_minion): + editables = [ + "git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr", + "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting", + ] + + # Create the testing virtualenv + with VirtualEnv(venv_dir): + ret = salt_cli.run( + "pip.install", + ["pep8"], + editable="{}".format(",".join(editables)), + bin_env=venv_dir, + minion_tgt=salt_minion.id, + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." 
+ " Output:\n{}".format(ret) + ) + + try: + if _check_download_error(ret.stdout): + pytest.skip("Test skipped due to pip download error") + assert ret.returncode == 0 + for package in ("iStr", "SaltTesting", "pep8"): + match = re.search( + r"(?:.*)(Successfully installed)(?:.*)({})(?:.*)".format(package), + ret.stdout, + ) + assert match is not None + except KeyError as exc: + pytest.fail( + "The returned dictionary is missing an expected key. Error: '{}'." + " Dictionary: {}".format(exc, pprint.pformat(ret)) + ) + + +@pytest.mark.parametrize("touch", [True, False]) +@pytest.mark.slow_test +def test_pip_non_existent_log_file(venv_dir, salt_cli, salt_minion, tmp_path, touch): + log_file = tmp_path / "tmp-pip-install.log" + if touch: + log_file.touch() + # Create the testing virtualenv + with VirtualEnv(venv_dir): + ret = salt_cli.run( + "pip.install", + ["pep8"], + log=str(log_file), + bin_env=venv_dir, + minion_tgt=salt_minion.id, + ) + + if not isinstance(ret.data, dict): + pytest.fail( + "The 'pip.install' command did not return the expected dictionary." + " Output:\n{}".format(ret) + ) + + if _check_download_error(ret.stdout): + pytest.skip("Test skipped due to pip download error") + assert ret.returncode == 0 + assert log_file.exists() + assert "pep8" in log_file.read_text() + + +@pytest.mark.skipif( + shutil.which("/bin/pip3") is None, reason="Could not find /bin/pip3" +) +@pytest.mark.skip_on_windows(reason="test specific for linux usage of /bin/python") +@pytest.mark.skip_initial_gh_actions_failure( + reason="This was skipped on older golden images and is failing on newer." 
+) +def test_system_pip3(salt_cli, salt_minion): + salt_cli.run( + "pip.install", + pkgs=["lazyimport==0.0.1"], + bin_env="/bin/pip3", + minion_tgt=salt_minion.id, + ) + ret1 = salt_cli.run( + "cmd.run_all", "/bin/pip3 freeze | grep lazyimport", minion_tgt=salt_minion.id + ) + assert "lazyimport==0.0.1" in ret1.stdout + + salt_cli.run( + "pip.uninstall", + pkgs=["lazyimport"], + bin_env="/bin/pip3", + minion_tgt=salt_minion.id, + ) + ret2 = salt_cli.run( + "cmd.run_all", "/bin/pip3 freeze | grep lazyimport", minion_tgt=salt_minion.id + ) + assert ret2.data["stdout"] == "" diff --git a/tests/pytests/integration/modules/test_useradd.py b/tests/pytests/integration/modules/test_useradd.py index 5dd32efe8e0f..6b180d89df61 100644 --- a/tests/pytests/integration/modules/test_useradd.py +++ b/tests/pytests/integration/modules/test_useradd.py @@ -7,6 +7,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/integration/modules/test_virt.py b/tests/pytests/integration/modules/test_virt.py index 57ec239c4e91..8a9b1294065b 100644 --- a/tests/pytests/integration/modules/test_virt.py +++ b/tests/pytests/integration/modules/test_virt.py @@ -7,7 +7,6 @@ import pytest -import salt.version from tests.support.virt import SaltVirtMinionContainerFactory docker = pytest.importorskip("docker") @@ -20,19 +19,6 @@ ] -def _install_salt_dependencies(container): - dependencies = [] - for package, version in salt.version.dependency_information(): - if package not in ("packaging", "looseversion"): - # These are newer base dependencies which the container might not - # yet have - continue - dependencies.append(f"{package}=={version}") - if dependencies: - ret = container.run("python3", "-m", "pip", "install", *dependencies) - log.debug("Install missing dependecies ret: %s", ret) - - @pytest.fixture(scope="module") def virt_minion_0_id(): return "virt-minion-0" @@ -66,13 +52,13 @@ def virt_minion_0( 
"extra_hosts": { virt_minion_0_id: "127.0.0.1", virt_minion_1_id: "127.0.0.1", - } + }, + "cgroupns": "host", }, pull_before_start=True, skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) - factory.before_start(_install_salt_dependencies, factory) factory.after_terminate( pytest.helpers.remove_stale_minion_key, salt_master, factory.id ) @@ -103,13 +89,13 @@ def virt_minion_1( "extra_hosts": { virt_minion_0_id: "127.0.0.1", virt_minion_1_id: "127.0.0.1", - } + }, + "cgroupns": "host", }, pull_before_start=True, skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) - factory.before_start(_install_salt_dependencies, factory) factory.after_terminate( pytest.helpers.remove_stale_minion_key, salt_master, factory.id ) @@ -250,10 +236,15 @@ def test_capabilities(self, salt_cli, virt_minion_0): assert len(caps["guests"]) >= 1 assert caps["guests"][0]["os_type"] in ["hvm", "xen", "xenpvh", "exe"] - def test_cpu_baseline(self, salt_cli, virt_minion_0): + def test_cpu_baseline(self, salt_cli, virt_minion_0, grains): """ Test virt.cpu_baseline """ + if grains.get("osarch", "") != "x86_64": + raise pytest.skip.Exception( + f"Test is only meant to run on 'x86_64' architecture, not '{grains['osarch']}'", + _use_item_location=True, + ) vendors = ["Intel", "ARM", "AMD"] ret = salt_cli.run( "virt.cpu_baseline", out="libvirt", minion_tgt=virt_minion_0.id @@ -338,7 +329,12 @@ def virt_domain(): @pytest.fixture -def prep_virt(salt_cli, virt_minion_0, virt_minion_1, virt_domain): +def prep_virt(salt_cli, virt_minion_0, virt_minion_1, virt_domain, grains): + if grains.get("osarch", "") != "x86_64": + raise pytest.skip.Exception( + f"Test is only meant to run on 'x86_64' architecture, not '{grains['osarch']}'", + _use_item_location=True, + ) try: ret = salt_cli.run("virt.list_domains", minion_tgt=virt_minion_0.id) assert ret.returncode == 0, ret @@ -380,13 +376,18 @@ def prep_virt(salt_cli, virt_minion_0, virt_minion_1, virt_domain): 
@pytest.mark.slow_test @pytest.mark.skip_if_binaries_missing("docker") class TestVirtMigrateTest: - def test_define_xml_path(self, salt_cli, virt_minion_0, virt_domain): + def test_define_xml_path(self, salt_cli, virt_minion_0, virt_domain, grains): """ Define a new domain with virt.define_xml_path, verify that the new domain is shown with virt.list_domains, remove the domain with virt.undefine, and verifies that domain is no longer shown with virt.list_domains. """ + if grains.get("osarch", "") != "x86_64": + raise pytest.skip.Exception( + f"Test is only meant to run on 'x86_64' architecture, not '{grains['osarch']}'", + _use_item_location=True, + ) ret = salt_cli.run( "virt.define_xml_path", "/{}.xml".format(virt_domain), @@ -416,11 +417,13 @@ def test_define_xml_path(self, salt_cli, virt_minion_0, virt_domain): assert domains == [] def test_ssh_migration( - self, salt_cli, virt_minion_0, virt_minion_1, prep_virt, virt_domain + self, salt_cli, virt_minion_0, virt_minion_1, prep_virt, virt_domain, grains ): """ Test domain migration over SSH, TCP and TLS transport protocol """ + if grains["os"] == "VMware Photon OS" and grains["osmajorrelease"] == 3: + pytest.skip("Skipping this test on PhotonOS 3") ret = salt_cli.run("virt.list_active_vms", minion_tgt=virt_minion_0.id) assert ret.returncode == 0, ret diff --git a/tests/pytests/integration/modules/test_x509_v2.py b/tests/pytests/integration/modules/test_x509_v2.py index 2fd005778c51..99d0d2133976 100644 --- a/tests/pytests/integration/modules/test_x509_v2.py +++ b/tests/pytests/integration/modules/test_x509_v2.py @@ -673,6 +673,35 @@ def test_sign_remote_certificate_copypath(x509_salt_call_cli, cert_args, tmp_pat assert (tmp_path / f"{cert.serial_number:x}.crt").exists() +def test_create_private_key(x509_salt_call_cli): + """ + Ensure calling from the CLI works as expected and does not complain + about unknown internal kwargs (__pub_fun etc). 
+ """ + ret = x509_salt_call_cli.run("x509.create_private_key") + assert ret.returncode == 0 + assert ret.data + assert ret.data.startswith("-----BEGIN PRIVATE KEY-----") + + +def test_create_crl(x509_salt_call_cli, ca_key, ca_cert, x509_pkidir): + """ + Ensure calling from the CLI works as expected and does not complain + about unknown internal kwargs (__pub_fun etc). + """ + with pytest.helpers.temp_file("key", ca_key, x509_pkidir) as ca_keyfile: + with pytest.helpers.temp_file("cert", ca_cert, x509_pkidir) as ca_certfile: + ret = x509_salt_call_cli.run( + "x509.create_crl", + revoked=[], + signing_private_key=str(ca_keyfile), + signing_cert=str(ca_certfile), + ) + assert ret.returncode == 0 + assert ret.data + assert ret.data.startswith("-----BEGIN X509 CRL-----") + + def _belongs_to(cert_or_pubkey, privkey): if isinstance(cert_or_pubkey, cx509.Certificate): cert_or_pubkey = cert_or_pubkey.public_key() diff --git a/tests/pytests/integration/proxy/test_deltaproxy.py b/tests/pytests/integration/proxy/test_deltaproxy.py index 4ea69e0b8a51..48f23b18d680 100644 --- a/tests/pytests/integration/proxy/test_deltaproxy.py +++ b/tests/pytests/integration/proxy/test_deltaproxy.py @@ -10,7 +10,8 @@ pytestmark = [ pytest.mark.skip_on_spawning_platform( reason="Deltaproxy minions do not currently work on spawning platforms.", - ) + ), + pytest.mark.slow_test, ] diff --git a/tests/pytests/integration/proxy/test_simple.py b/tests/pytests/integration/proxy/test_simple.py index c1c0cada47ce..f3d26834ef90 100644 --- a/tests/pytests/integration/proxy/test_simple.py +++ b/tests/pytests/integration/proxy/test_simple.py @@ -8,6 +8,7 @@ log = logging.getLogger(__name__) +@pytest.mark.core_test def test_can_it_ping(salt_cli, salt_proxy): """ Ensure the proxy can ping @@ -16,6 +17,7 @@ def test_can_it_ping(salt_cli, salt_proxy): assert ret.data is True +@pytest.mark.slow_test def test_list_pkgs(salt_cli, salt_proxy): """ Package test 1, really just tests that the virtual function 
capability diff --git a/tests/pytests/integration/reactor/test_reactor.py b/tests/pytests/integration/reactor/test_reactor.py index 875d71623e38..0c73d282a71a 100644 --- a/tests/pytests/integration/reactor/test_reactor.py +++ b/tests/pytests/integration/reactor/test_reactor.py @@ -119,21 +119,21 @@ def test_reactor_is_leader( When leader is set to false reactor should timeout/not do anything. """ ret = salt_run_cli.run("reactor.is_leader") - assert ret.returncode == 0 + assert ret.returncode == 1 assert ( "salt.exceptions.CommandExecutionError: Reactor system is not running." in ret.stdout ) ret = salt_run_cli.run("reactor.set_leader", value=True) - assert ret.returncode == 0 + assert ret.returncode == 1 assert ( "salt.exceptions.CommandExecutionError: Reactor system is not running." in ret.stdout ) ret = salt_run_cli.run("reactor.is_leader") - assert ret.returncode == 0 + assert ret.returncode == 1 assert ( "salt.exceptions.CommandExecutionError: Reactor system is not running." in ret.stdout @@ -220,7 +220,7 @@ def test_reactor_is_leader( # Let's just confirm the engine is not running once again(because the config file is deleted by now) ret = salt_run_cli.run("reactor.is_leader") - assert ret.returncode == 0 + assert ret.returncode == 1 assert ( "salt.exceptions.CommandExecutionError: Reactor system is not running." 
in ret.stdout diff --git a/tests/pytests/integration/renderers/test_jinja.py b/tests/pytests/integration/renderers/test_jinja.py new file mode 100644 index 000000000000..1a902e2047e7 --- /dev/null +++ b/tests/pytests/integration/renderers/test_jinja.py @@ -0,0 +1,36 @@ +import pytest + +import salt.utils.files + +pytestmark = [ + pytest.mark.slow_test, +] + + +def test_issue_54765_salt(tmp_path, salt_cli, salt_minion): + file_path = str(tmp_path / "issue-54765") + ret = salt_cli.run( + "state.sls", + mods="issue-54765", + pillar={"file_path": file_path}, + minion_tgt=salt_minion.id, + ).data + key = "file_|-issue-54765_|-{}_|-managed".format(file_path) + assert key in ret + assert ret[key]["result"] is True + with salt.utils.files.fopen(file_path, "r") as fp: + assert fp.read().strip() == "bar" + + +def test_issue_54765_call(tmp_path, salt_call_cli): + file_path = str(tmp_path / "issue-54765") + ret = salt_call_cli.run( + "--local", + "state.apply", + "issue-54765", + pillar=f"{{'file_path': '{file_path}'}}", + ) + key = "file_|-issue-54765_|-{}_|-managed".format(file_path) + assert ret.data[key]["result"] is True + with salt.utils.files.fopen(file_path, "r") as fp: + assert fp.read().strip() == "bar" diff --git a/tests/pytests/integration/runners/state/orchestrate/test_events.py b/tests/pytests/integration/runners/state/orchestrate/test_events.py index 4a4b7eb6f645..5ebaf500765a 100644 --- a/tests/pytests/integration/runners/state/orchestrate/test_events.py +++ b/tests/pytests/integration/runners/state/orchestrate/test_events.py @@ -4,16 +4,93 @@ import concurrent.futures import functools import json +import logging import time +import attr import pytest +from saltfactories.utils import random_string import salt.utils.jid import salt.utils.platform +import salt.utils.pycrypto -pytestmark = [ - pytest.mark.slow_test, -] +log = logging.getLogger(__name__) + + +@attr.s(kw_only=True, slots=True) +class TestMasterAccount: + username = attr.ib() + password = attr.ib() + 
_delete_account = attr.ib(init=False, repr=False, default=False) + + @username.default + def _default_username(self): + return random_string("account-", uppercase=False) + + @password.default + def _default_password(self): + return random_string("pwd-", size=8) + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + +@pytest.fixture(scope="session") +def salt_auth_account_m_factory(): + return TestMasterAccount(username="saltdev-auth-m") + + +@pytest.fixture(scope="module") +def salt_auth_account_m(salt_auth_account_m_factory): + with salt_auth_account_m_factory as account: + yield account + + +@pytest.fixture(scope="module") +def runner_master_config(salt_auth_account_m): + return { + "external_auth": { + "pam": {salt_auth_account_m.username: [{"*": [".*"]}, "@runner", "@wheel"]} + } + } + + +@pytest.fixture(scope="module") +def runner_salt_master(salt_factories, runner_master_config): + factory = salt_factories.salt_master_daemon( + "runner-master", defaults=runner_master_config + ) + with factory.started(): + yield factory + + +@pytest.fixture(scope="module") +def runner_salt_run_cli(runner_salt_master): + return runner_salt_master.salt_run_cli() + + +@pytest.fixture(scope="module") +def runner_salt_call_cli(runner_salt_minion): + return runner_salt_minion.salt_call_cli() + + +@pytest.fixture(scope="module") +def runner_add_user(runner_salt_run_cli, salt_auth_account_m): + ## create user on master to use + ret = runner_salt_run_cli.run("salt.cmd", "user.add", salt_auth_account_m.username) + assert ret.returncode == 0 + + yield + + ## remove user on master + ret = runner_salt_run_cli.run( + "salt.cmd", "user.delete", salt_auth_account_m.username + ) + assert ret.returncode == 0 def test_state_event(salt_run_cli, salt_cli, salt_minion): @@ -109,6 +186,7 @@ def test_jid_in_ret_event(salt_run_cli, salt_master, salt_minion, event_listener # This test is flaky on FreeBSD @pytest.mark.skip_on_freebsd +@pytest.mark.slow_test 
@pytest.mark.skip_on_spawning_platform( reason="The '__low__' global is not populated on spawning platforms" ) @@ -335,3 +413,106 @@ def test_orchestration_onchanges_and_prereq( # After the file was created, running again in test mode should have # shown no changes. assert not state_data["changes"] + + +@pytest.mark.slow_test +@pytest.mark.skip_if_not_root +@pytest.mark.skip_on_windows +@pytest.mark.skip_on_darwin +def test_unknown_in_runner_event( + runner_salt_run_cli, + runner_salt_master, + salt_minion, + salt_auth_account_m, + runner_add_user, + event_listener, +): + """ + Test to confirm that the ret event for the orchestration contains the + jid for the jobs spawned. + """ + file_roots_base_dir = runner_salt_master.config["file_roots"]["base"][0] + test_top_file_contents = """ + base: + '{minion_id}': + - {file_roots} + """.format( + minion_id=salt_minion.id, file_roots=file_roots_base_dir + ) + test_init_state_contents = """ + always-passes-with-any-kwarg: + test.nop: + - name: foo + - something: else + - foo: bar + always-passes: + test.succeed_without_changes: + - name: foo + always-changes-and-succeeds: + test.succeed_with_changes: + - name: foo + {{slspath}}: + test.nop + """ + test_orch_contents = """ + test_highstate: + salt.state: + - tgt: {minion_id} + - highstate: True + test_runner_metasyntetic: + salt.runner: + - name: test.metasyntactic + - locality: us + """.format( + minion_id=salt_minion.id + ) + with runner_salt_master.state_tree.base.temp_file( + "top.sls", test_top_file_contents + ), runner_salt_master.state_tree.base.temp_file( + "init.sls", test_init_state_contents + ), runner_salt_master.state_tree.base.temp_file( + "orch.sls", test_orch_contents + ): + ret = runner_salt_run_cli.run( + "salt.cmd", "shadow.gen_password", salt_auth_account_m.password + ) + assert ret.returncode == 0 + + gen_pwd = ret.stdout + ret = runner_salt_run_cli.run( + "salt.cmd", "shadow.set_password", salt_auth_account_m.username, gen_pwd + ) + assert 
ret.returncode == 0 + + jid = salt.utils.jid.gen_jid(runner_salt_master.config) + start_time = time.time() + + ret = runner_salt_run_cli.run( + "--jid", + jid, + "-a", + "pam", + "--username", + salt_auth_account_m.username, + "--password", + salt_auth_account_m.password, + "state.orchestrate", + "orch", + ) + assert not ret.stdout.startswith("Authentication failure") + + expected_new_event_tag = "salt/run/*/new" + event_pattern = (runner_salt_master.id, expected_new_event_tag) + found_events = event_listener.get_events([event_pattern], after_time=start_time) + + for event in found_events: + if event.data["fun"] == "runner.test.metasyntactic": + assert event.data["user"] == salt_auth_account_m.username + + expected_ret_event_tag = "salt/run/*/ret" + event_pattern = (runner_salt_master.id, expected_ret_event_tag) + found_events = event_listener.get_events([event_pattern], after_time=start_time) + + for event in found_events: + if event.data["fun"] == "runner.test.metasyntactic": + assert event.data["user"] == salt_auth_account_m.username diff --git a/tests/pytests/integration/runners/test_jobs.py b/tests/pytests/integration/runners/test_jobs.py new file mode 100644 index 000000000000..bf1d89d8340b --- /dev/null +++ b/tests/pytests/integration/runners/test_jobs.py @@ -0,0 +1,137 @@ +""" +Tests for the salt-run command +""" + +import pytest + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.windows_whitelisted, +] + + +def test_master(salt_run_cli, salt_minion): + """ + jobs.master + """ + ret = salt_run_cli.run("jobs.master") + assert ret.data == [] + assert ret.stdout.strip() == "[]" + + +def test_active(salt_run_cli, salt_minion): + """ + jobs.active + """ + ret = salt_run_cli.run("jobs.active") + assert ret.data == {} + assert ret.stdout.strip() == "{}" + + +def test_lookup_jid(salt_run_cli, salt_minion): + """ + jobs.lookup_jid + """ + ret = salt_run_cli.run("jobs.lookup_jid", "23974239742394") + assert ret.data == {} + assert ret.stdout.strip() == "{}" + + 
+def test_lookup_jid_invalid(salt_run_cli, salt_minion): + """ + jobs.lookup_jid + """ + ret = salt_run_cli.run("jobs.lookup_jid") + expected = "Passed invalid arguments:" + assert expected in ret.stdout + + +def test_list_jobs(salt_run_cli, salt_minion, salt_cli): + """ + jobs.list_jobs + """ + salt_cli.run("test.echo", "test_list_jobs", minion_tgt=salt_minion.id) + ret = salt_run_cli.run("jobs.list_jobs") + assert isinstance(ret.data, dict) + for job in ret.data.values(): + if job["Function"] != "test.echo": + continue + if job["Arguments"] != ["test_list_jobs"]: + continue + # We found our job in the list, we're good with the test + break + else: + pytest.fail("Did not find our job from the jobs.list_jobs call") + + +def test_target_info(salt_run_cli, salt_minion, salt_cli): + """ + This is a test case for issue #48734 + + PR #43454 fixed an issue where "jobs.lookup_jid" was not working + correctly with external job caches. However, this fix for external + job caches broke some inner workings of job storage when using the + local_cache. + + We need to preserve the previous behavior for the local_cache, but + keep the new behavior for other external job caches. + + If "savefstr" is called in the local cache, the target data does not + get written to the local_cache, and the target-type gets listed as a + "list" type instead of "glob". + + This is a regression test for fixing the local_cache behavior. 
+ """ + salt_cli.run("test.echo", "target_info_test", minion_tgt=salt_minion.id) + ret = salt_run_cli.run("jobs.list_jobs") + for item in ret.data.values(): + if ( + item["Function"] == "test.echo" + and item["Arguments"][0] == "target_info_test" + ): + job_ret = item + tgt = job_ret["Target"] + tgt_type = job_ret["Target-type"] + + assert tgt != "unknown-target" + assert tgt == salt_minion.id + assert tgt_type == "glob" + + +def test_jobs_runner(salt_run_cli, salt_minion): + """ + Test when running a runner job and then + running jobs_list to ensure the job was saved + properly in the cache + """ + salt_run_cli.run("test.arg", "arg1", kwarg1="kwarg1") + ret = salt_run_cli.run("jobs.list_jobs") + jid = None + for key, item in ret.data.items(): + if item["Function"] == "runner.test.arg": + jid = key + + get_job = salt_run_cli.run("jobs.list_job", jid) + assert not get_job.data.get("Error") + assert get_job.data["jid"] == jid + + +def test_target_info_salt_call(salt_run_cli, salt_minion, salt_call_cli): + """ + Check the job infor for a call initiated + with salt call + """ + test = salt_call_cli.run("test.echo", "target_info_test", minion_tgt=salt_minion.id) + ret = salt_run_cli.run("jobs.list_jobs") + for item in ret.data.values(): + if ( + item["Function"] == "test.echo" + and item["Arguments"][0] == "target_info_test" + ): + job_ret = item + tgt = job_ret["Target"] + tgt_type = job_ret["Target-type"] + + assert tgt != "unknown-target" + assert tgt == salt_minion.id + assert tgt_type == "glob" diff --git a/tests/pytests/integration/runners/test_nacl.py b/tests/pytests/integration/runners/test_nacl.py new file mode 100644 index 000000000000..dbdaf9db9ef7 --- /dev/null +++ b/tests/pytests/integration/runners/test_nacl.py @@ -0,0 +1,165 @@ +""" +Tests for the nacl runner +""" + +import pytest + +import salt.config +import salt.utils.stringutils +from tests.support.mock import patch + +pytest.importorskip("nacl.public") +pytest.importorskip("nacl.secret") + +import 
salt.runners.nacl as nacl + +pytestmark = [ + pytest.mark.windows_whitelisted, +] + + +@pytest.fixture(scope="module") +def minion_opts(): + return salt.config.minion_config(None) + + +@pytest.fixture +def test_data(): + unencrypted_data = salt.utils.stringutils.to_bytes("hello") + return unencrypted_data + + +def test_keygen(minion_opts): + """ + Test keygen + """ + # Store the data + with patch("salt.runners.nacl.__opts__", minion_opts, create=True): + ret = nacl.keygen() + assert "pk" in ret + assert "sk" in ret + + +def test_enc(test_data, minion_opts): + """ + Test keygen + """ + # Store the data + with patch("salt.runners.nacl.__opts__", minion_opts, create=True): + ret = nacl.keygen() + assert "pk" in ret + assert "sk" in ret + pk = ret["pk"] + sk = ret["sk"] + + # Encrypt with pk + ret = nacl.enc( + data=test_data, + pk=pk, + ) + + +def test_enc_dec(test_data, minion_opts): + """ + Store, list, fetch, then flush data + """ + # Store the data + with patch("salt.runners.nacl.__opts__", minion_opts, create=True): + ret = nacl.keygen() + assert "pk" in ret + assert "sk" in ret + pk = ret["pk"] + sk = ret["sk"] + + # Encrypt with pk + encrypted_data = nacl.enc( + data=test_data, + pk=pk, + ) + + # Decrypt with sk + ret = nacl.dec( + data=encrypted_data, + sk=sk, + ) + assert test_data == ret + + +def test_sealedbox_enc_dec(test_data, minion_opts): + """ + Generate keys, encrypt, then decrypt. + """ + # Store the data + with patch("salt.runners.nacl.__opts__", minion_opts, create=True): + ret = nacl.keygen() + assert "pk" in ret + assert "sk" in ret + pk = ret["pk"] + sk = ret["sk"] + + # Encrypt with pk + encrypted_data = nacl.sealedbox_encrypt( + data=test_data, + pk=pk, + ) + + # Decrypt with sk + ret = nacl.sealedbox_decrypt( + data=encrypted_data, + sk=sk, + ) + assert test_data == ret + + +def test_secretbox_enc_dec(test_data, minion_opts): + """ + Generate keys, encrypt, then decrypt. 
+ """ + # Store the data + with patch("salt.runners.nacl.__opts__", minion_opts, create=True): + ret = nacl.keygen() + assert "pk" in ret + assert "sk" in ret + pk = ret["pk"] + sk = ret["sk"] + + # Encrypt with pk + encrypted_data = nacl.secretbox_encrypt( + data=test_data, + sk=sk, + ) + + # Decrypt with sk + ret = nacl.secretbox_decrypt( + data=encrypted_data, + sk=sk, + ) + assert test_data == ret + + +def test_enc_dec_no_pk_no_sk(test_data, minion_opts): + """ + Store, list, fetch, then flush data + """ + # Store the data + with patch("salt.runners.nacl.__opts__", minion_opts, create=True): + ret = nacl.keygen() + assert "pk" in ret + assert "sk" in ret + pk = ret["pk"] + sk = ret["sk"] + + # Encrypt with pk + with pytest.raises(Exception, match="no pubkey or pk_file found"): + ret = nacl.enc( + data=test_data, + pk=None, + ) + + encrypted_data = test_data # dummy data, should get exception + # Decrypt with sk + with pytest.raises(Exception, match="no key or sk_file found"): + ret = nacl.dec( + data=encrypted_data, + sk=None, + ) diff --git a/tests/pytests/integration/runners/test_saltutil.py b/tests/pytests/integration/runners/test_saltutil.py index 179d1b7bf6a7..cc194030adca 100644 --- a/tests/pytests/integration/runners/test_saltutil.py +++ b/tests/pytests/integration/runners/test_saltutil.py @@ -76,7 +76,7 @@ def test_sync( """ Ensure modules are synced when various sync functions are called """ - module_name = "hello_sync_{}".format(module_type) + module_name = f"hello_sync_{module_type}" module_contents = """ def __virtual__(): return "hello" @@ -85,17 +85,45 @@ def world(): return "world" """ - test_moduledir = salt_master.state_tree.base.paths[0] / "_{}".format(module_type) + test_moduledir = salt_master.state_tree.base.write_path / f"_{module_type}" test_moduledir.mkdir(parents=True, exist_ok=True) module_tempfile = salt_master.state_tree.base.temp_file( - "_{}/{}.py".format(module_type, module_name), module_contents + 
f"_{module_type}/{module_name}.py", module_contents ) - with module_tempfile, test_moduledir: - salt_cmd = "saltutil.sync_{}".format(module_sync_functions[module_type]) + with module_tempfile: + salt_cmd = f"saltutil.sync_{module_sync_functions[module_type]}" ret = salt_run_cli.run(salt_cmd) assert ret.returncode == 0 - assert "{}.hello".format(module_type) in ret.stdout + assert f"{module_type}.hello" in ret.stdout + + +def test_sync_refresh_false( + module_type, module_sync_functions, salt_run_cli, salt_minion, salt_master +): + """ + Ensure modules are synced when various sync functions are called + """ + module_name = f"hello_sync_{module_type}" + module_contents = """ +def __virtual__(): + return "hello" + +def world(): + return "world" +""" + + test_moduledir = salt_master.state_tree.base.write_path / f"_{module_type}" + test_moduledir.mkdir(parents=True, exist_ok=True) + module_tempfile = salt_master.state_tree.base.temp_file( + f"_{module_type}/{module_name}.py", module_contents + ) + + with module_tempfile: + salt_cmd = f"saltutil.sync_{module_sync_functions[module_type]}" + ret = salt_run_cli.run(salt_cmd, saltenv=None, refresh=False) + assert ret.returncode == 0 + assert f"saltutil.sync_{module_sync_functions[module_type]}" in ret.stdout def _write_module_dir_and_file(module_type, salt_minion, salt_master): @@ -111,11 +139,11 @@ def world(): return "world" """ - test_moduledir = salt_master.state_tree.base.paths[0] / "_{}".format(module_type) + test_moduledir = salt_master.state_tree.base.paths[0] / f"_{module_type}" test_moduledir.mkdir(parents=True, exist_ok=True) module_tempfile = salt_master.state_tree.base.temp_file( - "_{}/{}.py".format(module_type, module_name), module_contents + f"_{module_type}/{module_name}.py", module_contents ) return module_tempfile @@ -139,4 +167,4 @@ def test_sync_all(salt_run_cli, salt_minion, salt_master): assert ret.returncode == 0 for module_type in get_module_types(): - assert "{}.hello".format(module_type) in 
ret.stdout + assert f"{module_type}.hello" in ret.stdout diff --git a/tests/pytests/integration/ssh/state/conftest.py b/tests/pytests/integration/ssh/state/conftest.py new file mode 100644 index 000000000000..14d645ae8e8a --- /dev/null +++ b/tests/pytests/integration/ssh/state/conftest.py @@ -0,0 +1,132 @@ +import pytest + + +@pytest.fixture(scope="module") +def state_tree(base_env_state_tree_root_dir): + top_file = """ + {%- from "map.jinja" import abc with context %} + base: + 'localhost': + - basic + '127.0.0.1': + - basic + """ + map_file = """ + {%- set abc = "def" %} + """ + state_file = """ + {%- from "map.jinja" import abc with context %} + Ok with {{ abc }}: + test.succeed_without_changes + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + map_tempfile = pytest.helpers.temp_file( + "map.jinja", map_file, base_env_state_tree_root_dir + ) + state_tempfile = pytest.helpers.temp_file( + "test.sls", state_file, base_env_state_tree_root_dir + ) + with top_tempfile, map_tempfile, state_tempfile: + yield + + +@pytest.fixture(scope="module") +def state_tree_dir(base_env_state_tree_root_dir): + """ + State tree with files to test salt-ssh + when the map.jinja file is in another directory + """ + top_file = """ + {%- from "test/map.jinja" import abc with context %} + base: + 'localhost': + - test + '127.0.0.1': + - test + """ + map_file = """ + {%- set abc = "def" %} + """ + state_file = """ + {%- from "test/map.jinja" import abc with context %} + + Ok with {{ abc }}: + test.succeed_without_changes + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + map_tempfile = pytest.helpers.temp_file( + "test/map.jinja", map_file, base_env_state_tree_root_dir + ) + state_tempfile = pytest.helpers.temp_file( + "test.sls", state_file, base_env_state_tree_root_dir + ) + + with top_tempfile, map_tempfile, state_tempfile: + yield + + +@pytest.fixture +def 
nested_state_tree(base_env_state_tree_root_dir, tmp_path): + top_file = """ + base: + 'localhost': + - basic + '127.0.0.1': + - basic + """ + state_file = """ + /{}/file.txt: + file.managed: + - source: salt://foo/file.jinja + - template: jinja + """.format( + tmp_path + ) + file_jinja = """ + {% from 'foo/map.jinja' import comment %}{{ comment }} + """ + map_file = """ + {% set comment = "blah blah" %} + """ + statedir = base_env_state_tree_root_dir / "foo" + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + map_tempfile = pytest.helpers.temp_file("map.jinja", map_file, statedir) + file_tempfile = pytest.helpers.temp_file("file.jinja", file_jinja, statedir) + state_tempfile = pytest.helpers.temp_file("init.sls", state_file, statedir) + + with top_tempfile, map_tempfile, state_tempfile, file_tempfile: + yield + + +@pytest.fixture(scope="module") +def pillar_tree_nested(base_env_pillar_tree_root_dir): + top_file = """ + base: + 'localhost': + - nested + '127.0.0.1': + - nested + """ + nested_pillar = r""" + {%- do salt.log.warning("hithere: pillar was rendered") %} + monty: python + the_meaning: + of: + life: 42 + bar: tender + for: what + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_pillar_tree_root_dir + ) + nested_tempfile = pytest.helpers.temp_file( + "nested.sls", nested_pillar, base_env_pillar_tree_root_dir + ) + with top_tempfile, nested_tempfile: + yield diff --git a/tests/pytests/integration/ssh/state/test_pillar_override.py b/tests/pytests/integration/ssh/state/test_pillar_override.py new file mode 100644 index 000000000000..c7f49b2b816d --- /dev/null +++ b/tests/pytests/integration/ssh/state/test_pillar_override.py @@ -0,0 +1,189 @@ +""" +Ensure pillar overrides are merged recursively, that wrapper +modules are in sync with the pillar dict in the rendering environment +and that the pillars are available on the target. 
+""" + +import json + +import pytest + +import salt.utils.dictupdate + +pytestmark = [ + pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.usefixtures("pillar_tree_nested"), + pytest.mark.slow_test, +] + + +def test_pillar_is_only_rendered_once_without_overrides(salt_ssh_cli, caplog): + ret = salt_ssh_cli.run("state.apply", "test") + assert ret.returncode == 0 + assert isinstance(ret.data, dict) + assert ret.data + assert ret.data[next(iter(ret.data))]["result"] is True + assert caplog.text.count("hithere: pillar was rendered") == 1 + + +def test_pillar_is_rerendered_with_overrides(salt_ssh_cli, caplog): + ret = salt_ssh_cli.run("state.apply", "test", pillar={"foo": "bar"}) + assert ret.returncode == 0 + assert isinstance(ret.data, dict) + assert ret.data + assert ret.data[next(iter(ret.data))]["result"] is True + assert caplog.text.count("hithere: pillar was rendered") == 2 + + +@pytest.fixture(scope="module", autouse=True) +def _show_pillar_state(base_env_state_tree_root_dir): + top_file = """ + base: + 'localhost': + - showpillar + '127.0.0.1': + - showpillar + """ + show_pillar_sls = """ + deep_thought: + test.show_notification: + - text: '{{ { + "raw": { + "the_meaning": pillar.get("the_meaning"), + "btw": pillar.get("btw")}, + "wrapped": { + "the_meaning": salt["pillar.get"]("the_meaning"), + "btw": salt["pillar.get"]("btw")}} + | json }}' + + target_check: + test.check_pillar: + - present: + - the_meaning:of:foo + - btw + - the_meaning:of:bar + - the_meaning:for + - listing: + - the_meaning:of:life + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + show_tempfile = pytest.helpers.temp_file( + "showpillar.sls", show_pillar_sls, base_env_state_tree_root_dir + ) + with top_tempfile, show_tempfile: + yield + + +@pytest.fixture +def base(): + return {"the_meaning": {"of": {"life": 42, "bar": "tender"}, "for": "what"}} + + +@pytest.fixture +def override(base): + poverride = 
{ + "the_meaning": {"of": {"life": [2.71], "foo": "lish"}}, + "btw": "turtles", + } + expected = salt.utils.dictupdate.merge(base, poverride) + return expected, poverride + + +def test_state_sls(salt_ssh_cli, override): + expected, override = override + ret = salt_ssh_cli.run("state.sls", "showpillar", pillar=override) + _assert_basic(ret) + assert len(ret.data) == 2 + for sid, sret in ret.data.items(): + if "show" in sid: + _assert_pillar(sret["comment"], expected) + else: + assert sret["result"] is True + + +@pytest.mark.parametrize("sid", ("deep_thought", "target_check")) +def test_state_sls_id(salt_ssh_cli, sid, override): + expected, override = override + ret = salt_ssh_cli.run("state.sls_id", sid, "showpillar", pillar=override) + _assert_basic(ret) + state_res = ret.data[next(iter(ret.data))] + if sid == "deep_thought": + _assert_pillar(state_res["comment"], expected) + else: + assert state_res["result"] is True + + +def test_state_highstate(salt_ssh_cli, override): + expected, override = override + ret = salt_ssh_cli.run("state.highstate", pillar=override, whitelist=["showpillar"]) + _assert_basic(ret) + assert len(ret.data) == 2 + for sid, sret in ret.data.items(): + if "show" in sid: + _assert_pillar(sret["comment"], expected) + else: + assert sret["result"] is True + + +def test_state_show_sls(salt_ssh_cli, override): + expected, override = override + ret = salt_ssh_cli.run("state.show_sls", "showpillar", pillar=override) + _assert_basic(ret) + pillar = ret.data["deep_thought"]["test"] + pillar = next(x["text"] for x in pillar if isinstance(x, dict)) + _assert_pillar(pillar, expected) + + +def test_state_show_low_sls(salt_ssh_cli, override): + expected, override = override + ret = salt_ssh_cli.run("state.show_low_sls", "showpillar", pillar=override) + _assert_basic(ret, list) + pillar = ret.data[0]["text"] + _assert_pillar(pillar, expected) + + +def test_state_single(salt_ssh_cli, override): + expected, override = override + ret = salt_ssh_cli.run( + 
"state.single", + "test.check_pillar", + "foo", + present=[ + "the_meaning:of:foo", + "btw", + "the_meaning:of:bar", + "the_meaning:for", + ], + listing=["the_meaning:of:life"], + pillar=override, + ) + _assert_basic(ret, dict) + state_res = ret.data[next(iter(ret.data))] + assert state_res["result"] is True + + +def test_state_top(salt_ssh_cli, override): + expected, override = override + ret = salt_ssh_cli.run("state.top", "top.sls", pillar=override) + _assert_basic(ret) + assert len(ret.data) == 2 + for sid, sret in ret.data.items(): + if "show" in sid: + _assert_pillar(sret["comment"], expected) + else: + assert sret["result"] is True + + +def _assert_pillar(pillar, expected): + if not isinstance(pillar, dict): + pillar = json.loads(pillar) + assert pillar["raw"] == expected + assert pillar["wrapped"] == expected + + +def _assert_basic(ret, typ=dict): + assert ret.returncode == 0 + assert isinstance(ret.data, typ) + assert ret.data diff --git a/tests/pytests/integration/ssh/state/test_pillar_override_template.py b/tests/pytests/integration/ssh/state/test_pillar_override_template.py new file mode 100644 index 000000000000..610efe7040f8 --- /dev/null +++ b/tests/pytests/integration/ssh/state/test_pillar_override_template.py @@ -0,0 +1,97 @@ +""" +Specifically ensure that pillars are merged as expected +for the target as well and available for renderers. +This should be covered by `test.check_pillar` above, but +let's check the specific output for the most important funcs. 
+Issue #59802 +""" + +import json + +import pytest + +import salt.utils.dictupdate + +pytestmark = [ + pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.usefixtures("pillar_tree_nested"), + pytest.mark.slow_test, +] + + +@pytest.fixture +def _write_pillar_state(base_env_state_tree_root_dir, tmp_path_factory): + tmp_path = tmp_path_factory.mktemp("tgtdir") + tgt_file = tmp_path / "deepthought.txt" + top_file = """ + base: + 'localhost': + - writepillar + '127.0.0.1': + - writepillar + """ + nested_pillar_file = f""" + deep_thought: + file.managed: + - name: {tgt_file} + - source: salt://deepthought.txt.jinja + - template: jinja + """ + deepthought = r""" + {{ + { + "raw": { + "the_meaning": pillar.get("the_meaning"), + "btw": pillar.get("btw")}, + "modules": { + "the_meaning": salt["pillar.get"]("the_meaning"), + "btw": salt["pillar.get"]("btw")} + } | json }} + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + show_tempfile = pytest.helpers.temp_file( + "writepillar.sls", nested_pillar_file, base_env_state_tree_root_dir + ) + deepthought_tempfile = pytest.helpers.temp_file( + "deepthought.txt.jinja", deepthought, base_env_state_tree_root_dir + ) + + with top_tempfile, show_tempfile, deepthought_tempfile: + yield tgt_file + + +@pytest.fixture +def base(): + return {"the_meaning": {"of": {"life": 42, "bar": "tender"}, "for": "what"}} + + +@pytest.fixture +def override(base): + poverride = { + "the_meaning": {"of": {"life": 2.71, "foo": "lish"}}, + "btw": "turtles", + } + expected = salt.utils.dictupdate.merge(base, poverride) + return expected, poverride + + +@pytest.mark.parametrize( + "args,kwargs", + ( + (("state.sls", "writepillar"), {}), + (("state.highstate",), {"whitelist": "writepillar"}), + (("state.top", "top.sls"), {}), + ), +) +def test_it(salt_ssh_cli, args, kwargs, override, _write_pillar_state): + expected, override = override + ret = salt_ssh_cli.run(*args, 
**kwargs, pillar=override) + assert ret.returncode == 0 + assert isinstance(ret.data, dict) + assert ret.data + assert _write_pillar_state.exists() + pillar = json.loads(_write_pillar_state.read_text()) + assert pillar["raw"] == expected + assert pillar["modules"] == expected diff --git a/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_requisite_fail.py b/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_requisite_fail.py new file mode 100644 index 000000000000..6b629a248d0d --- /dev/null +++ b/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_requisite_fail.py @@ -0,0 +1,62 @@ +""" +Verify salt-ssh fails with a retcode > 0 when a highstate verification fails. +``state.show_highstate`` does not validate this. +""" + +import pytest + +from salt.defaults.exitcodes import EX_AGGREGATE + +pytestmark = [ + pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.slow_test, +] + + +@pytest.fixture(scope="module", autouse=True) +def state_tree_req_fail(base_env_state_tree_root_dir): + top_file = """ + base: + 'localhost': + - fail_req + '127.0.0.1': + - fail_req + """ + state_file = """ + This has an invalid requisite: + test.nop: + - name: foo + - require_in: + - file.managed: invalid_requisite + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + state_tempfile = pytest.helpers.temp_file( + "fail_req.sls", state_file, base_env_state_tree_root_dir + ) + with top_tempfile, state_tempfile: + yield + + +@pytest.mark.parametrize( + "args,retcode", + ( + (("state.sls", "fail_req"), EX_AGGREGATE), + (("state.highstate",), EX_AGGREGATE), + (("state.show_sls", "fail_req"), EX_AGGREGATE), + (("state.show_low_sls", "fail_req"), EX_AGGREGATE), + # state.show_lowstate exits with 0 for non-ssh as well + (("state.show_lowstate",), 0), + (("state.top", "top.sls"), EX_AGGREGATE), + ), +) +def test_it(salt_ssh_cli, args, retcode): + ret = 
salt_ssh_cli.run(*args) + assert ret.returncode == retcode + assert isinstance(ret.data, list) + assert ret.data + assert isinstance(ret.data[0], str) + assert ret.data[0].startswith( + "Invalid requisite in require: file.managed for invalid_requisite" + ) diff --git a/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_structure_fail.py b/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_structure_fail.py new file mode 100644 index 000000000000..c74de1e20fd4 --- /dev/null +++ b/tests/pytests/integration/ssh/state/test_retcode_highstate_verification_structure_fail.py @@ -0,0 +1,64 @@ +""" +Verify salt-ssh fails with a retcode > 0 when a highstate verification fails. +This targets another step of the verification. +``state.sls_id`` does not seem to support extends. +``state.show_highstate`` does not validate this. +""" + +import pytest + +from salt.defaults.exitcodes import EX_AGGREGATE + +pytestmark = [ + pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.slow_test, +] + + +@pytest.fixture(scope="module", autouse=True) +def state_tree_structure_fail(base_env_state_tree_root_dir): + top_file = """ + base: + 'localhost': + - fail_structure + '127.0.0.1': + - fail_structure + """ + state_file = """ + extend: + Some file state: + file: + - name: /tmp/bar + - contents: bar + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + state_tempfile = pytest.helpers.temp_file( + "fail_structure.sls", state_file, base_env_state_tree_root_dir + ) + with top_tempfile, state_tempfile: + yield + + +@pytest.mark.parametrize( + "args,retcode", + ( + (("state.sls", "fail_structure"), EX_AGGREGATE), + (("state.highstate",), EX_AGGREGATE), + (("state.show_sls", "fail_structure"), EX_AGGREGATE), + (("state.show_low_sls", "fail_structure"), EX_AGGREGATE), + # state.show_lowstate exits with 0 for non-ssh as well + (("state.show_lowstate",), 0), + (("state.top", 
"top.sls"), EX_AGGREGATE), + ), +) +def test_it(salt_ssh_cli, args, retcode): + ret = salt_ssh_cli.run(*args) + assert ret.returncode == retcode + assert isinstance(ret.data, list) + assert ret.data + assert isinstance(ret.data[0], str) + assert ret.data[0].startswith( + "Cannot extend ID 'Some file state' in 'base:fail_structure" + ) diff --git a/tests/pytests/integration/ssh/state/test_retcode_pillar_render_exception.py b/tests/pytests/integration/ssh/state/test_retcode_pillar_render_exception.py new file mode 100644 index 000000000000..92cd56dbb39c --- /dev/null +++ b/tests/pytests/integration/ssh/state/test_retcode_pillar_render_exception.py @@ -0,0 +1,57 @@ +""" +Verify salt-ssh fails with a retcode > 0 when a pillar rendering fails. +""" + +import pytest + +from salt.defaults.exitcodes import EX_AGGREGATE + +pytestmark = [ + pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.slow_test, +] + + +@pytest.fixture(scope="module", autouse=True) +def pillar_tree_render_fail(base_env_pillar_tree_root_dir): + top_file = """ + base: + 'localhost': + - fail_render + '127.0.0.1': + - fail_render + """ + pillar_file = r""" + not_defined: {{ abc }} + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_pillar_tree_root_dir + ) + pillar_tempfile = pytest.helpers.temp_file( + "fail_render.sls", pillar_file, base_env_pillar_tree_root_dir + ) + with top_tempfile, pillar_tempfile: + yield + + +@pytest.mark.parametrize( + "args", + ( + ("state.sls", "basic"), + ("state.highstate",), + ("state.sls_id", "foo", "basic"), + ("state.show_sls", "basic"), + ("state.show_low_sls", "basic"), + ("state.show_highstate",), + ("state.show_lowstate",), + ("state.top", "top.sls"), + ), +) +def test_it(salt_ssh_cli, args): + ret = salt_ssh_cli.run(*args) + assert ret.returncode == EX_AGGREGATE + assert isinstance(ret.data, list) + assert ret.data + assert isinstance(ret.data[0], str) + assert ret.data[0] == "Pillar failed to render 
with the following messages:" + assert ret.data[1].startswith("Rendering SLS 'fail_render' failed.") diff --git a/tests/pytests/integration/ssh/state/test_retcode_render_exception.py b/tests/pytests/integration/ssh/state/test_retcode_render_exception.py new file mode 100644 index 000000000000..5291e107b91f --- /dev/null +++ b/tests/pytests/integration/ssh/state/test_retcode_render_exception.py @@ -0,0 +1,67 @@ +""" +Verify salt-ssh fails with a retcode > 0 when a state rendering fails. +""" + +import pytest + +from salt.defaults.exitcodes import EX_AGGREGATE + +pytestmark = [ + pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.slow_test, +] + + +@pytest.fixture(scope="module", autouse=True) +def state_tree_render_fail(base_env_state_tree_root_dir): + top_file = """ + base: + 'localhost': + - fail_render + '127.0.0.1': + - fail_render + """ + state_file = r""" + abc var is not defined {{ abc }}: + test.nop + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + state_tempfile = pytest.helpers.temp_file( + "fail_render.sls", state_file, base_env_state_tree_root_dir + ) + with top_tempfile, state_tempfile: + yield + + +@pytest.mark.parametrize( + "args,retcode", + ( + (("state.sls", "fail_render"), EX_AGGREGATE), + (("state.highstate",), EX_AGGREGATE), + (("state.sls_id", "foo", "fail_render"), EX_AGGREGATE), + (("state.show_sls", "fail_render"), EX_AGGREGATE), + (("state.show_low_sls", "fail_render"), EX_AGGREGATE), + (("state.show_highstate",), EX_AGGREGATE), + # state.show_lowstate exits with 0 for non-ssh as well + (("state.show_lowstate",), 0), + (("state.top", "top.sls"), EX_AGGREGATE), + ), +) +def test_it(salt_ssh_cli, args, retcode): + ret = salt_ssh_cli.run(*args) + assert ret.returncode == retcode + assert isinstance(ret.data, list) + assert ret.data + assert isinstance(ret.data[0], str) + assert ret.data[0].startswith( + "Rendering SLS 'base:fail_render' failed: Jinja 
variable 'abc' is undefined;" + ) + + +def test_state_single(salt_ssh_cli): + ret = salt_ssh_cli.run("state.single", "file") + assert ret.returncode == EX_AGGREGATE + assert isinstance(ret.data, str) + assert "single() missing 1 required positional argument" in ret.data diff --git a/tests/pytests/integration/ssh/state/test_retcode_run_fail.py b/tests/pytests/integration/ssh/state/test_retcode_run_fail.py new file mode 100644 index 000000000000..e77768dc5102 --- /dev/null +++ b/tests/pytests/integration/ssh/state/test_retcode_run_fail.py @@ -0,0 +1,52 @@ +""" +Verify salt-ssh passes on a failing retcode from state execution. +""" + +import pytest + +from salt.defaults.exitcodes import EX_AGGREGATE + +pytestmark = [ + pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.slow_test, +] + + +@pytest.fixture(scope="module", autouse=True) +def state_tree_run_fail(base_env_state_tree_root_dir): + top_file = """ + base: + 'localhost': + - fail_run + '127.0.0.1': + - fail_run + """ + state_file = """ + This file state fails: + file.managed: + - name: /tmp/non/ex/is/tent + - makedirs: false + - contents: foo + """ + top_tempfile = pytest.helpers.temp_file( + "top.sls", top_file, base_env_state_tree_root_dir + ) + state_tempfile = pytest.helpers.temp_file( + "fail_run.sls", state_file, base_env_state_tree_root_dir + ) + with top_tempfile, state_tempfile: + yield + + +@pytest.mark.parametrize( + "args", + ( + ("state.sls", "fail_run"), + ("state.highstate",), + ("state.sls_id", "This file state fails", "fail_run"), + ("state.top", "top.sls"), + ), +) +def test_it(salt_ssh_cli, args): + ret = salt_ssh_cli.run(*args) + assert ret.returncode == EX_AGGREGATE diff --git a/tests/pytests/integration/ssh/state/test_state.py b/tests/pytests/integration/ssh/state/test_state.py new file mode 100644 index 000000000000..62e8cbf513b1 --- /dev/null +++ b/tests/pytests/integration/ssh/state/test_state.py @@ -0,0 +1,103 @@ +import pytest + +pytestmark = [ + 
pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.slow_test, +] + + +def test_state_with_import(salt_ssh_cli, state_tree): + """ + verify salt-ssh can use imported map files in states + """ + ret = salt_ssh_cli.run("state.sls", "test") + assert ret.returncode == 0 + assert ret.data + + +@pytest.mark.parametrize( + "ssh_cmd", + [ + "state.sls", + "state.highstate", + "state.apply", + "state.show_top", + "state.show_highstate", + "state.show_low_sls", + "state.show_lowstate", + "state.sls_id", + "state.show_sls", + "state.top", + ], +) +def test_state_with_import_dir(salt_ssh_cli, state_tree_dir, ssh_cmd): + """ + verify salt-ssh can use imported map files in states + when the map files are in another directory outside of + sls files importing them. + """ + if ssh_cmd in ("state.sls", "state.show_low_sls", "state.show_sls"): + ret = salt_ssh_cli.run("-w", "-t", ssh_cmd, "test") + elif ssh_cmd == "state.top": + ret = salt_ssh_cli.run("-w", "-t", ssh_cmd, "top.sls") + elif ssh_cmd == "state.sls_id": + ret = salt_ssh_cli.run("-w", "-t", ssh_cmd, "Ok with def", "test") + else: + ret = salt_ssh_cli.run("-w", "-t", ssh_cmd) + assert ret.returncode == 0 + if ssh_cmd == "state.show_top": + assert ret.data == {"base": ["test", "master_tops_test"]} or {"base": ["test"]} + elif ssh_cmd in ("state.show_highstate", "state.show_sls"): + assert ret.data == { + "Ok with def": { + "__sls__": "test", + "__env__": "base", + "test": ["succeed_without_changes", {"order": 10000}], + } + } + elif ssh_cmd in ("state.show_low_sls", "state.show_lowstate", "state.show_sls"): + assert ret.data == [ + { + "state": "test", + "name": "Ok with def", + "__sls__": "test", + "__env__": "base", + "__id__": "Ok with def", + "order": 10000, + "fun": "succeed_without_changes", + } + ] + else: + assert ret.data["test_|-Ok with def_|-Ok with def_|-succeed_without_changes"][ + "result" + ] + assert ret.data + + +def test_state_with_import_from_dir(salt_ssh_cli, 
nested_state_tree): + """ + verify salt-ssh can use imported map files in states + """ + ret = salt_ssh_cli.run( + "--extra-filerefs=salt://foo/map.jinja", "state.apply", "foo" + ) + assert ret.returncode == 0 + assert ret.data + + +def test_state_low(salt_ssh_cli): + """ + test state.low with salt-ssh + """ + ret = salt_ssh_cli.run( + "state.low", '{"state": "cmd", "fun": "run", "name": "echo blah"}' + ) + assert ret.data["cmd_|-echo blah_|-echo blah_|-run"]["changes"]["stdout"] == "blah" + + +def test_state_high(salt_ssh_cli): + """ + test state.high with salt-ssh + """ + ret = salt_ssh_cli.run("state.high", '{"echo blah": {"cmd": ["run"]}}') + assert ret.data["cmd_|-echo blah_|-echo blah_|-run"]["changes"]["stdout"] == "blah" diff --git a/tests/pytests/integration/ssh/test_deploy.py b/tests/pytests/integration/ssh/test_deploy.py index dac28f84f373..c95bb40a3110 100644 --- a/tests/pytests/integration/ssh/test_deploy.py +++ b/tests/pytests/integration/ssh/test_deploy.py @@ -9,6 +9,7 @@ import salt.utils.files import salt.utils.yaml +from salt.defaults.exitcodes import EX_AGGREGATE pytestmark = [ pytest.mark.slow_test, @@ -74,15 +75,13 @@ def test_set_path(salt_ssh_cli, tmp_path, salt_ssh_roster_file): roster_data = salt.utils.yaml.safe_load(rfh) roster_data["localhost"].update( { - "set_path": "$PATH:/usr/local/bin/:{}".format(path), + "set_path": f"$PATH:/usr/local/bin/:{path}", } ) with salt.utils.files.fopen(roster_file, "w") as wfh: salt.utils.yaml.safe_dump(roster_data, wfh) - ret = salt_ssh_cli.run( - "--roster-file={}".format(roster_file), "environ.get", "PATH" - ) + ret = salt_ssh_cli.run(f"--roster-file={roster_file}", "environ.get", "PATH") assert ret.returncode == 0 assert path in ret.data @@ -97,6 +96,29 @@ def test_tty(salt_ssh_cli, tmp_path, salt_ssh_roster_file): roster_data["localhost"].update({"tty": True}) with salt.utils.files.fopen(roster_file, "w") as wfh: salt.utils.yaml.safe_dump(roster_data, wfh) - ret = 
salt_ssh_cli.run("--roster-file={}".format(roster_file), "test.ping") + ret = salt_ssh_cli.run(f"--roster-file={roster_file}", "test.ping") assert ret.returncode == 0 assert ret.data is True + + +def test_retcode_exe_run_fail(salt_ssh_cli): + """ + Verify salt-ssh passes through the retcode it receives. + """ + ret = salt_ssh_cli.run("file.touch", "/tmp/non/ex/is/tent") + assert ret.returncode == EX_AGGREGATE + assert isinstance(ret.data, dict) + assert "Error running 'file.touch': No such file or directory" in ret.data["stderr"] + assert ret.data["retcode"] == 1 + + +def test_retcode_exe_run_exception(salt_ssh_cli): + """ + Verify salt-ssh passes through the retcode it receives + when an exception is thrown. (Ref #50727) + """ + ret = salt_ssh_cli.run("salttest.jinja_error") + assert ret.returncode == EX_AGGREGATE + assert isinstance(ret.data, dict) + assert ret.data["stderr"].endswith("Exception: hehehe") + assert ret.data["retcode"] == 1 diff --git a/tests/pytests/integration/ssh/test_pillar_compilation.py b/tests/pytests/integration/ssh/test_pillar_compilation.py new file mode 100644 index 000000000000..688ab1f087fa --- /dev/null +++ b/tests/pytests/integration/ssh/test_pillar_compilation.py @@ -0,0 +1,302 @@ +import logging +import pathlib +import shutil +import subprocess +import textwrap + +import pytest +from pytestshellutils.utils.processes import ProcessResult + +log = logging.getLogger(__name__) + + +# The following fixtures are copied from pytests/functional/pillar/test_gpg.py + + +@pytest.fixture(scope="module") +def test_key(): + """ + Private key for setting up GPG pillar environment. 
+ """ + return textwrap.dedent( + """\ + -----BEGIN PGP PRIVATE KEY BLOCK----- + + lQOYBFiKrcYBCADAj92+fz20uKxxH0ffMwcryGG9IogkiUi2QrNYilB4hwrY5Qt7 + Sbywlk/mSDMcABxMxS0vegqc5pgglvAnsi9w7j//9nfjiirsyiTYOOD1akTFQr7b + qT6zuGFA4oYmYHvfBOena485qvlyitYLKYT9h27TDiiH6Jgt4xSRbjeyhTf3/fKD + JzHA9ii5oeVi1pH/8/4USgXanBdKwO0JKQtci+PF0qe/nkzRswqTIkdgx1oyNUqL + tYJ0XPOy+UyOC4J4QDIt9PQbAmiur8By4g2lLYWlGOCjs7Fcj3n5meWKzf1pmXoY + lAnSab8kUZSSkoWQoTO7RbjFypULKCZui45/ABEBAAEAB/wM1wsAMtfYfx/wgxd1 + yJ9HyhrKU80kMotIq/Xth3uKLecJQ2yakfYlCEDXqCTQTymT7OnwaoDeqXmnYqks + 3HLRYvGdjb+8ym/GTkxapqBJfQaM6MB1QTnPHhJOE0zCrlhULK2NulxYihAMFTnk + kKYviaJYLG+DcH0FQkkS0XihTKcqnsoJiS6iNd5SME3pa0qijR0D5f78fkvNzzEE + 9vgAX1TgQ5PDJGN6nYlW2bWxTcg+FR2cUAQPTiP9wXCH6VyJoQay7KHVr3r/7SsU + 89otfcx5HVDYPrez6xnP6wN0P/mKxCDbkERLDjZjWOmNXg2zn+/t3u02e+ybfAIp + kTTxBADY/FmPgLpJ2bpcPH141twpHwhKIbENlTB9745Qknr6aLA0QVCkz49/3joO + Sj+SZ7Jhl6cfbynrfHwX3b1bOFTzBUH2Tsi0HX40PezEFH0apf55FLZuMOBt/lc1 + ET6evpIHF0dcM+BvZa7E7MyTyEq8S7Cc9RoJyfeGbS7MG5FfuwQA4y9QOb/OQglq + ZffkVItwY52RKWb/b2WQmt+IcVax/j7DmBva765SIfPDvOCMrYhJBI/uYHQ0Zia7 + SnC9+ez55wdYqgHkYojc21CIOnUvsPSj+rOpryoXzmcTuvKeVIyIA0h/mQyWjimR + ENrikC4+O8GBMY6V4uvS4EFhLfHE9g0D/20lNOKkpAKPenr8iAPWcl0/pijJCGxF + agnT7O2GQ9Lr5hSjW86agkevbGktu2ja5t/fHq0wpLQ4DVLMrR0/poaprTr307kW + AlQV3z/C2cMHNysz4ulOgQrudQbhUEz2A8nQxRtIfWunkEugKLr1QiCkE1LJW8Np + ZLxE6Qp0/KzdQva0HVNhbHQgR1BHIDxlcmlrQHNhbHRzdGFjay5jb20+iQFUBBMB + CAA+FiEE+AxQ1ELHGEyFTZPYw5x3k9EbHGsFAliKrcYCGwMFCQPCZwAFCwkIBwIG + FQgJCgsCBBYCAwECHgECF4AACgkQw5x3k9EbHGubUAf+PLdp1oTLVokockZgLyIQ + wxOd3ofNOgNk4QoAkSMNSbtnYoQFKumRw/yGyPSIoHMsOC/ga98r8TAJEKfx3DLA + rsD34oMAaYUT+XUd0KoSmlHqBrtDD1+eBASKYsCosHpCiKuQFfLKSxvpEr2YyL8L + X3Q2TY5zFlGA9Eeq5g+rlb++yRZrruFN28EWtY/pyXFZgIB30ReDwPkM9hrioPZM + 0Qf3+dWZSK1rWViclB51oNy4un9stTiFZptAqz4NTNssU5A4AcNQPwBwnKIYoE58 + Y/Zyv8HzILGykT+qFebqRlRBI/13eHdzgJOL1iPRfjTk5Cvr+vcyIxAklXOP81ja + B50DmARYiq3GAQgArnzu4SPCCQGNcCNxN4QlMP5TNvRsm5KrPbcO9j8HPfB+DRXs + 
6B3mnuR6OJg7YuC0C2A/m2dSHJKkF0f2AwFRpxLjJ2iAFbrZAW/N0vZDx8zO+YAU + HyLu0V04wdCE5DTLkgfWNR+0uMa8qZ4Kn56Gv7O+OFE7zgTHeZ7psWlxdafeW7u6 + zlC/3DWksNtuNb0vQDNMM4vgXbnORIfXdyh41zvEEnr/rKw8DuJAmo20mcv6Qi51 + PqqyM62ddQOEVfiMs9l4vmwZAjGFNFNInyPXnogL6UPCDmizb6hh8aX/MwG/XFIG + KMJWbAVGpyBuqljKIt3qLu/s8ouPqkEN+f+nGwARAQABAAf+NA36d/kieGxZpTQ1 + oQHP1Jty+OiXhBwP8SPtF0J7ZxuZh07cs+zDsfBok/y6bsepfuFSaIq84OBQis+B + kajxkp3cXZPb7l+lQLv5k++7Dd7Ien+ewSE7TQN6HLwYATrM5n5nBcc1M5C6lQGc + mr0A5yz42TVG2bHsTpi9kBtsaVRSPUHSh8A8T6eOyCrT+/CAJVEEf7JyNyaqH1dy + LuxI1VF3ySDEtFzuwN8EZQP9Yz/4AVyEQEA7WkNEwSQsBi2bWgWEdG+qjqnL+YKa + vwe7/aJYPeL1zICnP/Osd/UcpDxR78MbozstbRljML0fTLj7UJ+XDazwv+Kl0193 + 2ZK2QQQAwgXvS19MYNkHO7kbNVLt1VE2ll901iC9GFHBpFUam6gmoHXpCarB+ShH + 8x25aoUu4MxHmFxXd+Zq3d6q2yb57doWoPgvqcefpGmigaITnb1jhV2rt65V8deA + SQazZNqBEBbZNIhfn6ObxHXXvaYaqq/UOEQ7uKyR9WMJT/rmqMEEAOY5h1R1t7AB + JZ5VnhyAhdsNWw1gTcXB3o8gKz4vjdnPm0F4aVIPfB3BukETDc3sc2tKmCfUF7I7 + oOrh7iRez5F0RIC3KDzXF8qUuWBfPViww45JgftdKsecCIlEEYCoc+3goX0su2bP + V1MDuHijMGTJCBABDgizNb0oynW5xcrbA/0QnKfpTwi7G3oRcJWv2YebVDRcU+SP + dOYhq6SnmWPizEIljRG/X7FHJB+W7tzryO3sCDTAYwxFrfMwvJ2PwnAYI4349zYd + lC28HowUkBYNhwBXc48xCfyhPZtD0aLx/OX1oLZ/vi8gd8TusgGupV/JjkFVO+Nd + +shN/UEAldwqkkY2iQE8BBgBCAAmFiEE+AxQ1ELHGEyFTZPYw5x3k9EbHGsFAliK + rcYCGwwFCQPCZwAACgkQw5x3k9EbHGu4wwf/dRFat91BRX1TJfwJl5otoAXpItYM + 6kdWWf1Eb1BicAvXhI078MSH4WXdKkJjJr1fFP8Ynil513H4Mzb0rotMAhb0jLSA + lSRkMbhMvPxoS2kaYzioaBpp8yXpGiNo7dF+PJXSm/Uwp3AkcFjoVbBOqDWGgxMi + DvDAstzLZ9dIcmr+OmcRQykKOKXlhEl3HnR5CyuPrA8hdVup4oeVwdkJhfJFKLLb + 3fR26wxJOmIOAt24eAUy721WfQ9txNAmhdy8mY842ODZESw6WatrQjRfuqosDgrk + jc0cCHsEqJNZ2AB+1uEl3tcH0tyAFJa33F0znSonP17SS1Ff9sgHYBVLUg== + =06Tz + -----END PGP PRIVATE KEY BLOCK----- + """ + ) + + +@pytest.fixture(scope="module") +def gpg_pillar_yaml(): + """ + Yaml data for testing GPG pillar. 
+ """ + return textwrap.dedent( + """ + #!yaml|gpg + secrets: + foo: | + -----BEGIN PGP MESSAGE----- + + hQEMAw2B674HRhwSAQgAhTrN8NizwUv/VunVrqa4/X8t6EUulrnhKcSeb8sZS4th + W1Qz3K2NjL4lkUHCQHKZVx/VoZY7zsddBIFvvoGGfj8+2wjkEDwFmFjGE4DEsS74 + ZLRFIFJC1iB/O0AiQ+oU745skQkU6OEKxqavmKMrKo3rvJ8ZCXDC470+i2/Hqrp7 + +KWGmaDOO422JaSKRm5D9bQZr9oX7KqnrPG9I1+UbJyQSJdsdtquPWmeIpamEVHb + VMDNQRjSezZ1yKC4kCWm3YQbBF76qTHzG1VlLF5qOzuGI9VkyvlMaLfMibriqY73 + zBbPzf6Bkp2+Y9qyzuveYMmwS4sEOuZL/PetqisWe9JGAWD/O+slQ2KRu9hNww06 + KMDPJRdyj5bRuBVE4hHkkP23KrYr7SuhW2vpe7O/MvWEJ9uDNegpMLhTWruGngJh + iFndxegN9w== + =bAuo + -----END PGP MESSAGE----- + """ + ) + + +@pytest.fixture(scope="module") +def gpg_homedir(salt_master, test_key): + """ + Setup gpg environment + """ + _gpg_homedir = pathlib.Path(salt_master.config_dir) / "gpgkeys" + _gpg_homedir.mkdir(0o700) + agent_started = False + try: + cmd_prefix = ["gpg", "--homedir", str(_gpg_homedir)] + + cmd = cmd_prefix + ["--list-keys"] + proc = subprocess.run( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + check=True, + universal_newlines=True, + ) + ret = ProcessResult( + returncode=proc.returncode, + stdout=proc.stdout, + stderr=proc.stderr or "", + cmdline=proc.args, + ) + log.debug("Instantiating gpg keyring...\n%s", ret) + + cmd = cmd_prefix + ["--import", "--allow-secret-key-import"] + proc = subprocess.run( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + check=True, + universal_newlines=True, + input=test_key, + ) + ret = ProcessResult( + returncode=proc.returncode, + stdout=proc.stdout, + stderr=proc.stderr or "", + cmdline=proc.args, + ) + log.debug("Importing keypair...:\n%s", ret) + + agent_started = True + + yield _gpg_homedir + finally: + if agent_started: + try: + cmd = ["gpg-connect-agent", "--homedir", str(_gpg_homedir)] + proc = subprocess.run( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + check=True, + universal_newlines=True, + input="KILLAGENT", + ) + ret = ProcessResult( + 
returncode=proc.returncode, + stdout=proc.stdout, + stderr=proc.stderr or "", + cmdline=proc.args, + ) + log.debug("Killed gpg-agent...\n%s", ret) + except (OSError, subprocess.CalledProcessError): + log.debug("No need to kill: old gnupg doesn't start the agent.") + shutil.rmtree(str(_gpg_homedir), ignore_errors=True) + + +@pytest.fixture(scope="module") +def pillar_setup(base_env_pillar_tree_root_dir, gpg_pillar_yaml, salt_minion): + """ + Setup gpg pillar + """ + saltutil_contents = f""" + saltutil: {{{{ salt["saltutil.runner"]("mine.get", tgt="{salt_minion.id}", fun="test.ping") | json }}}} + """ + top_file_contents = """ + base: + '*': + - gpg + - saltutil + """ + with pytest.helpers.temp_file( + "top.sls", top_file_contents, base_env_pillar_tree_root_dir + ), pytest.helpers.temp_file( + "gpg.sls", gpg_pillar_yaml, base_env_pillar_tree_root_dir + ), pytest.helpers.temp_file( + "saltutil.sls", saltutil_contents, base_env_pillar_tree_root_dir + ): + yield + + +@pytest.mark.skip_if_binaries_missing("gpg") +@pytest.mark.usefixtures("pillar_setup", "gpg_homedir") +def test_gpg_pillar(salt_ssh_cli): + """ + Ensure that GPG-encrypted pillars can be decrypted, i.e. the + gpg_keydir should not be overridden. This is issue #60002, + which has the same cause as the one below. + """ + ret = salt_ssh_cli.run("pillar.items") + assert ret.returncode == 0 + assert isinstance(ret.data, dict) + assert ret.data + _assert_gpg_pillar(ret.data) + + +def _assert_gpg_pillar(ret): + assert "secrets" in ret + assert "foo" in ret["secrets"] + assert "BEGIN PGP MESSAGE" not in ret["secrets"]["foo"] + assert ret["secrets"]["foo"] == "supersecret" + assert "_errors" not in ret + + +@pytest.mark.usefixtures("pillar_setup") +def test_saltutil_runner(salt_ssh_cli, salt_minion): + """ + Ensure that during pillar compilation, the cache dir is not + overridden. For a history, see PR #50489 and issue #36796, + notice that the initial description is probably unrelated + to this. 
+ """ + ret = salt_ssh_cli.run("pillar.items") + assert ret.returncode == 0 + assert isinstance(ret.data, dict) + assert ret.data + _assert_saltutil_runner_pillar(ret.data, salt_minion.id) + + +def _assert_saltutil_runner_pillar(ret, salt_minion_id): + assert "saltutil" in ret + assert isinstance(ret["saltutil"], dict) + assert ret["saltutil"] + assert salt_minion_id in ret["saltutil"] + assert ret["saltutil"][salt_minion_id] is True + assert "_errors" not in ret + + +@pytest.mark.skip_if_binaries_missing("gpg") +@pytest.mark.usefixtures("pillar_setup", "gpg_homedir") +def test_gpg_pillar_orch(salt_ssh_cli, salt_run_cli, gpg_homedir): + """ + Ensure that GPG-encrypted pillars can be decrypted when Salt-SSH is + called during an orchestration or via saltutil.cmd. + This is issue #65670. + """ + # Use salt_run_cli since the config paths are different between + # test master and test minion. + ret = salt_run_cli.run( + "salt.cmd", + "saltutil.cmd", + salt_ssh_cli.target_host, + "pillar.items", + ssh=True, + ignore_host_keys=True, + roster_file=str(salt_ssh_cli.roster_file), + ssh_priv=str(salt_ssh_cli.client_key), + ) + assert ret.returncode == 0 + assert isinstance(ret.data, dict) + assert ret.data + _assert_gpg_pillar(ret.data[salt_ssh_cli.target_host]["return"]) + + +@pytest.mark.usefixtures("pillar_setup") +def test_saltutil_runner_orch(salt_ssh_cli, salt_run_cli, salt_minion): + """ + Ensure that runner calls in the pillar succeed when Salt-SSH is + called during an orchestration or via saltutil.cmd. + This is a variant of issue #65670. + """ + # Use salt_run_cli since the config paths are different between + # test master and test minion. 
+ ret = salt_run_cli.run( + "salt.cmd", + "saltutil.cmd", + salt_ssh_cli.target_host, + "pillar.items", + ssh=True, + ignore_host_keys=True, + roster_file=str(salt_ssh_cli.roster_file), + ssh_priv=str(salt_ssh_cli.client_key), + ) + assert ret.returncode == 0 + assert isinstance(ret.data, dict) + assert ret.data + _assert_saltutil_runner_pillar( + ret.data[salt_ssh_cli.target_host]["return"], salt_minion.id + ) diff --git a/tests/pytests/integration/ssh/test_pre_flight.py b/tests/pytests/integration/ssh/test_pre_flight.py new file mode 100644 index 000000000000..c2fc14094e83 --- /dev/null +++ b/tests/pytests/integration/ssh/test_pre_flight.py @@ -0,0 +1,307 @@ +""" +Test for ssh_pre_flight roster option +""" + +try: + import grp + import pwd +except ImportError: + # windows stacktraces on import of these modules + pass +import os +import pathlib +import shutil +import subprocess + +import pytest +import yaml +from saltfactories.utils import random_string + +import salt.utils.files + +pytestmark = [ + pytest.mark.skip_on_windows(reason="Salt-ssh not available on Windows"), +] + + +def _custom_roster(roster_file, roster_data): + with salt.utils.files.fopen(roster_file, "r") as fp: + data = salt.utils.yaml.safe_load(fp) + for key, item in roster_data.items(): + data["localhost"][key] = item + with salt.utils.files.fopen(roster_file, "w") as fp: + yaml.safe_dump(data, fp) + + +@pytest.fixture +def _create_roster(salt_ssh_roster_file, tmp_path): + thin_dir = tmp_path / "thin-dir" + ret = { + "roster": salt_ssh_roster_file, + "data": { + "ssh_pre_flight": str(tmp_path / "ssh_pre_flight.sh"), + }, + "test_script": str(tmp_path / "test-pre-flight-script-worked.txt"), + "thin_dir": str(thin_dir), + } + + with salt.utils.files.fopen(salt_ssh_roster_file, "r") as fp: + data = salt.utils.yaml.safe_load(fp) + + pre_flight_script = ret["data"]["ssh_pre_flight"] + data["localhost"]["ssh_pre_flight"] = pre_flight_script + data["localhost"]["thin_dir"] = ret["thin_dir"] + with 
salt.utils.files.fopen(salt_ssh_roster_file, "w") as fp: + yaml.safe_dump(data, fp) + + with salt.utils.files.fopen(pre_flight_script, "w") as fp: + fp.write("touch {}".format(ret["test_script"])) + + try: + yield ret + finally: + if thin_dir.exists(): + shutil.rmtree(thin_dir) + + +@pytest.mark.slow_test +def test_ssh_pre_flight(salt_ssh_cli, caplog, _create_roster): + """ + test ssh when ssh_pre_flight is set ensure the script runs successfully + """ + ret = salt_ssh_cli.run("test.ping") + assert ret.returncode == 0 + + assert pathlib.Path(_create_roster["test_script"]).exists() + + +@pytest.mark.slow_test +def test_ssh_run_pre_flight(salt_ssh_cli, _create_roster): + """ + test ssh when --pre-flight is passed to salt-ssh to ensure the script runs successfully + """ + # make sure we previously ran a command so the thin dir exists + ret = salt_ssh_cli.run("test.ping") + assert pathlib.Path(_create_roster["test_script"]).exists() + + # Now remeove the script to ensure pre_flight doesn't run + # without --pre-flight + pathlib.Path(_create_roster["test_script"]).unlink() + + assert salt_ssh_cli.run("test.ping").returncode == 0 + assert not pathlib.Path(_create_roster["test_script"]).exists() + + # Now ensure + ret = salt_ssh_cli.run("test.ping", "--pre-flight") + assert ret.returncode == 0 + assert pathlib.Path(_create_roster["test_script"]).exists() + + +@pytest.mark.slow_test +def test_ssh_run_pre_flight_args(salt_ssh_cli, _create_roster): + """ + test ssh when --pre-flight is passed to salt-ssh + to ensure the script runs successfully passing some args + """ + _custom_roster(salt_ssh_cli.roster_file, {"ssh_pre_flight_args": "foobar test"}) + # Create pre_flight script that accepts args + test_script = _create_roster["test_script"] + test_script_1 = pathlib.Path(test_script + "-foobar") + test_script_2 = pathlib.Path(test_script + "-test") + with salt.utils.files.fopen(_create_roster["data"]["ssh_pre_flight"], "w") as fp: + fp.write( + f""" + touch 
{str(test_script)}-$1 + touch {str(test_script)}-$2 + """ + ) + ret = salt_ssh_cli.run("test.ping") + assert ret.returncode == 0 + assert test_script_1.exists() + assert test_script_2.exists() + test_script_1.unlink() + test_script_2.unlink() + + ret = salt_ssh_cli.run("test.ping") + assert ret.returncode == 0 + assert not test_script_1.exists() + assert not test_script_2.exists() + + ret = salt_ssh_cli.run("test.ping", "--pre-flight") + assert ret.returncode == 0 + assert test_script_1.exists() + assert test_script_2.exists() + + +@pytest.mark.slow_test +def test_ssh_run_pre_flight_args_prevent_injection( + salt_ssh_cli, _create_roster, tmp_path +): + """ + test ssh when --pre-flight is passed to salt-ssh + and evil arguments are used in order to produce shell injection + """ + injected_file = tmp_path / "injection" + _custom_roster( + salt_ssh_cli.roster_file, + {"ssh_pre_flight_args": f"foobar; echo injected > {str(injected_file)}"}, + ) + # Create pre_flight script that accepts args + test_script = _create_roster["test_script"] + test_script_1 = pathlib.Path(test_script + "-echo") + test_script_2 = pathlib.Path(test_script + "-foobar;") + with salt.utils.files.fopen(_create_roster["data"]["ssh_pre_flight"], "w") as fp: + fp.write( + f""" + touch {str(test_script)}-$1 + touch {str(test_script)}-$2 + """ + ) + + # make sure we previously ran a command so the thin dir exists + ret = salt_ssh_cli.run("test.ping") + assert ret.returncode == 0 + assert test_script_1.exists() + assert test_script_2.exists() + test_script_1.unlink() + test_script_2.unlink() + assert not injected_file.is_file() + + ret = salt_ssh_cli.run("test.ping", "--pre-flight") + assert ret.returncode == 0 + + assert test_script_1.exists() + assert test_script_2.exists() + assert ( + not injected_file.is_file() + ), "File injection suceeded. 
This shouldn't happend" + + +@pytest.mark.flaky(max_runs=4) +@pytest.mark.slow_test +def test_ssh_run_pre_flight_failure(salt_ssh_cli, _create_roster): + """ + test ssh_pre_flight when there is a failure + in the script. + """ + with salt.utils.files.fopen(_create_roster["data"]["ssh_pre_flight"], "w") as fp_: + fp_.write("exit 2") + + ret = salt_ssh_cli.run("test.ping", "--pre-flight") + assert ret.data["retcode"] == 2 + + +@pytest.fixture +def account(): + username = random_string("test-account-", uppercase=False) + with pytest.helpers.create_account(username=username) as account: + yield account + + +@pytest.mark.slow_test +def test_ssh_pre_flight_script(salt_ssh_cli, caplog, _create_roster, tmp_path, account): + """ + Test to ensure user cannot create and run a script + with the expected pre_flight script path on target. + """ + try: + script = pathlib.Path.home() / "hacked" + tmp_preflight = pathlib.Path("/tmp", "ssh_pre_flight.sh") + tmp_preflight.write_text(f"touch {script}") + os.chown(tmp_preflight, account.info.uid, account.info.gid) + ret = salt_ssh_cli.run("test.ping") + assert not script.is_file() + assert ret.returncode == 0 + assert ret.stdout == '{\n"localhost": true\n}\n' + finally: + for _file in [script, tmp_preflight]: + if _file.is_file(): + _file.unlink() + + +def demote(user_uid, user_gid): + def result(): + # os.setgid does not remove group membership, so we remove them here so they are REALLY non-root + os.setgroups([]) + os.setgid(user_gid) + os.setuid(user_uid) + + return result + + +@pytest.mark.slow_test +def test_ssh_pre_flight_perms(salt_ssh_cli, caplog, _create_roster, account): + """ + Test to ensure standard user cannot run pre flight script + on target when user sets wrong permissions (777) on + ssh_pre_flight script. 
+ """ + try: + script = pathlib.Path("/tmp", "itworked") + preflight = pathlib.Path("/ssh_pre_flight.sh") + preflight.write_text(f"touch {str(script)}") + tmp_preflight = pathlib.Path("/tmp", preflight.name) + + _custom_roster(salt_ssh_cli.roster_file, {"ssh_pre_flight": str(preflight)}) + preflight.chmod(0o0777) + run_script = pathlib.Path("/run_script") + run_script.write_text( + f""" + x=1 + while [ $x -le 200000 ]; do + SCRIPT=`bash {str(tmp_preflight)} 2> /dev/null; echo $?` + if [ ${{SCRIPT}} -eq 0 ]; then + break + fi + x=$(( $x + 1 )) + done + """ + ) + run_script.chmod(0o0777) + # pylint: disable=W1509 + ret = subprocess.Popen( + ["sh", f"{run_script}"], + preexec_fn=demote(account.info.uid, account.info.gid), + stdout=None, + stderr=None, + stdin=None, + universal_newlines=True, + ) + # pylint: enable=W1509 + ret = salt_ssh_cli.run("test.ping") + assert ret.returncode == 0 + + # Lets make sure a different user other than root + # Didn't run the script + assert os.stat(script).st_uid != account.info.uid + assert script.is_file() + finally: + for _file in [script, preflight, tmp_preflight, run_script]: + if _file.is_file(): + _file.unlink() + + +@pytest.mark.slow_test +def test_ssh_run_pre_flight_target_file_perms(salt_ssh_cli, _create_roster, tmp_path): + """ + test ssh_pre_flight to ensure the target pre flight script + has the correct perms + """ + perms_file = tmp_path / "perms" + with salt.utils.files.fopen(_create_roster["data"]["ssh_pre_flight"], "w") as fp_: + fp_.write( + f""" + SCRIPT_NAME=$0 + stat -L -c "%a %G %U" $SCRIPT_NAME > {perms_file} + """ + ) + + ret = salt_ssh_cli.run("test.ping", "--pre-flight") + assert ret.returncode == 0 + with salt.utils.files.fopen(perms_file) as fp: + data = fp.read() + assert data.split()[0] == "600" + uid = os.getuid() + gid = os.getgid() + assert data.split()[1] == grp.getgrgid(gid).gr_name + assert data.split()[2] == pwd.getpwuid(uid).pw_name diff --git a/tests/pytests/integration/ssh/test_saltcheck.py 
b/tests/pytests/integration/ssh/test_saltcheck.py index 510688502650..a19fe9f12702 100644 --- a/tests/pytests/integration/ssh/test_saltcheck.py +++ b/tests/pytests/integration/ssh/test_saltcheck.py @@ -23,6 +23,7 @@ def test_saltcheck_run_test(salt_ssh_cli): assert ret.data["status"] == "Pass" +@pytest.mark.skip_on_aarch64 def test_saltcheck_state(salt_ssh_cli): """ saltcheck.run_state_tests diff --git a/tests/pytests/integration/ssh/test_ssh_setup.py b/tests/pytests/integration/ssh/test_ssh_setup.py index eddf31caccdf..00e7e6394fff 100644 --- a/tests/pytests/integration/ssh/test_ssh_setup.py +++ b/tests/pytests/integration/ssh/test_ssh_setup.py @@ -154,6 +154,7 @@ def salt_ssh_cli( ) +@pytest.mark.flaky_jail def test_setup(salt_ssh_cli, ssh_container_name, ssh_sub_container_name, ssh_password): """ Test salt-ssh setup works diff --git a/tests/pytests/integration/ssh/test_state.py b/tests/pytests/integration/ssh/test_state.py deleted file mode 100644 index 9d3a38d2c9f5..000000000000 --- a/tests/pytests/integration/ssh/test_state.py +++ /dev/null @@ -1,222 +0,0 @@ -import json - -import pytest - -pytestmark = [ - pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), -] - - -@pytest.fixture(scope="module") -def state_tree(base_env_state_tree_root_dir): - top_file = """ - {%- from "map.jinja" import abc with context %} - base: - 'localhost': - - basic - '127.0.0.1': - - basic - """ - map_file = """ - {%- set abc = "def" %} - """ - state_file = """ - {%- from "map.jinja" import abc with context %} - Ok with {{ abc }}: - test.succeed_without_changes - """ - top_tempfile = pytest.helpers.temp_file( - "top.sls", top_file, base_env_state_tree_root_dir - ) - map_tempfile = pytest.helpers.temp_file( - "map.jinja", map_file, base_env_state_tree_root_dir - ) - state_tempfile = pytest.helpers.temp_file( - "test.sls", state_file, base_env_state_tree_root_dir - ) - with top_tempfile, map_tempfile, state_tempfile: - yield - - -@pytest.fixture(scope="module") 
-def state_tree_dir(base_env_state_tree_root_dir): - """ - State tree with files to test salt-ssh - when the map.jinja file is in another directory - """ - top_file = """ - {%- from "test/map.jinja" import abc with context %} - base: - 'localhost': - - test - '127.0.0.1': - - test - """ - map_file = """ - {%- set abc = "def" %} - """ - state_file = """ - {%- from "test/map.jinja" import abc with context %} - - Ok with {{ abc }}: - test.succeed_without_changes - """ - top_tempfile = pytest.helpers.temp_file( - "top.sls", top_file, base_env_state_tree_root_dir - ) - map_tempfile = pytest.helpers.temp_file( - "test/map.jinja", map_file, base_env_state_tree_root_dir - ) - state_tempfile = pytest.helpers.temp_file( - "test.sls", state_file, base_env_state_tree_root_dir - ) - - with top_tempfile, map_tempfile, state_tempfile: - yield - - -@pytest.mark.slow_test -def test_state_with_import(salt_ssh_cli, state_tree): - """ - verify salt-ssh can use imported map files in states - """ - ret = salt_ssh_cli.run("state.sls", "test") - assert ret.returncode == 0 - assert ret.data - - -@pytest.mark.parametrize( - "ssh_cmd", - [ - "state.sls", - "state.highstate", - "state.apply", - "state.show_top", - "state.show_highstate", - "state.show_low_sls", - "state.show_lowstate", - "state.sls_id", - "state.show_sls", - "state.top", - ], -) -@pytest.mark.slow_test -def test_state_with_import_dir(salt_ssh_cli, state_tree_dir, ssh_cmd): - """ - verify salt-ssh can use imported map files in states - when the map files are in another directory outside of - sls files importing them. 
- """ - if ssh_cmd in ("state.sls", "state.show_low_sls", "state.show_sls"): - ret = salt_ssh_cli.run("-w", "-t", ssh_cmd, "test") - elif ssh_cmd == "state.top": - ret = salt_ssh_cli.run("-w", "-t", ssh_cmd, "top.sls") - elif ssh_cmd == "state.sls_id": - ret = salt_ssh_cli.run("-w", "-t", ssh_cmd, "Ok with def", "test") - else: - ret = salt_ssh_cli.run("-w", "-t", ssh_cmd) - assert ret.returncode == 0 - if ssh_cmd == "state.show_top": - assert ret.data == {"base": ["test", "master_tops_test"]} or {"base": ["test"]} - elif ssh_cmd in ("state.show_highstate", "state.show_sls"): - assert ret.data == { - "Ok with def": { - "__sls__": "test", - "__env__": "base", - "test": ["succeed_without_changes", {"order": 10000}], - } - } - elif ssh_cmd in ("state.show_low_sls", "state.show_lowstate", "state.show_sls"): - assert ret.data == [ - { - "state": "test", - "name": "Ok with def", - "__sls__": "test", - "__env__": "base", - "__id__": "Ok with def", - "order": 10000, - "fun": "succeed_without_changes", - } - ] - else: - assert ret.data["test_|-Ok with def_|-Ok with def_|-succeed_without_changes"][ - "result" - ] - assert ret.data - - -@pytest.fixture -def nested_state_tree(base_env_state_tree_root_dir, tmp_path): - top_file = """ - base: - 'localhost': - - basic - '127.0.0.1': - - basic - """ - state_file = """ - /{}/file.txt: - file.managed: - - source: salt://foo/file.jinja - - template: jinja - """.format( - tmp_path - ) - file_jinja = """ - {% from 'foo/map.jinja' import comment %}{{ comment }} - """ - map_file = """ - {% set comment = "blah blah" %} - """ - statedir = base_env_state_tree_root_dir / "foo" - top_tempfile = pytest.helpers.temp_file( - "top.sls", top_file, base_env_state_tree_root_dir - ) - map_tempfile = pytest.helpers.temp_file("map.jinja", map_file, statedir) - file_tempfile = pytest.helpers.temp_file("file.jinja", file_jinja, statedir) - state_tempfile = pytest.helpers.temp_file("init.sls", state_file, statedir) - - with top_tempfile, map_tempfile, 
state_tempfile, file_tempfile: - yield - - -@pytest.mark.slow_test -def test_state_with_import_from_dir(salt_ssh_cli, nested_state_tree): - """ - verify salt-ssh can use imported map files in states - """ - ret = salt_ssh_cli.run( - "--extra-filerefs=salt://foo/map.jinja", "state.apply", "foo" - ) - assert ret.returncode == 0 - assert ret.data - - -@pytest.mark.slow_test -def test_state_low(salt_ssh_cli): - """ - test state.low with salt-ssh - """ - ret = salt_ssh_cli.run( - "state.low", '{"state": "cmd", "fun": "run", "name": "echo blah"}' - ) - assert ( - json.loads(ret.stdout)["localhost"]["cmd_|-echo blah_|-echo blah_|-run"][ - "changes" - ]["stdout"] - == "blah" - ) - - -@pytest.mark.slow_test -def test_state_high(salt_ssh_cli): - """ - test state.high with salt-ssh - """ - ret = salt_ssh_cli.run("state.high", '{"echo blah": {"cmd": ["run"]}}') - assert ( - json.loads(ret.stdout)["localhost"]["cmd_|-echo blah_|-echo blah_|-run"][ - "changes" - ]["stdout"] - == "blah" - ) diff --git a/tests/pytests/integration/ssh/test_terraform.py b/tests/pytests/integration/ssh/test_terraform.py new file mode 100644 index 000000000000..12194a48baea --- /dev/null +++ b/tests/pytests/integration/ssh/test_terraform.py @@ -0,0 +1,92 @@ +import textwrap + +import pytest + +import salt.utils.platform +from tests.support.runtests import RUNTIME_VARS + +pytestmark = [ + pytest.mark.skip_on_windows(reason="salt-ssh not available on Windows"), + pytest.mark.slow_test, +] + + +@pytest.fixture(scope="module") +def minion_id(): + return "terraform_ssh_minion" + + +@pytest.fixture(scope="module") +def terraform_roster_file(sshd_server, salt_master, tmp_path_factory, minion_id): + darwin_addon = "" + if salt.utils.platform.is_darwin(): + darwin_addon = ',\n "set_path": "$PATH:/usr/local/bin/"\n' + roster_contents = textwrap.dedent( + """ {{ + "version": 4, + "terraform_version": "1.4.3", + "serial": 1, + "outputs": {{}}, + "resources": [ + {{ + "mode": "managed", + "type": "salt_host", + 
"name": "{minion}", + "instances": [ + {{ + "schema_version": 0, + "attributes": {{ + "cmd_umask": null, + "host": "localhost", + "id": "{minion}", + "minion_opts": null, + "passwd": "", + "port": {port}, + "priv": null, + "salt_id": "{minion}", + "sudo": null, + "sudo_user": null, + "thin_dir": null, + "timeout": null, + "tty": null, + "user": "{user}"{darwin_addon} + }} + }} + ] + }} + ], + "check_results": null + }} + """ + ).format( + minion=minion_id, + port=sshd_server.listen_port, + user=RUNTIME_VARS.RUNNING_TESTS_USER, + darwin_addon=darwin_addon, + ) + roster_file = tmp_path_factory.mktemp("terraform_roster") / "terraform.tfstate" + roster_file.write_text(roster_contents) + yield roster_file + roster_file.unlink() + + +@pytest.fixture(scope="module") +def salt_ssh_cli(salt_master, terraform_roster_file, sshd_config_dir): + """ + The ``salt-ssh`` CLI as a fixture against the running master + """ + assert salt_master.is_running() + return salt_master.salt_ssh_cli( + roster_file=terraform_roster_file, + target_host="*", + client_key=str(sshd_config_dir / "client_key"), + base_script_args=["--ignore-host-keys"], + ) + + +def test_terraform_roster(salt_ssh_cli, minion_id): + """ + Test that the terraform roster operates as intended + """ + ret = salt_ssh_cli.run("--roster=terraform", "test.ping") + assert ret.data.get(minion_id) is True diff --git a/tests/pytests/integration/states/test_file.py b/tests/pytests/integration/states/test_file.py index 9115818bf7ec..ec8aafb299d3 100644 --- a/tests/pytests/integration/states/test_file.py +++ b/tests/pytests/integration/states/test_file.py @@ -1070,7 +1070,7 @@ def test_recurse( "{}.sls".format(sls_name), sls_contents ) - with sls_tempfile, test_tempdir: + with sls_tempfile: for _dir in "test1", "test2", "test3": test_tempdir.joinpath(_dir).mkdir(parents=True, exist_ok=True) @@ -1117,7 +1117,7 @@ def test_recurse_keep_symlinks_in_fileserver_root( "{}.sls".format(sls_name), sls_contents ) - with sls_tempfile, 
test_tempdir: + with sls_tempfile: for _dir in "test1", "test2", "test3": test_tempdir.joinpath(_dir).mkdir(parents=True, exist_ok=True) @@ -1169,7 +1169,7 @@ def test_recurse_keep_symlinks_outside_fileserver_root( "{}.sls".format(sls_name), sls_contents ) - with sls_tempfile, test_tempdir: + with sls_tempfile: for _dir in "test1", "test2", "test3": test_tempdir.joinpath(_dir).mkdir(parents=True, exist_ok=True) @@ -1278,3 +1278,32 @@ def test_issue_62611( state_run = next(iter(ret.data.values())) assert state_run["name"] == "echo MEEP MOOP" assert state_run["result"] is True + + +def test_contents_file(salt_master, salt_call_cli, tmp_path): + """ + test calling file.managed multiple times + with salt-call + """ + target_path = tmp_path / "add-contents-file.txt" + sls_name = "file-contents" + sls_contents = """ + add_contents_file_sls: + file.managed: + - name: {} + - contents: 1234 + """.format( + target_path + ) + sls_tempfile = salt_master.state_tree.base.temp_file( + "{}.sls".format(sls_name), sls_contents + ) + with sls_tempfile: + for i in range(1, 4): + ret = salt_call_cli.run("state.sls", sls_name) + assert ret.returncode == 0 + assert ret.data + state_run = next(iter(ret.data.values())) + assert state_run["result"] is True + # Check to make sure the file was created + assert target_path.is_file() diff --git a/tests/pytests/integration/states/test_include.py b/tests/pytests/integration/states/test_include.py new file mode 100644 index 000000000000..f814328c5e4d --- /dev/null +++ b/tests/pytests/integration/states/test_include.py @@ -0,0 +1,40 @@ +""" +Integration tests for the jinja includes in states +""" +import logging + +import pytest + +log = logging.getLogger(__name__) + + +@pytest.mark.slow_test +def test_issue_64111(salt_master, salt_minion, salt_call_cli): + # This needs to be an integration test. A functional test does not trigger + # the issue fixed. 
+ + macros_jinja = """ + {% macro a_jinja_macro(arg) -%} + {{ arg }} + {%- endmacro %} + """ + + init_sls = """ + include: + - common.file1 + """ + + file1_sls = """ + {% from 'common/macros.jinja' import a_jinja_macro with context %} + + a state id: + cmd.run: + - name: echo {{ a_jinja_macro("hello world") }} + """ + tf = salt_master.state_tree.base.temp_file + + with tf("common/macros.jinja", macros_jinja): + with tf("common/init.sls", init_sls): + with tf("common/file1.sls", file1_sls): + ret = salt_call_cli.run("state.apply", "common") + assert ret.returncode == 0 diff --git a/tests/pytests/integration/states/test_state_test.py b/tests/pytests/integration/states/test_state_test.py new file mode 100644 index 000000000000..88e12ac32935 --- /dev/null +++ b/tests/pytests/integration/states/test_state_test.py @@ -0,0 +1,79 @@ +import logging + +log = logging.getLogger(__name__) + + +def test_issue_62590(salt_master, salt_minion, salt_cli): + + statepy = """ + # _states/test2.py + import logging + log = logging.getLogger(__name__) + + def call_another(name, m_name, **kwargs): + ret = __states__[m_name](name, **kwargs) + log.info(f'{__opts__["test"]}: {ret}') + return ret + """ + statesls = """ + run indirect: + test2.call_another: + - m_name: test.succeed_with_changes + + run prereq: + test2.call_another: + - m_name: test.succeed_with_changes + + nop: + test.nop: + - prereq: + - run prereq + """ + with salt_master.state_tree.base.temp_file( + "_states/test2.py", statepy + ), salt_master.state_tree.base.temp_file("test_62590.sls", statesls): + ret = salt_cli.run("saltutil.sync_all", minion_tgt=salt_minion.id) + assert ret.returncode == 0 + ret = salt_cli.run("state.apply", "test_62590", minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert "Success!" == ret.data["test_|-nop_|-nop_|-nop"]["comment"] + + +def test_failing_sls(salt_master, salt_minion, salt_cli, caplog): + """ + Test when running state.sls and the state fails. 
+ When the master stores the job and attempts to send + an event a KeyError was previously being logged. + This test ensures we do not log an error when + attempting to send an event about a failing state. + """ + statesls = """ + test_state: + test.fail_without_changes: + - name: "bla" + """ + with salt_master.state_tree.base.temp_file("test_failure.sls", statesls): + ret = salt_cli.run("state.sls", "test_failure", minion_tgt=salt_minion.id) + for message in caplog.messages: + assert "Event iteration failed with" not in message + + +def test_failing_sls_compound(salt_master, salt_minion, salt_cli, caplog): + """ + Test when running state.sls in a compound command and the state fails. + When the master stores the job and attempts to send + an event a KeyError was previously being logged. + This test ensures we do not log an error when + attempting to send an event about a failing state. + """ + statesls = """ + test_state: + test.fail_without_changes: + - name: "bla" + """ + with salt_master.state_tree.base.temp_file("test_failure.sls", statesls): + ret = salt_cli.run( + "state.sls,cmd.run", "test_failure,ls", minion_tgt=salt_minion.id + ) + for message in caplog.messages: + assert "Event iteration failed with" not in message diff --git a/tests/pytests/integration/states/test_x509_v2.py b/tests/pytests/integration/states/test_x509_v2.py index 9a1c09bb8bd5..b13a2a8922a4 100644 --- a/tests/pytests/integration/states/test_x509_v2.py +++ b/tests/pytests/integration/states/test_x509_v2.py @@ -46,6 +46,65 @@ def x509_pkidir(tmp_path_factory): shutil.rmtree(str(_x509_pkidir), ignore_errors=True) +@pytest.fixture(params=[{}]) +def existing_privkey(x509_salt_call_cli, request, tmp_path): + pk_file = tmp_path / "priv.key" + pk_args = {"name": str(pk_file)} + pk_args.update(request.param) + ret = x509_salt_call_cli.run("state.single", "x509.private_key_managed", **pk_args) + assert ret.returncode == 0 + assert pk_file.exists() + yield pk_args["name"] + + +def 
test_file_managed_does_not_run_in_test_mode_after_x509_v2_invocation_without_changes( + x509_salt_master, x509_salt_call_cli, tmp_path, existing_privkey +): + """ + The x509_v2 state module tries to workaround issue #62590 (Test mode does + not propagate to __states__ when using prereq) by invoking the ``state.single`` + execution module with an explicit test parameter. In some cases, this seems + to trigger another bug: The file module always runs in test mode afterwards. + This seems to be the case when the x509_v2 state module does not report changes + after having been invoked at least once before, until another x509_v2 call results + in a ``file.managed`` call without test mode. + Issue #64195. + """ + new_privkey = tmp_path / "new_privkey" + new_file = tmp_path / "new_file" + assert not new_file.exists() + state = f""" + # The result of this call is irrelevant, just that it exists + Some private key is present: + x509.private_key_managed: + - name: {new_privkey} + # This single call without changes does not trigger the bug on its own + Another private key is (already) present: + x509.private_key_managed: + - name: {existing_privkey} + Subsequent file.managed call should not run in test mode: + file.managed: + - name: {new_file} + - contents: foo + - require: + - Another private key is (already) present + """ + with x509_salt_master.state_tree.base.temp_file("file_managed_test.sls", state): + ret = x509_salt_call_cli.run("state.apply", "file_managed_test") + assert ret.returncode == 0 + assert ret.data + x509_res = next(ret.data[x] for x in ret.data if x.startswith("x509_|-Another")) + assert x509_res["result"] is True + assert not x509_res["changes"] + file_res = next( + ret.data[x] for x in ret.data if x.startswith("file_|-Subsequent") + ) + assert file_res["result"] is True + assert file_res["changes"] + assert new_file.exists() + assert new_file.read_text() == "foo\n" + + @pytest.fixture(scope="module", autouse=True) def x509_data( x509_pkidir, @@ -607,6 
+666,7 @@ def test_privkey_new_with_prereq(x509_salt_call_cli, tmp_path): assert not _belongs_to(cert_new, pk_cur) +@pytest.mark.skip_on_fips_enabled_platform @pytest.mark.usefixtures("privkey_new_pkcs12") @pytest.mark.skipif( CRYPTOGRAPHY_VERSION[0] < 36, diff --git a/pkg/tests/__init__.py b/tests/pytests/pkg/__init__.py similarity index 100% rename from pkg/tests/__init__.py rename to tests/pytests/pkg/__init__.py diff --git a/tests/pytests/pkg/conftest.py b/tests/pytests/pkg/conftest.py new file mode 100644 index 000000000000..048ad7a238db --- /dev/null +++ b/tests/pytests/pkg/conftest.py @@ -0,0 +1,471 @@ +import logging +import os +import pathlib +import shutil +import subprocess +import sys + +import pytest +import yaml +from pytestskipmarkers.utils import platform +from saltfactories.utils import random_string + +import salt.config +from tests.conftest import CODE_DIR +from tests.support.pkg import ApiRequest, SaltMaster, SaltMasterWindows, SaltPkgInstall + +log = logging.getLogger(__name__) + +# Variable defining a FIPS test run or not +FIPS_TESTRUN = os.environ.get("FIPS_TESTRUN", "0") == "1" + + +@pytest.fixture(scope="session") +def version(install_salt): + """ + get version number from artifact + """ + return install_salt.version + + +@pytest.fixture(scope="session", autouse=True) +def _system_up_to_date( + grains, + shell, +): + if grains["os_family"] == "Debian": + ret = shell.run("apt", "update") + assert ret.returncode == 0 + env = os.environ.copy() + env["DEBIAN_FRONTEND"] = "noninteractive" + ret = shell.run( + "apt", + "upgrade", + "-y", + "-o", + "DPkg::Options::=--force-confdef", + "-o", + "DPkg::Options::=--force-confold", + env=env, + ) + assert ret.returncode == 0 + elif grains["os_family"] == "Redhat": + ret = shell.run("yum", "update", "-y") + assert ret.returncode == 0 + + +def pytest_addoption(parser): + """ + register argparse-style options and ini-style config values. 
+ """ + test_selection_group = parser.getgroup("Tests Runtime Selection") + test_selection_group.addoption( + "--pkg-system-service", + default=False, + action="store_true", + help="Run the daemons as system services", + ) + test_selection_group.addoption( + "--upgrade", + default=False, + action="store_true", + help="Install previous version and then upgrade then run tests", + ) + test_selection_group.addoption( + "--downgrade", + default=False, + action="store_true", + help="Install current version and then downgrade to the previous version and run tests", + ) + test_selection_group.addoption( + "--no-install", + default=False, + action="store_true", + help="Do not install salt and use a previous install Salt package", + ) + test_selection_group.addoption( + "--no-uninstall", + default=False, + action="store_true", + help="Do not uninstall salt packages after test run is complete", + ) + test_selection_group.addoption( + "--classic", + default=False, + action="store_true", + help="Test an upgrade from the classic packages.", + ) + test_selection_group.addoption( + "--prev-version", + action="store", + help="Test an upgrade from the version specified.", + ) + test_selection_group.addoption( + "--use-prev-version", + action="store_true", + help="Tells the test suite to validate the version using the previous version (for downgrades)", + ) + test_selection_group.addoption( + "--download-pkgs", + default=False, + action="store_true", + help="Test package download tests", + ) + + +@pytest.hookimpl(tryfirst=True) +def pytest_runtest_setup(item): + """ + Fixtures injection based on markers or test skips based on CLI arguments + """ + if ( + str(item.fspath).startswith(str(pathlib.Path(__file__).parent / "download")) + and item.config.getoption("--download-pkgs") is False + ): + raise pytest.skip.Exception( + "The package download tests are disabled. 
Pass '--download-pkgs' to pytest " + "to enable them.", + _use_item_location=True, + ) + + +@pytest.fixture(scope="session") +def salt_factories_root_dir(request, tmp_path_factory): + root_dir = SaltPkgInstall.salt_factories_root_dir( + request.config.getoption("--pkg-system-service") + ) + if root_dir is not None: + yield root_dir + else: + if platform.is_darwin(): + root_dir = pathlib.Path("/tmp/salt-tests-tmpdir") + root_dir.mkdir(mode=0o777, parents=True, exist_ok=True) + else: + root_dir = tmp_path_factory.mktemp("salt-tests") + try: + yield root_dir + finally: + shutil.rmtree(str(root_dir), ignore_errors=True) + + +@pytest.fixture(scope="session") +def salt_factories_config(salt_factories_root_dir): + return { + "code_dir": CODE_DIR, + "root_dir": salt_factories_root_dir, + "system_service": True, + } + + +@pytest.fixture(scope="session") +def install_salt(request, salt_factories_root_dir): + with SaltPkgInstall( + conf_dir=salt_factories_root_dir / "etc" / "salt", + pkg_system_service=request.config.getoption("--pkg-system-service"), + upgrade=request.config.getoption("--upgrade"), + downgrade=request.config.getoption("--downgrade"), + no_uninstall=request.config.getoption("--no-uninstall"), + no_install=request.config.getoption("--no-install"), + classic=request.config.getoption("--classic"), + prev_version=request.config.getoption("--prev-version"), + use_prev_version=request.config.getoption("--use-prev-version"), + ) as fixture: + yield fixture + + +@pytest.fixture(scope="session") +def salt_factories(salt_factories, salt_factories_root_dir): + salt_factories.root_dir = salt_factories_root_dir + return salt_factories + + +@pytest.fixture(scope="session") +def salt_master(salt_factories, install_salt, pkg_tests_account): + """ + Start up a master + """ + if platform.is_windows(): + state_tree = "C:/salt/srv/salt" + pillar_tree = "C:/salt/srv/pillar" + elif platform.is_darwin(): + state_tree = "/opt/srv/salt" + pillar_tree = "/opt/srv/pillar" + else: + 
state_tree = "/srv/salt" + pillar_tree = "/srv/pillar" + + start_timeout = None + # Since the daemons are "packaged" with tiamat, the salt plugins provided + # by salt-factories won't be discovered. Provide the required `*_dirs` on + # the configuration so that they can still be used. + config_defaults = { + "engines_dirs": [ + str(salt_factories.get_salt_engines_path()), + ], + "log_handlers_dirs": [ + str(salt_factories.get_salt_log_handlers_path()), + ], + } + if platform.is_darwin(): + config_defaults["enable_fqdns_grains"] = False + config_overrides = { + "timeout": 30, + "file_roots": { + "base": [ + state_tree, + ] + }, + "pillar_roots": { + "base": [ + pillar_tree, + ] + }, + "rest_cherrypy": { + "port": 8000, + "disable_ssl": True, + }, + "netapi_enable_clients": ["local"], + "external_auth": { + "auto": { + pkg_tests_account.username: [ + ".*", + ], + }, + }, + "fips_mode": FIPS_TESTRUN, + "open_mode": True, + } + test_user = False + master_config = install_salt.config_path / "master" + if master_config.exists(): + with salt.utils.files.fopen(master_config) as fp: + data = yaml.safe_load(fp) + if data and "user" in data: + test_user = True + # We are testing a different user, so we need to test the system + # configs, or else permissions will not be correct. 
+ config_overrides["user"] = data["user"] + config_overrides["log_file"] = salt.config.DEFAULT_MASTER_OPTS.get( + "log_file" + ) + config_overrides["root_dir"] = salt.config.DEFAULT_MASTER_OPTS.get( + "root_dir" + ) + config_overrides["key_logfile"] = salt.config.DEFAULT_MASTER_OPTS.get( + "key_logfile" + ) + config_overrides["pki_dir"] = salt.config.DEFAULT_MASTER_OPTS.get( + "pki_dir" + ) + config_overrides["api_logfile"] = salt.config.DEFAULT_API_OPTS.get( + "api_logfile" + ) + config_overrides["api_pidfile"] = salt.config.DEFAULT_API_OPTS.get( + "api_pidfile" + ) + # verify files were set with correct owner/group + verify_files = [ + pathlib.Path("/etc", "salt", "pki", "master"), + pathlib.Path("/etc", "salt", "master.d"), + pathlib.Path("/var", "cache", "salt", "master"), + ] + for _file in verify_files: + assert _file.owner() == "salt" + assert _file.group() == "salt" + + master_script = False + if platform.is_windows(): + if install_salt.classic: + master_script = True + if install_salt.relenv: + master_script = True + elif not install_salt.upgrade: + master_script = True + if ( + not install_salt.relenv + and install_salt.use_prev_version + and not install_salt.classic + ): + master_script = False + + if master_script: + salt_factories.system_service = False + salt_factories.generate_scripts = True + scripts_dir = salt_factories.root_dir / "Scripts" + scripts_dir.mkdir(exist_ok=True) + salt_factories.scripts_dir = scripts_dir + python_executable = install_salt.bin_dir / "Scripts" / "python.exe" + if install_salt.classic: + python_executable = install_salt.bin_dir / "python.exe" + if install_salt.relenv: + python_executable = install_salt.install_dir / "Scripts" / "python.exe" + salt_factories.python_executable = python_executable + factory = salt_factories.salt_master_daemon( + random_string("master-"), + defaults=config_defaults, + overrides=config_overrides, + factory_class=SaltMasterWindows, + salt_pkg_install=install_salt, + ) + 
salt_factories.system_service = True + else: + + if install_salt.classic and platform.is_darwin(): + os.environ["PATH"] += ":/opt/salt/bin" + + factory = salt_factories.salt_master_daemon( + random_string("master-"), + defaults=config_defaults, + overrides=config_overrides, + factory_class=SaltMaster, + salt_pkg_install=install_salt, + ) + factory.after_terminate(pytest.helpers.remove_stale_master_key, factory) + if test_user: + # Salt factories calls salt.utils.verify.verify_env + # which sets root perms on /etc/salt/pki/master since we are running + # the test suite as root, but we want to run Salt master as salt + # We ensure those permissions where set by the package earlier + subprocess.run( + [ + "chown", + "-R", + "salt:salt", + str(pathlib.Path("/etc", "salt", "pki", "master")), + ], + check=True, + ) + + if not platform.is_windows() and not platform.is_darwin(): + # The engines_dirs is created in .nox path. We need to set correct perms + # for the user running the Salt Master + check_paths = [state_tree, pillar_tree, CODE_DIR / ".nox"] + for path in check_paths: + if os.path.exists(path) is False: + continue + subprocess.run(["chown", "-R", "salt:salt", str(path)], check=False) + + with factory.started(start_timeout=start_timeout): + yield factory + + +@pytest.fixture(scope="session") +def salt_minion(salt_factories, salt_master, install_salt): + """ + Start up a minion + """ + start_timeout = None + minion_id = random_string("minion-") + # Since the daemons are "packaged" with tiamat, the salt plugins provided + # by salt-factories won't be discovered. Provide the required `*_dirs` on + # the configuration so that they can still be used. 
+ config_defaults = { + "engines_dirs": salt_master.config["engines_dirs"].copy(), + "log_handlers_dirs": salt_master.config["log_handlers_dirs"].copy(), + } + if platform.is_darwin(): + config_defaults["enable_fqdns_grains"] = False + config_overrides = { + "id": minion_id, + "file_roots": salt_master.config["file_roots"].copy(), + "pillar_roots": salt_master.config["pillar_roots"].copy(), + "fips_mode": FIPS_TESTRUN, + "open_mode": True, + } + if platform.is_windows(): + config_overrides[ + "winrepo_dir" + ] = rf"{salt_factories.root_dir}\srv\salt\win\repo" + config_overrides[ + "winrepo_dir_ng" + ] = rf"{salt_factories.root_dir}\srv\salt\win\repo_ng" + config_overrides["winrepo_source_dir"] = r"salt://win/repo_ng" + + if install_salt.classic and platform.is_windows(): + salt_factories.python_executable = None + + if install_salt.classic and platform.is_darwin(): + os.environ["PATH"] += ":/opt/salt/bin" + + factory = salt_master.salt_minion_daemon( + minion_id, + overrides=config_overrides, + defaults=config_defaults, + ) + + # Salt factories calls salt.utils.verify.verify_env + # which sets root perms on /srv/salt and /srv/pillar since we are running + # the test suite as root, but we want to run Salt master as salt + if not platform.is_windows() and not platform.is_darwin(): + state_tree = "/srv/salt" + pillar_tree = "/srv/pillar" + check_paths = [state_tree, pillar_tree, CODE_DIR / ".nox"] + for path in check_paths: + if os.path.exists(path) is False: + continue + subprocess.run(["chown", "-R", "salt:salt", str(path)], check=False) + + factory.after_terminate( + pytest.helpers.remove_stale_minion_key, salt_master, factory.id + ) + with factory.started(start_timeout=start_timeout): + yield factory + + +@pytest.fixture(scope="module") +def salt_cli(salt_master): + return salt_master.salt_cli() + + +@pytest.fixture(scope="module") +def salt_key_cli(salt_master): + return salt_master.salt_key_cli() + + +@pytest.fixture(scope="module") +def 
salt_call_cli(salt_minion): + return salt_minion.salt_call_cli() + + +@pytest.fixture(scope="session") +def pkg_tests_account(): + with pytest.helpers.create_account() as account: + yield account + + +@pytest.fixture(scope="module") +def extras_pypath(): + extras_dir = "extras-{}.{}".format(*sys.version_info) + if platform.is_windows(): + return pathlib.Path( + os.getenv("ProgramFiles"), "Salt Project", "Salt", extras_dir + ) + elif platform.is_darwin(): + return pathlib.Path("/opt", "salt", extras_dir) + else: + return pathlib.Path("/opt", "saltstack", "salt", extras_dir) + + +@pytest.fixture(scope="module") +def extras_pypath_bin(extras_pypath): + return extras_pypath / "bin" + + +@pytest.fixture(scope="module") +def salt_api(salt_master, install_salt, extras_pypath): + """ + start up and configure salt_api + """ + shutil.rmtree(str(extras_pypath), ignore_errors=True) + start_timeout = None + factory = salt_master.salt_api_daemon() + with factory.started(start_timeout=start_timeout): + yield factory + + +@pytest.fixture(scope="module") +def api_request(pkg_tests_account, salt_api): + with ApiRequest( + port=salt_api.config["rest_cherrypy"]["port"], account=pkg_tests_account + ) as session: + yield session diff --git a/pkg/tests/integration/__init__.py b/tests/pytests/pkg/downgrade/__init__.py similarity index 100% rename from pkg/tests/integration/__init__.py rename to tests/pytests/pkg/downgrade/__init__.py diff --git a/tests/pytests/pkg/downgrade/test_salt_downgrade.py b/tests/pytests/pkg/downgrade/test_salt_downgrade.py new file mode 100644 index 000000000000..f6a8ef17a237 --- /dev/null +++ b/tests/pytests/pkg/downgrade/test_salt_downgrade.py @@ -0,0 +1,59 @@ +import packaging.version +import pytest +from pytestskipmarkers.utils import platform + + +def test_salt_downgrade(salt_call_cli, install_salt): + """ + Test a downgrade of Salt. 
+ """ + if not install_salt.downgrade: + pytest.skip("Not testing a downgrade, do not run") + + is_downgrade_to_relenv = packaging.version.parse( + install_salt.prev_version + ) >= packaging.version.parse("3006.0") + + if is_downgrade_to_relenv: + original_py_version = install_salt.package_python_version() + + # Verify current install version is setup correctly and works + ret = salt_call_cli.run("test.version") + assert ret.returncode == 0 + assert packaging.version.parse(ret.data) == packaging.version.parse( + install_salt.artifact_version + ) + + # Test pip install before a downgrade + dep = "PyGithub==1.56.0" + install = salt_call_cli.run("--local", "pip.install", dep) + assert install.returncode == 0 + + # Verify we can use the module dependent on the installed package + repo = "https://github.com/saltstack/salt.git" + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "Authentication information could" in use_lib.stderr + + # Downgrade Salt to the previous version and test + install_salt.install(downgrade=True) + bin_file = "salt" + if platform.is_windows(): + if not is_downgrade_to_relenv: + bin_file = install_salt.install_dir / "salt-call.bat" + else: + bin_file = install_salt.install_dir / "salt-call.exe" + elif platform.is_darwin() and install_salt.classic: + bin_file = install_salt.bin_dir / "salt-call" + + ret = install_salt.proc.run(bin_file, "--version") + assert ret.returncode == 0 + assert packaging.version.parse( + ret.stdout.strip().split()[1] + ) < packaging.version.parse(install_salt.artifact_version) + + if is_downgrade_to_relenv: + new_py_version = install_salt.package_python_version() + if new_py_version == original_py_version: + # test pip install after a downgrade + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "Authentication information could" in use_lib.stderr diff --git a/pkg/tests/support/__init__.py b/tests/pytests/pkg/download/__init__.py similarity index 100% rename from 
pkg/tests/support/__init__.py rename to tests/pytests/pkg/download/__init__.py diff --git a/tests/pytests/pkg/download/test_pkg_download.py b/tests/pytests/pkg/download/test_pkg_download.py new file mode 100644 index 000000000000..0fa9089e77dd --- /dev/null +++ b/tests/pytests/pkg/download/test_pkg_download.py @@ -0,0 +1,559 @@ +""" +Test Salt Pkg Downloads +""" +import contextlib +import logging +import os +import pathlib +import shutil + +import packaging +import pytest +from pytestskipmarkers.utils import platform + +log = logging.getLogger(__name__) + + +def get_salt_test_commands(): + salt_release = get_salt_release() + if platform.is_windows(): + if packaging.version.parse(salt_release) > packaging.version.parse("3005"): + salt_test_commands = [ + ["salt-call.exe", "--local", "test.versions"], + ["salt-call.exe", "--local", "grains.items"], + ["salt-minion.exe", "--version"], + ] + else: + salt_test_commands = [ + ["salt-call.bat", "--local", "test.versions"], + ["salt-call.bat", "--local", "grains.items"], + ["salt.bat", "--version"], + ["salt-master.bat", "--version"], + ["salt-minion.bat", "--version"], + ["salt-ssh.bat", "--version"], + ["salt-syndic.bat", "--version"], + ["salt-api.bat", "--version"], + ["salt-cloud.bat", "--version"], + ] + else: + salt_test_commands = [ + ["salt-call", "--local", "test.versions"], + ["salt-call", "--local", "grains.items"], + ["salt", "--version"], + ["salt-master", "--version"], + ["salt-minion", "--version"], + ["salt-ssh", "--version"], + ["salt-syndic", "--version"], + ["salt-api", "--version"], + ["salt-cloud", "--version"], + ] + return salt_test_commands + + +@pytest.fixture(scope="module") +def root_url(salt_release): + if os.environ.get("SALT_REPO_TYPE", "release") == "staging": + repo_domain = os.environ.get( + "SALT_REPO_DOMAIN_STAGING", "staging.repo.saltproject.io" + ) + else: + repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io") + if "rc" in salt_release: + salt_path = 
"salt_rc/salt" + else: + salt_path = "salt" + salt_repo_user = os.environ.get("SALT_REPO_USER") + if salt_repo_user: + log.info( + "SALT_REPO_USER: %s", + salt_repo_user[0] + "*" * (len(salt_repo_user) - 2) + salt_repo_user[-1], + ) + salt_repo_pass = os.environ.get("SALT_REPO_PASS") + if salt_repo_pass: + log.info( + "SALT_REPO_PASS: %s", + salt_repo_pass[0] + "*" * (len(salt_repo_pass) - 2) + salt_repo_pass[-1], + ) + if salt_repo_user and salt_repo_pass: + repo_domain = f"{salt_repo_user}:{salt_repo_pass}@{repo_domain}" + _root_url = f"https://{repo_domain}/{salt_path}/py3" + log.info("Repository Root URL: %s", _root_url) + return _root_url + + +@pytest.fixture(scope="module") +def package_type(): + return os.environ.get("DOWNLOAD_TEST_PACKAGE_TYPE") + + +def get_salt_release(): + salt_release = os.environ.get("SALT_RELEASE") + pkg_test_type = os.environ.get("PKG_TEST_TYPE", "install") + if salt_release is None: + if pkg_test_type == "download-pkgs": + log.warning( + "Setting salt release to 3006.0rc2 which is probably not what you want." 
+ ) + salt_release = "3006.0rc2" + if pkg_test_type == "download-pkgs": + if packaging.version.parse(salt_release) < packaging.version.parse("3006.0rc1"): + log.warning(f"The salt release being tested, {salt_release!r} looks off.") + return salt_release + + +def get_repo_subpath_params(): + current_release = packaging.version.parse(get_salt_release()) + params = ["minor", current_release.major] + latest_env_var = os.environ.get("LATEST_SALT_RELEASE") + if latest_env_var is not None: + latest_release = packaging.version.parse(latest_env_var) + if current_release >= latest_release: + log.debug( + f"Running the tests for the latest release since {str(current_release)} >= {str(latest_release)}" + ) + params.append("latest") + return params + + +@pytest.fixture( + scope="module", + params=get_repo_subpath_params(), +) +def repo_subpath(request): + return request.param + + +@pytest.fixture(scope="module") +def gpg_key_name(salt_release): + if packaging.version.parse(salt_release) > packaging.version.parse("3005"): + return "SALT-PROJECT-GPG-PUBKEY-2023.pub" + return "salt-archive-keyring.gpg" + + +@pytest.fixture(scope="module") +def salt_release(): + yield get_salt_release() + + +@pytest.fixture(scope="module") +def onedir_install_path(tmp_path_factory): + install_path = tmp_path_factory.mktemp("onedir_install") + yield install_path + shutil.rmtree(install_path, ignore_errors=True) + + +@pytest.fixture(scope="module") +def _setup_system( + grains, + shell, + root_url, + salt_release, + gpg_key_name, + repo_subpath, + package_type, + tmp_path_factory, + onedir_install_path, +): + downloads_path = tmp_path_factory.mktemp("downloads") + try: + # Windows is a special case, because sometimes we need to uninstall the packages + if grains["os_family"] == "Windows": + with setup_windows( + shell, + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + repo_subpath=repo_subpath, + package_type=package_type, + onedir_install_path=onedir_install_path, 
+ ): + yield + else: + if grains["os_family"] == "MacOS": + setup_macos( + shell, + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + repo_subpath=repo_subpath, + package_type=package_type, + onedir_install_path=onedir_install_path, + ) + elif grains["os"] == "Amazon": + setup_redhat_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os"] == "Fedora": + setup_redhat_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os"] == "VMware Photon OS": + setup_redhat_family( + shell, + os_name="photon", + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os_family"] == "RedHat": + setup_redhat_family( + shell, + os_name="redhat", + os_version=grains["osmajorrelease"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + ) + elif grains["os_family"] == "Debian": + setup_debian_family( + shell, + os_name=grains["os"].lower(), + os_version=grains["osrelease"], + os_codename=grains["oscodename"], + root_url=root_url, + salt_release=salt_release, + downloads_path=downloads_path, + gpg_key_name=gpg_key_name, + repo_subpath=repo_subpath, + package_type=package_type, + onedir_install_path=onedir_install_path, + ) + else: + pytest.fail("Don't know how to handle %s", grains["osfinger"]) + yield + finally: + shutil.rmtree(downloads_path, ignore_errors=True) + + +def setup_redhat_family( + shell, + os_name, + os_version, + root_url, + 
salt_release, + downloads_path, + gpg_key_name, + repo_subpath, +): + arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" + + if repo_subpath == "minor": + repo_url_base = ( + f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" + ) + else: + repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}" + + gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}" + + try: + pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name) + except Exception as exc: # pylint: disable=broad-except + pytest.fail(f"Failed to download {gpg_file_url}: {exc}") + + ret = shell.run("rpm", "--import", str(downloads_path / gpg_key_name), check=False) + if ret.returncode != 0: + pytest.fail("Failed to import gpg key") + + repo_file = pytest.helpers.download_file( + f"{repo_url_base}.repo", downloads_path / f"salt-{os_name}.repo" + ) + + commands = [ + ("mv", str(repo_file), "/etc/yum.repos.d/salt.repo"), + ("yum", "clean", "all" if os_name == "photon" else "expire-cache"), + ( + "yum", + "install", + "-y", + "salt-master", + "salt-minion", + "salt-ssh", + "salt-syndic", + "salt-cloud", + "salt-api", + "salt-debuginfo", + ), + ] + + for cmd in commands: + ret = shell.run(*cmd, check=False) + if ret.returncode != 0: + pytest.fail(f"Failed to run '{' '.join(cmd)!r}':\n{ret}") + + +def setup_debian_family( + shell, + os_name, + os_version, + os_codename, + root_url, + salt_release, + downloads_path, + gpg_key_name, + repo_subpath, + package_type, + onedir_install_path, +): + arch = os.environ.get("SALT_REPO_ARCH") or "amd64" + ret = shell.run("apt-get", "update", "-y", check=False) + if ret.returncode != 0: + pytest.fail(str(ret)) + + if package_type == "package": + if arch == "aarch64": + arch = "arm64" + elif arch == "x86_64": + arch = "amd64" + + if repo_subpath == "minor": + repo_url_base = f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}/{salt_release}" + else: + repo_url_base = 
f"{root_url}/{os_name}/{os_version}/{arch}/{repo_subpath}" + gpg_file_url = f"{root_url}/{os_name}/{os_version}/{arch}/{gpg_key_name}" + + try: + pytest.helpers.download_file(gpg_file_url, downloads_path / gpg_key_name) + except Exception as exc: # pylint: disable=broad-except + pytest.fail(f"Failed to download {gpg_file_url}: {exc}") + + salt_sources_path = downloads_path / "salt.list" + salt_sources_path.write_text( + f"deb [signed-by=/usr/share/keyrings/{gpg_key_name} arch={arch}] {repo_url_base} {os_codename} main\n" + ) + commands = [ + ( + "mv", + str(downloads_path / gpg_key_name), + f"/usr/share/keyrings/{gpg_key_name}", + ), + ( + "mv", + str(salt_sources_path), + "/etc/apt/sources.list.d/salt.list", + ), + ("apt-get", "install", "-y", "ca-certificates"), + ("update-ca-certificates",), + ("apt-get", "update"), + ( + "apt-get", + "install", + "-y", + "salt-master", + "salt-minion", + "salt-ssh", + "salt-syndic", + "salt-cloud", + "salt-api", + "salt-dbg", + ), + ] + for cmd in commands: + ret = shell.run(*cmd) + if ret.returncode != 0: + pytest.fail(str(ret)) + else: + # We are testing the onedir download + onedir_name = f"salt-{salt_release}-onedir-linux-{arch}.tar.xz" + if repo_subpath == "minor": + repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}" + else: + repo_url_base = f"{root_url}/onedir/{repo_subpath}" + onedir_url = f"{repo_url_base}/{onedir_name}" + onedir_location = downloads_path / onedir_name + onedir_extracted = onedir_install_path + + try: + pytest.helpers.download_file(onedir_url, onedir_location) + except Exception as exc: # pylint: disable=broad-except + pytest.fail(f"Failed to download {onedir_url}: {exc}") + + shell.run("tar", "xvf", str(onedir_location), "-C", str(onedir_extracted)) + + +def setup_macos( + shell, + root_url, + salt_release, + downloads_path, + repo_subpath, + package_type, + onedir_install_path, +): + arch = os.environ.get("SALT_REPO_ARCH") or "x86_64" + if package_type == "package": + + if 
packaging.version.parse(salt_release) > packaging.version.parse("3005"): + mac_pkg = f"salt-{salt_release}-py3-{arch}.pkg" + if repo_subpath == "minor": + mac_pkg_url = ( + f"{root_url}/macos/{repo_subpath}/{salt_release}/{mac_pkg}" + ) + else: + mac_pkg_url = f"{root_url}/macos/{repo_subpath}/{mac_pkg}" + else: + mac_pkg_url = f"{root_url}/macos/{salt_release}/{mac_pkg}" + mac_pkg = f"salt-{salt_release}-macos-{arch}.pkg" + + mac_pkg_path = downloads_path / mac_pkg + pytest.helpers.download_file(mac_pkg_url, mac_pkg_path) + + ret = shell.run( + "installer", + "-pkg", + str(mac_pkg_path), + "-target", + "/", + check=False, + ) + assert ret.returncode == 0, ret + else: + # We are testing the onedir download + onedir_name = f"salt-{salt_release}-onedir-darwin-{arch}.tar.xz" + if repo_subpath == "minor": + repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}" + else: + repo_url_base = f"{root_url}/onedir/{repo_subpath}" + onedir_url = f"{repo_url_base}/{onedir_name}" + onedir_location = downloads_path / onedir_name + onedir_extracted = onedir_install_path + + try: + pytest.helpers.download_file(onedir_url, onedir_location) + except Exception as exc: # pylint: disable=broad-except + pytest.fail(f"Failed to download {onedir_url}: {exc}") + + shell.run("tar", "xvf", str(onedir_location), "-C", str(onedir_extracted)) + + +@contextlib.contextmanager +def setup_windows( + shell, + root_url, + salt_release, + downloads_path, + repo_subpath, + package_type, + onedir_install_path, +): + try: + arch = os.environ.get("SALT_REPO_ARCH") or "amd64" + if package_type != "onedir": + root_dir = pathlib.Path(r"C:\Program Files\Salt Project\Salt") + + if packaging.version.parse(salt_release) > packaging.version.parse("3005"): + if package_type.lower() == "nsis": + if arch.lower() != "x86": + arch = arch.upper() + win_pkg = f"Salt-Minion-{salt_release}-Py3-{arch}-Setup.exe" + else: + if arch.lower() != "x86": + arch = arch.upper() + win_pkg = 
f"Salt-Minion-{salt_release}-Py3-{arch}.msi" + if repo_subpath == "minor": + win_pkg_url = ( + f"{root_url}/windows/{repo_subpath}/{salt_release}/{win_pkg}" + ) + else: + win_pkg_url = f"{root_url}/windows/{repo_subpath}/{win_pkg}" + ssm_bin = root_dir / "ssm.exe" + else: + win_pkg = f"salt-{salt_release}-windows-{arch}.exe" + win_pkg_url = f"{root_url}/windows/{salt_release}/{win_pkg}" + ssm_bin = root_dir / "bin" / "ssm_bin" + + pkg_path = downloads_path / win_pkg + + pytest.helpers.download_file(win_pkg_url, pkg_path) + if package_type.lower() == "nsis": + ret = shell.run(str(pkg_path), "/start-minion=0", "/S", check=False) + else: + ret = shell.run( + "msiexec", "/qn", "/i", str(pkg_path), 'START_MINION=""' + ) + assert ret.returncode == 0, ret + + log.debug("Removing installed salt-minion service") + ret = shell.run( + "cmd", + "/c", + str(ssm_bin), + "remove", + "salt-minion", + "confirm", + check=False, + ) + assert ret.returncode == 0, ret + else: + # We are testing the onedir download + onedir_name = f"salt-{salt_release}-onedir-windows-{arch}.zip" + if repo_subpath == "minor": + repo_url_base = f"{root_url}/onedir/{repo_subpath}/{salt_release}" + else: + repo_url_base = f"{root_url}/onedir/{repo_subpath}" + onedir_url = f"{repo_url_base}/{onedir_name}" + onedir_location = downloads_path / onedir_name + onedir_extracted = onedir_install_path + + try: + pytest.helpers.download_file(onedir_url, onedir_location) + except Exception as exc: # pylint: disable=broad-except + pytest.fail(f"Failed to download {onedir_url}: {exc}") + + shell.run("unzip", str(onedir_location), "-d", str(onedir_extracted)) + yield + finally: + # We need to uninstall the MSI packages, otherwise they will not install correctly + if package_type.lower() == "msi": + ret = shell.run("msiexec", "/qn", "/x", str(pkg_path)) + assert ret.returncode == 0, ret + + +@pytest.fixture(scope="module") +def install_dir(_setup_system, package_type, onedir_install_path): + if package_type != "onedir": + 
if platform.is_windows(): + return pathlib.Path( + os.getenv("ProgramFiles"), "Salt Project", "Salt" + ).resolve() + if platform.is_darwin(): + return pathlib.Path("/opt", "salt") + return pathlib.Path("/opt", "saltstack", "salt") + else: + # We are testing the onedir + return onedir_install_path / "salt" + + +@pytest.fixture(scope="module") +def salt_test_command(request, install_dir): + command = request.param + command[0] = str(install_dir / command[0]) + return command + + +@pytest.mark.parametrize("salt_test_command", get_salt_test_commands(), indirect=True) +def test_download(shell, salt_test_command): + """ + Test downloading of Salt packages and running various commands. + """ + ret = shell.run(*salt_test_command, check=False) + assert ret.returncode == 0, ret diff --git a/tests/pytests/functional/utils/pyinstaller/__init__.py b/tests/pytests/pkg/integration/__init__.py similarity index 100% rename from tests/pytests/functional/utils/pyinstaller/__init__.py rename to tests/pytests/pkg/integration/__init__.py diff --git a/tests/pytests/pkg/integration/test_check_imports.py b/tests/pytests/pkg/integration/test_check_imports.py new file mode 100644 index 000000000000..eac49f48fac2 --- /dev/null +++ b/tests/pytests/pkg/integration/test_check_imports.py @@ -0,0 +1,102 @@ +import logging +import subprocess + +import pytest +from pytestskipmarkers.utils import platform +from saltfactories.utils.functional import MultiStateResult + +pytestmark = [ + pytest.mark.skip_on_windows, +] + +log = logging.getLogger(__name__) + + +CHECK_IMPORTS_SLS_CONTENTS = """ +#!py +import importlib + +def run(): + config = {} + for module in [ + 'templates', 'platform', 'cli', 'executors', 'config', 'wheel', 'netapi', + 'cache', 'proxy', 'transport', 'metaproxy', 'modules', 'tokens', 'matchers', + 'acl', 'auth', 'log', 'engines', 'client', 'returners', 'runners', 'tops', + 'output', 'daemons', 'thorium', 'renderers', 'states', 'cloud', 'roster', + 'beacons', 'pillar', 'spm', 'utils', 
'sdb', 'fileserver', 'defaults', + 'ext', 'queues', 'grains', 'serializers' + ]: + import_name = "salt.{}".format(module) + try: + importlib.import_module(import_name) + config[import_name] = { + 'test.succeed_without_changes': [ + { + "name": import_name, + 'comment': "The '{}' import succeeded.".format(import_name) + } + ] + } + except ModuleNotFoundError as err: + config[import_name] = { + 'test.fail_without_changes': [ + { + "name": import_name, + 'comment': "The '{}' import failed. The error was: {}".format(import_name, err) + } + ] + } + + for import_name in ["telnetlib"]: + try: + importlib.import_module(import_name) + config[import_name] = { + 'test.succeed_without_changes': [ + { + "name": import_name, + 'comment': "The '{}' import succeeded.".format(import_name) + } + ] + } + except ModuleNotFoundError as err: + config[import_name] = { + 'test.fail_without_changes': [ + { + "name": import_name, + 'comment': "The '{}' import failed. The error was: {}".format(import_name, err) + } + ] + } + return config +""" + + +@pytest.fixture +def state_name(salt_master): + name = "check-imports" + with salt_master.state_tree.base.temp_file( + f"{name}.sls", CHECK_IMPORTS_SLS_CONTENTS + ): + if not platform.is_windows() and not platform.is_darwin(): + subprocess.run( + [ + "chown", + "-R", + "salt:salt", + str(salt_master.state_tree.base.write_path), + ], + check=False, + ) + yield name + + +def test_check_imports(salt_cli, salt_minion, state_name): + """ + Test imports + """ + ret = salt_cli.run("state.sls", state_name, minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert ret.data + result = MultiStateResult(raw=ret.data) + for state_ret in result: + assert state_ret.result is True diff --git a/tests/pytests/pkg/integration/test_clean_zmq_teardown.py b/tests/pytests/pkg/integration/test_clean_zmq_teardown.py new file mode 100644 index 000000000000..309493e69aa5 --- /dev/null +++ b/tests/pytests/pkg/integration/test_clean_zmq_teardown.py @@ -0,0 +1,61 @@ 
+import logging +import pathlib +import shutil +import textwrap + +import pytest + +pytestmark = [ + pytest.mark.skip_on_windows, +] + +log = logging.getLogger(__name__) + + +@pytest.fixture(autouse=True) +def _skip_on_non_relenv(install_salt): + if not install_salt.relenv: + pytest.skip("This test is for relenv versions of salt") + + +def test_check_no_import_error(salt_call_cli, salt_master): + """ + Test that we don't have any errors on teardown of python when using a py-rendered sls file + This is a package test because the issue was not reproducible in our normal test suite + """ + init_sls = textwrap.dedent( + """#!py + + +def run(): + return { + "file_foobar": { + "file.managed": [ + { + "name": "/foobar" + }, + { + "template": "jinja" + }, + { + "context": { + "foobar": "baz", + } + }, + { + "source": "salt://breaks/foobar.jinja", + } + ] + } + } + """ + ) + base_tree = pathlib.Path(salt_master.config["file_roots"]["base"][0]) + breaks_tree = base_tree / "breaks" + breaks_tree.mkdir(exist_ok=True) + (breaks_tree / "init.sls").write_text(init_sls) + (breaks_tree / "foobar.jinja").write_text("{{ foobar }}") + output = salt_call_cli.run("state.apply", "breaks", "--output-diff", "test=true") + log.debug(output.stderr) + shutil.rmtree(str(breaks_tree), ignore_errors=True) + assert not output.stderr diff --git a/pkg/tests/integration/test_enabled_disabled.py b/tests/pytests/pkg/integration/test_enabled_disabled.py similarity index 56% rename from pkg/tests/integration/test_enabled_disabled.py rename to tests/pytests/pkg/integration/test_enabled_disabled.py index 2bedb87d6505..c6f0d75db8fb 100644 --- a/pkg/tests/integration/test_enabled_disabled.py +++ b/tests/pytests/pkg/integration/test_enabled_disabled.py @@ -1,6 +1,5 @@ import pytest from pytestskipmarkers.utils import platform -from saltfactories.utils.functional import MultiStateResult @pytest.mark.skip_on_windows(reason="Linux test only") @@ -8,15 +7,24 @@ def test_services(install_salt, salt_cli, 
salt_minion): """ Check if Services are enabled/disabled """ - if install_salt.compressed: - pytest.skip("Skip test on single binary and onedir package") - if install_salt.distro_id in ("ubuntu", "debian"): services_enabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"] services_disabled = [] elif install_salt.distro_id in ("centos", "redhat", "amzn", "fedora"): services_enabled = [] services_disabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"] + elif install_salt.distro_id == "photon": + if float(install_salt.distro_version) < 5: + services_enabled = [] + services_disabled = [ + "salt-master", + "salt-minion", + "salt-syndic", + "salt-api", + ] + else: + services_enabled = ["salt-master", "salt-minion", "salt-syndic", "salt-api"] + services_disabled = [] elif platform.is_darwin(): services_enabled = ["salt-minion"] services_disabled = [] @@ -24,17 +32,9 @@ def test_services(install_salt, salt_cli, salt_minion): pytest.fail(f"Don't know how to handle os_family={install_salt.distro_id}") for service in services_enabled: - assert ( - "true" - in salt_cli.run( - "service.enabled", service, minion_tgt=salt_minion.id - ).stdout - ) + ret = salt_cli.run("service.enabled", service, minion_tgt=salt_minion.id) + assert "true" in ret.stdout for service in services_disabled: - assert ( - "true" - in salt_cli.run( - "service.disabled", service, minion_tgt=salt_minion.id - ).stdout - ) + ret = salt_cli.run("service.disabled", service, minion_tgt=salt_minion.id) + assert "true" in ret.stdout diff --git a/tests/pytests/pkg/integration/test_help.py b/tests/pytests/pkg/integration/test_help.py new file mode 100644 index 000000000000..b5070638fe2d --- /dev/null +++ b/tests/pytests/pkg/integration/test_help.py @@ -0,0 +1,25 @@ +import subprocess + + +def test_help(install_salt): + """ + Test --help works for all salt cmds + """ + for cmd in install_salt.binary_paths.values(): + cmd = [str(x) for x in cmd] + + if len(cmd) > 1 and "shell" in cmd[1]: + # 
Singlebin build, unable to get the version + continue + + if "python" in cmd[0] and len(cmd) == 1: + ret = install_salt.proc.run( + *cmd, "--version", stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + assert "Python" in ret.stdout + else: + ret = install_salt.proc.run( + *cmd, "--help", stdout=subprocess.PIPE, stderr=subprocess.PIPE + ) + assert "Usage" in ret.stdout + assert ret.returncode == 0 diff --git a/tests/pytests/pkg/integration/test_logrotate_config.py b/tests/pytests/pkg/integration/test_logrotate_config.py new file mode 100644 index 000000000000..fea0123b6eb6 --- /dev/null +++ b/tests/pytests/pkg/integration/test_logrotate_config.py @@ -0,0 +1,46 @@ +""" +Tests for logrotate config +""" + +import pathlib + +import packaging.version +import pytest + +pytestmark = [ + pytest.mark.skip_unless_on_linux, +] + + +@pytest.fixture +def logrotate_config_file(grains): + """ + Fixture for logrotate config file path + """ + if grains["os_family"] == "RedHat": + return pathlib.Path("/etc/logrotate.d", "salt") + elif grains["os_family"] == "Debian": + return pathlib.Path("/etc/logrotate.d", "salt-common") + + +def test_logrotate_config(logrotate_config_file): + """ + Test that logrotate config has been installed correctly + """ + assert logrotate_config_file.is_file() + assert logrotate_config_file.owner() == "root" + assert logrotate_config_file.group() == "root" + + +def test_issue_65231_etc_logrotate_salt_dir_removed(install_salt): + """ + Test that /etc/logrotate.d/salt is not a directory + """ + if install_salt.prev_version and packaging.version.parse( + install_salt.prev_version + ) <= packaging.version.parse("3006.4"): + pytest.skip("Testing a downgrade to 3006.4, do not run") + + path = pathlib.Path("/etc/logrotate.d/salt") + if path.exists(): + assert path.is_dir() is False diff --git a/tests/pytests/pkg/integration/test_multi_minion.py b/tests/pytests/pkg/integration/test_multi_minion.py new file mode 100644 index 000000000000..c8f0659a684b --- 
/dev/null +++ b/tests/pytests/pkg/integration/test_multi_minion.py @@ -0,0 +1,138 @@ +import os +import pathlib +import subprocess + +import packaging.version +import psutil +import pytest + +pytestmark = [ + pytest.mark.skip_unless_on_windows, +] + + +@pytest.fixture(autouse=True) +def _skip_on_less_than_3006_1(install_salt): + if packaging.version.parse(install_salt.version) <= packaging.version.parse( + "3006.1" + ): + pytest.skip( + "Multi-minion script only available on versions greater than 3006.1" + ) + + +@pytest.fixture +def mm_script(install_salt): + yield install_salt.ssm_bin.parent / "multi-minion.ps1" + + +@pytest.fixture(scope="function") +def mm_conf(mm_script): + yield pathlib.Path(os.getenv("LocalAppData"), "Salt Project", "Salt", "conf") + subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '"), "-d"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + + +def test_script_present(mm_script): + """ + Ensure the multi-minion.ps1 file is present in the root of the installation + """ + assert mm_script.exists() + + +def test_install(mm_script, mm_conf): + """ + Install a second minion with default settings. 
Should create a minion config + file in Local AppData + """ + ret = subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '")], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + conf_file = mm_conf / "minion" + assert conf_file.exists() + assert conf_file.read_text().find("master: salt") > -1 + + +def test_install_master(mm_script, mm_conf): + """ + Install a second minion and set the master to spongebob + """ + ret = subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '"), "-m", "spongebob"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + conf_file = mm_conf / "minion" + assert conf_file.exists() + assert conf_file.read_text().find("master: spongebob") > -1 + + +def test_install_prefix(mm_script, mm_conf): + """ + Install a second minion and add a prefix to the minion id + """ + ret = subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '"), "-p", "squarepants"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + conf_file = mm_conf / "minion" + assert conf_file.exists() + assert conf_file.read_text().find("id: squarepants") > -1 + + +def test_install_log_level(mm_script, mm_conf): + """ + Install a second minion and set the log level in the log file to debug + """ + ret = subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '"), "-l", "debug"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + conf_file = mm_conf / "minion" + assert conf_file.exists() + assert conf_file.read_text().find("log_level_logfile: debug") > -1 + + +def test_install_start(mm_script, mm_conf): + """ + Install a second minion and start that minion in a hidden process + """ + ret = 
subprocess.run( + ["powershell", str(mm_script).replace(" ", "' '"), "-s"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + conf_file = mm_conf / "minion" + assert conf_file.exists() + assert conf_file.read_text().find("master: salt") > -1 + + found = False + for p in psutil.process_iter(["cmdline", "name"]): + if p.info["name"] and p.info["name"] == "salt-minion.exe": + if f"{mm_conf}" in p.info["cmdline"]: + found = True + assert found is True diff --git a/tests/pytests/pkg/integration/test_pip.py b/tests/pytests/pkg/integration/test_pip.py new file mode 100644 index 000000000000..b10adb3c1051 --- /dev/null +++ b/tests/pytests/pkg/integration/test_pip.py @@ -0,0 +1,221 @@ +import json +import os +import pathlib +import shutil +import subprocess + +import pytest +from pytestskipmarkers.utils import platform + + +@pytest.fixture +def pypath(): + if platform.is_windows(): + return pathlib.Path(os.getenv("ProgramFiles"), "Salt Project", "Salt") + else: + return pathlib.Path("/opt", "saltstack", "salt", "pypath", "bin") + + +@pytest.fixture(autouse=True) +def wipe_pydeps(shell, install_salt, extras_pypath): + try: + yield + finally: + # Note, uninstalling anything with an associated script will leave the script. + # This is due to a bug in pip. 
+ for dep in ["pep8", "PyGithub"]: + shell.run( + *(install_salt.binary_paths["pip"] + ["uninstall", "-y", dep]), + ) + # Let's remove everything under the extras directory, uninstalling doesn't get dependencies + dirs = [] + files = [] + for filename in extras_pypath.glob("**/**"): + if filename != extras_pypath and filename.exists(): + if filename.is_dir(): + dirs.append(filename) + else: + files.append(filename) + for fp in files: + fp.unlink() + for dirname in dirs: + shutil.rmtree(dirname, ignore_errors=True) + + +@pytest.fixture +def pkg_tests_account_environ(pkg_tests_account): + environ = os.environ.copy() + environ["LOGNAME"] = environ["USER"] = pkg_tests_account.username + environ["HOME"] = pkg_tests_account.info.home + return environ + + +def test_pip_install(salt_call_cli, install_salt, shell): + """ + Test pip.install and ensure module can use installed library + """ + dep = "PyGithub==1.56.0" + repo = "https://github.com/saltstack/salt.git" + + try: + install = salt_call_cli.run("--local", "pip.install", dep) + assert install.returncode == 0 + + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "Authentication information could" in use_lib.stderr + finally: + ret = salt_call_cli.run("--local", "pip.uninstall", dep) + assert ret.returncode == 0 + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "The github execution module cannot be loaded" in use_lib.stderr + + +def test_pip_install_extras(shell, install_salt, extras_pypath_bin): + """ + Test salt-pip installs into the correct directory + """ + if not install_salt.relenv: + pytest.skip("The extras directory is only in relenv versions") + dep = "pep8" + extras_keyword = "extras-3" + if platform.is_windows(): + check_path = extras_pypath_bin / f"{dep}.exe" + else: + check_path = extras_pypath_bin / dep + + install_ret = shell.run(*(install_salt.binary_paths["pip"] + ["install", dep])) + assert install_ret.returncode == 0 + + ret = 
shell.run(*(install_salt.binary_paths["pip"] + ["list", "--format=json"])) + assert ret.returncode == 0 + assert ret.data # We can parse the JSON output + for pkg in ret.data: + if pkg["name"] == dep: + break + else: + pytest.fail( + f"The {dep!r} package was not found installed. Packages Installed: {ret.data}" + ) + + show_ret = shell.run(*(install_salt.binary_paths["pip"] + ["show", dep])) + assert show_ret.returncode == 0 + assert extras_keyword in show_ret.stdout + assert check_path.exists() + + ret = shell.run(str(check_path), "--version") + assert ret.returncode == 0 + + +def demote(account): + def result(): + # os.setgid does not remove group membership, so we remove them here so they are REALLY non-root + os.setgroups([]) + os.setgid(account.info.gid) + os.setuid(account.info.uid) + + return result + + +@pytest.mark.skip_on_windows(reason="We can't easily demote users on Windows") +def test_pip_non_root( + shell, + install_salt, + pkg_tests_account, + extras_pypath_bin, + pypath, + pkg_tests_account_environ, +): + if install_salt.classic: + pytest.skip("We can install non-root for classic packages") + check_path = extras_pypath_bin / "pep8" + if not install_salt.relenv and not install_salt.classic: + check_path = pypath / "pep8" + # We should be able to issue a --help without being root + ret = subprocess.run( + install_salt.binary_paths["salt"] + ["--help"], + preexec_fn=demote(pkg_tests_account), + env=pkg_tests_account_environ, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + assert "Usage" in ret.stdout + + # Let tiamat-pip create the pypath directory for us + ret = subprocess.run( + install_salt.binary_paths["pip"] + ["install", "-h"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode == 0, ret.stderr + + # Now, we should still not be able to install as non-root + ret = subprocess.run( + 
install_salt.binary_paths["pip"] + ["install", "pep8"], + preexec_fn=demote(pkg_tests_account), + env=pkg_tests_account_environ, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert ret.returncode != 0, ret.stderr + # But we should be able to install as root + ret = subprocess.run( + install_salt.binary_paths["pip"] + ["install", "pep8"], + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + + assert check_path.exists(), shutil.which("pep8") + + assert ret.returncode == 0, ret.stderr + + +def test_pip_install_salt_extension_in_extras(install_salt, extras_pypath, shell): + """ + Test salt-pip installs into the correct directory and the salt extension + is properly loaded. + """ + if not install_salt.relenv: + pytest.skip("The extras directory is only in relenv versions") + dep = "salt-analytics-framework" + dep_version = "0.1.0" + + install_ret = shell.run( + *(install_salt.binary_paths["pip"] + ["install", f"{dep}=={dep_version}"]), + ) + assert install_ret.returncode == 0 + + ret = shell.run( + *(install_salt.binary_paths["pip"] + ["list", "--format=json"]), + ) + assert ret.returncode == 0 + pkgs_installed = json.loads(ret.stdout.strip()) + for pkg in pkgs_installed: + if pkg["name"] == dep: + break + else: + pytest.fail( + f"The {dep!r} package was not found installed. 
Packages Installed: {pkgs_installed}" + ) + + show_ret = shell.run( + *(install_salt.binary_paths["pip"] + ["show", dep]), + ) + assert show_ret.returncode == 0 + + assert extras_pypath.joinpath("saf").is_dir() + + ret = shell.run( + *(install_salt.binary_paths["minion"] + ["--versions-report"]), + ) + assert show_ret.returncode == 0 + assert "Salt Extensions" in ret.stdout + assert f"{dep}: {dep_version}" in ret.stdout diff --git a/pkg/tests/integration/test_pip_upgrade.py b/tests/pytests/pkg/integration/test_pip_upgrade.py similarity index 100% rename from pkg/tests/integration/test_pip_upgrade.py rename to tests/pytests/pkg/integration/test_pip_upgrade.py diff --git a/pkg/tests/integration/test_pkg.py b/tests/pytests/pkg/integration/test_pkg.py similarity index 61% rename from pkg/tests/integration/test_pkg.py rename to tests/pytests/pkg/integration/test_pkg.py index ef220b124b69..bb84e5b9e278 100644 --- a/pkg/tests/integration/test_pkg.py +++ b/tests/pytests/pkg/integration/test_pkg.py @@ -1,26 +1,30 @@ import sys +import time import pytest -@pytest.fixture(scope="module") -def grains(salt_call_cli): - ret = salt_call_cli.run("--local", "grains.items") - assert ret.data, ret - return ret.data - - @pytest.fixture(scope="module") def pkg_name(salt_call_cli, grains): if sys.platform.startswith("win"): ret = salt_call_cli.run("--local", "winrepo.update_git_repos") assert ret.returncode == 0 - ret = salt_call_cli.run("--local", "pkg.refresh_db") - assert ret.returncode == 0 + attempts = 3 + while attempts: + attempts -= 1 + ret = salt_call_cli.run("--local", "pkg.refresh_db") + if ret.returncode: + time.sleep(5) + continue + break + else: + pytest.fail("Failed to run 'pkg.refresh_db' 3 times.") return "putty" elif grains["os_family"] == "RedHat": if grains["os"] == "VMware Photon OS": return "snoopy" + elif grains["osfinger"] == "Amazon Linux-2023": + return "dnf-utils" return "units" elif grains["os_family"] == "Debian": return "ifenslave" diff --git 
a/tests/pytests/pkg/integration/test_python.py b/tests/pytests/pkg/integration/test_python.py new file mode 100644 index 000000000000..9b16cea37964 --- /dev/null +++ b/tests/pytests/pkg/integration/test_python.py @@ -0,0 +1,75 @@ +import subprocess +import textwrap + +import pytest + + +@pytest.fixture +def python_script_bin(install_salt): + # Tiamat builds run scripts via `salt python` + if not install_salt.relenv and not install_salt.classic: + return install_salt.binary_paths["python"][:1] + ["python"] + return install_salt.binary_paths["python"] + + +@pytest.fixture +def check_python_file(tmp_path): + script_path = tmp_path / "check_python.py" + script_path.write_text( + textwrap.dedent( + """ + import sys + + import salt.utils.data + + user_arg = sys.argv + + if user_arg[1] == "raise": + raise Exception("test") + + if salt.utils.data.is_true(user_arg[1]): + sys.exit(0) + else: + sys.exit(1) + """ + ) + ) + return script_path + + +@pytest.mark.parametrize("exp_ret,user_arg", [(1, "false"), (0, "true")]) +def test_python_script( + install_salt, exp_ret, user_arg, python_script_bin, check_python_file +): + ret = install_salt.proc.run( + *( + python_script_bin + + [ + str(check_python_file), + user_arg, + ] + ), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + + assert ret.returncode == exp_ret, ret.stderr + + +def test_python_script_exception(install_salt, python_script_bin, check_python_file): + ret = install_salt.proc.run( + *( + python_script_bin + + [ + str(check_python_file), + "raise", + ] + ), + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + check=False, + universal_newlines=True, + ) + assert "Exception: test" in ret.stderr diff --git a/pkg/tests/integration/test_salt_api.py b/tests/pytests/pkg/integration/test_salt_api.py similarity index 100% rename from pkg/tests/integration/test_salt_api.py rename to tests/pytests/pkg/integration/test_salt_api.py diff --git 
a/pkg/tests/integration/test_salt_call.py b/tests/pytests/pkg/integration/test_salt_call.py similarity index 52% rename from pkg/tests/integration/test_salt_call.py rename to tests/pytests/pkg/integration/test_salt_call.py index da7834b70b18..69f434a2c40a 100644 --- a/pkg/tests/integration/test_salt_call.py +++ b/tests/pytests/pkg/integration/test_salt_call.py @@ -1,4 +1,7 @@ +import subprocess + import pytest +from pytestskipmarkers.utils import platform def test_salt_call_local(salt_call_cli): @@ -6,8 +9,8 @@ def test_salt_call_local(salt_call_cli): Test salt-call --local test.ping """ ret = salt_call_cli.run("--local", "test.ping") - assert ret.data is True assert ret.returncode == 0 + assert ret.data is True def test_salt_call(salt_call_cli): @@ -15,19 +18,41 @@ def test_salt_call(salt_call_cli): Test salt-call test.ping """ ret = salt_call_cli.run("test.ping") - assert ret.data is True assert ret.returncode == 0 + assert ret.data is True + + +@pytest.fixture +def state_name(salt_master): + name = "some-test-state" + sls_contents = """ + test_foo: + test.succeed_with_changes: + - name: foo + """ + with salt_master.state_tree.base.temp_file(f"{name}.sls", sls_contents): + if not platform.is_windows() and not platform.is_darwin(): + subprocess.run( + [ + "chown", + "-R", + "salt:salt", + str(salt_master.state_tree.base.write_path), + ], + check=False, + ) + yield name -def test_sls(salt_call_cli): +def test_sls(salt_call_cli, state_name): """ Test calling a sls file """ - ret = salt_call_cli.run("state.apply", "test") - assert ret.data, ret + ret = salt_call_cli.run("state.apply", state_name) + assert ret.returncode == 0 + assert ret.data sls_ret = ret.data[next(iter(ret.data))] assert sls_ret["changes"]["testing"]["new"] == "Something pretended to change" - assert ret.returncode == 0 def test_salt_call_local_sys_doc_none(salt_call_cli): @@ -35,8 +60,8 @@ def test_salt_call_local_sys_doc_none(salt_call_cli): Test salt-call --local sys.doc none """ ret = 
salt_call_cli.run("--local", "sys.doc", "none") - assert not ret.data assert ret.returncode == 0 + assert not ret.data def test_salt_call_local_sys_doc_aliases(salt_call_cli): @@ -44,16 +69,18 @@ def test_salt_call_local_sys_doc_aliases(salt_call_cli): Test salt-call --local sys.doc aliases """ ret = salt_call_cli.run("--local", "sys.doc", "aliases.list_aliases") - assert "aliases.list_aliases" in ret.data assert ret.returncode == 0 + assert "aliases.list_aliases" in ret.data -@pytest.mark.skip_on_windows() -def test_salt_call_cmd_run_id_runas(salt_call_cli, test_account, caplog): +@pytest.mark.skip_on_windows +def test_salt_call_cmd_run_id_runas(salt_call_cli, pkg_tests_account, caplog): """ Test salt-call --local cmd_run id with runas """ - ret = salt_call_cli.run("--local", "cmd.run", "id", runas=test_account.username) + ret = salt_call_cli.run( + "--local", "cmd.run", "id", runas=pkg_tests_account.username + ) assert "Environment could not be retrieved for user" not in caplog.text - assert str(test_account.uid) in ret.stdout - assert str(test_account.gid) in ret.stdout + assert str(pkg_tests_account.info.uid) in ret.stdout + assert str(pkg_tests_account.info.gid) in ret.stdout diff --git a/pkg/tests/integration/test_salt_exec.py b/tests/pytests/pkg/integration/test_salt_exec.py similarity index 65% rename from pkg/tests/integration/test_salt_exec.py rename to tests/pytests/pkg/integration/test_salt_exec.py index ce12d10e4dd2..2e28999d7c3e 100644 --- a/pkg/tests/integration/test_salt_exec.py +++ b/tests/pytests/pkg/integration/test_salt_exec.py @@ -7,15 +7,22 @@ ] -def test_salt_cmd_run(salt_cli, salt_minion): +@pytest.fixture +def cat_file(tmp_path): + fp = tmp_path / "cat-file" + fp.write_text(str(fp)) + return fp + + +def test_salt_cmd_run(salt_cli, salt_minion, cat_file): """ - Test salt cmd.run 'ipconfig' or 'ls -lah /' + Test salt cmd.run 'ipconfig' or 'cat ' """ ret = None if platform.startswith("win"): ret = salt_cli.run("cmd.run", "ipconfig", 
minion_tgt=salt_minion.id) else: - ret = salt_cli.run("cmd.run", "ls -lah /", minion_tgt=salt_minion.id) + ret = salt_cli.run("cmd.run", f"cat {str(cat_file)}", minion_tgt=salt_minion.id) assert ret assert ret.stdout diff --git a/pkg/tests/integration/test_salt_grains.py b/tests/pytests/pkg/integration/test_salt_grains.py similarity index 100% rename from pkg/tests/integration/test_salt_grains.py rename to tests/pytests/pkg/integration/test_salt_grains.py diff --git a/pkg/tests/integration/test_salt_key.py b/tests/pytests/pkg/integration/test_salt_key.py similarity index 100% rename from pkg/tests/integration/test_salt_key.py rename to tests/pytests/pkg/integration/test_salt_key.py diff --git a/pkg/tests/integration/test_salt_minion.py b/tests/pytests/pkg/integration/test_salt_minion.py similarity index 100% rename from pkg/tests/integration/test_salt_minion.py rename to tests/pytests/pkg/integration/test_salt_minion.py diff --git a/pkg/tests/integration/test_salt_output.py b/tests/pytests/pkg/integration/test_salt_output.py similarity index 100% rename from pkg/tests/integration/test_salt_output.py rename to tests/pytests/pkg/integration/test_salt_output.py diff --git a/tests/pytests/pkg/integration/test_salt_pillar.py b/tests/pytests/pkg/integration/test_salt_pillar.py new file mode 100644 index 000000000000..f6cacf14b3ca --- /dev/null +++ b/tests/pytests/pkg/integration/test_salt_pillar.py @@ -0,0 +1,44 @@ +import subprocess + +import pytest +from pytestskipmarkers.utils import platform + +pytestmark = [ + pytest.mark.skip_on_windows, +] + + +@pytest.fixture +def pillar_name(salt_master): + name = "info" + top_file_contents = """ + base: + '*': + - test + """ + test_file_contents = f""" + {name}: test + """ + with salt_master.pillar_tree.base.temp_file( + "top.sls", top_file_contents + ), salt_master.pillar_tree.base.temp_file("test.sls", test_file_contents): + if not platform.is_windows() and not platform.is_darwin(): + subprocess.run( + [ + "chown", + "-R", + 
"salt:salt", + str(salt_master.pillar_tree.base.write_path), + ], + check=False, + ) + yield name + + +def test_salt_pillar(salt_cli, salt_minion, pillar_name): + """ + Test pillar.items + """ + ret = salt_cli.run("pillar.items", minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert pillar_name in ret.data diff --git a/tests/pytests/pkg/integration/test_salt_state_file.py b/tests/pytests/pkg/integration/test_salt_state_file.py new file mode 100644 index 000000000000..7b71fcb2365e --- /dev/null +++ b/tests/pytests/pkg/integration/test_salt_state_file.py @@ -0,0 +1,70 @@ +import subprocess +import types + +import pytest +from pytestskipmarkers.utils import platform +from saltfactories.utils.functional import MultiStateResult + + +@pytest.fixture +def files(tmp_path): + return types.SimpleNamespace( + fpath_1=tmp_path / "fpath_1.txt", + fpath_2=tmp_path / "fpath_2.txt", + fpath_3=tmp_path / "fpath_3.txt", + ) + + +@pytest.fixture +def state_name(files, salt_master): + name = "some-state" + sls_contents = f""" + create-fpath-1-file: + file.managed: + - name: {files.fpath_1} + + create-fpath-2-file: + file.managed: + - name: {files.fpath_2} + + create-fpath-3-file: + file.managed: + - name: {files.fpath_3} + """ + assert files.fpath_1.exists() is False + assert files.fpath_2.exists() is False + assert files.fpath_3.exists() is False + with salt_master.state_tree.base.temp_file(f"{name}.sls", sls_contents): + if not platform.is_windows() and not platform.is_darwin(): + subprocess.run( + [ + "chown", + "-R", + "salt:salt", + str(salt_master.state_tree.base.write_path), + ], + check=False, + ) + yield name + + +def test_salt_state_file(salt_cli, salt_minion, state_name, files): + """ + Test state file + """ + assert files.fpath_1.exists() is False + assert files.fpath_2.exists() is False + assert files.fpath_3.exists() is False + + ret = salt_cli.run("state.apply", state_name, minion_tgt=salt_minion.id) + assert ret.returncode == 0 + assert ret.data + if 
ret.stdout and "Minion did not return" in ret.stdout: + pytest.skip("Skipping test, state took too long to apply") + + for state_return in MultiStateResult(ret.data): + assert state_return.result is True + + assert files.fpath_1.exists() is True + assert files.fpath_2.exists() is True + assert files.fpath_3.exists() is True diff --git a/tests/pytests/pkg/integration/test_salt_ufw.py b/tests/pytests/pkg/integration/test_salt_ufw.py new file mode 100644 index 000000000000..2164de85c572 --- /dev/null +++ b/tests/pytests/pkg/integration/test_salt_ufw.py @@ -0,0 +1,38 @@ +import pathlib + +import pytest + + +@pytest.mark.skip_on_windows +@pytest.mark.skip_if_binaries_missing("ufw") +def test_salt_ufw(salt_master, salt_call_cli, install_salt): + """ + Test salt.ufw for Debian/Ubuntu salt-master + """ + if install_salt.distro_id not in ("debian", "ubuntu"): + pytest.skip("Only tests Debian / Ubuntu packages") + + # check that the salt_master is running + assert salt_master.is_running() + + ufw_master_path = pathlib.Path("/etc/ufw/applications.d/salt.ufw") + assert ufw_master_path.exists() + assert ufw_master_path.is_file() + + ufw_list_cmd = "/usr/sbin/ufw app list" + ret = salt_call_cli.run("--local", "cmd.run", ufw_list_cmd) + assert "Available applications" in ret.stdout + assert "Salt" in ret.stdout + ufw_upd_cmd = "/usr/sbin/ufw app update Salt" + ret = salt_call_cli.run("--local", "cmd.run", ufw_upd_cmd) + assert ret.returncode == 0 + expected_info = """Profile: Salt +Title: salt +Description: fast and powerful configuration management and remote +execution + +Ports: + 4505,4506/tcp""" + ufw_info_cmd = "/usr/sbin/ufw app info Salt" + ret = salt_call_cli.run("--local", "cmd.run", ufw_info_cmd) + assert expected_info in ret.data diff --git a/tests/pytests/pkg/integration/test_salt_user.py b/tests/pytests/pkg/integration/test_salt_user.py new file mode 100644 index 000000000000..4538ce79adbb --- /dev/null +++ b/tests/pytests/pkg/integration/test_salt_user.py @@ -0,0 
+1,363 @@ +import os +import pathlib +import subprocess +import sys + +import packaging.version +import psutil +import pytest +from saltfactories.utils.tempfiles import temp_directory + +pytestmark = [ + pytest.mark.skip_on_windows, + pytest.mark.skip_on_darwin, +] + + +@pytest.fixture +def pkg_paths(): + """ + Paths created by package installs + """ + paths = [ + "/etc/salt", + "/var/cache/salt", + "/var/log/salt", + "/var/run/salt", + "/opt/saltstack/salt", + ] + return paths + + +@pytest.fixture +def pkg_paths_salt_user(): + """ + Paths created by package installs and owned by salt user + """ + return [ + "/etc/salt/cloud.deploy.d", + "/var/log/salt/cloud", + "/opt/saltstack/salt/lib/python{}.{}/site-packages/salt/cloud/deploy".format( + *sys.version_info + ), + "/etc/salt/pki/master", + "/etc/salt/master.d", + "/var/log/salt/master", + "/var/log/salt/api", + "/var/log/salt/key", + "/var/cache/salt/master", + "/var/run/salt/master", + ] + + +@pytest.fixture +def pkg_paths_salt_user_exclusions(): + """ + Exclusions from paths created by package installs and owned by salt user + """ + paths = [ + "/var/cache/salt/master/.root_key" # written by salt, salt-run and salt-key as root + ] + return paths + + +@pytest.fixture(autouse=True) +def _skip_on_non_relenv(install_salt): + if not install_salt.relenv: + pytest.skip("The salt user only exists on relenv versions of salt") + + +def test_salt_user_master(salt_master, install_salt): + """ + Test the correct user is running the Salt Master + """ + match = False + for proc in psutil.Process(salt_master.pid).children(): + assert proc.username() == "salt" + match = True + + assert match + + +def test_salt_user_home(install_salt): + """ + Test the salt user's home is /opt/saltstack/salt + """ + proc = subprocess.run( + ["getent", "passwd", "salt"], check=False, capture_output=True + ) + assert proc.returncode == 0 + home = "" + try: + home = proc.stdout.decode().split(":")[5] + except Exception: # pylint: 
disable=broad-except + pass + assert home == "/opt/saltstack/salt" + + +def test_salt_user_group(install_salt): + """ + Test the salt user is in the salt group + """ + proc = subprocess.run(["id", "salt"], check=False, capture_output=True) + assert proc.returncode == 0 + in_group = False + try: + for group in proc.stdout.decode().split(" "): + if "salt" in group: + in_group = True + except Exception: # pylint: disable=broad-except + pass + assert in_group is True + + +def test_salt_user_shell(install_salt): + """ + Test the salt user's login shell + """ + proc = subprocess.run( + ["getent", "passwd", "salt"], check=False, capture_output=True + ) + assert proc.returncode == 0 + shell = "" + shell_exists = False + try: + shell = proc.stdout.decode().split(":")[6].strip() + shell_exists = pathlib.Path(shell).exists() + except Exception: # pylint: disable=broad-except + pass + assert shell_exists is True + + +def test_pkg_paths( + install_salt, pkg_paths, pkg_paths_salt_user, pkg_paths_salt_user_exclusions +): + """ + Test package paths ownership + """ + if packaging.version.parse(install_salt.version) <= packaging.version.parse( + "3006.4" + ): + pytest.skip("Package path ownership was changed in salt 3006.4") + salt_user_subdirs = [] + for _path in pkg_paths: + pkg_path = pathlib.Path(_path) + assert pkg_path.exists() + for dirpath, sub_dirs, files in os.walk(pkg_path): + path = pathlib.Path(dirpath) + # Directories owned by salt:salt or their subdirs/files + if ( + str(path) in pkg_paths_salt_user or str(path) in salt_user_subdirs + ) and str(path) not in pkg_paths_salt_user_exclusions: + assert path.owner() == "salt" + assert path.group() == "salt" + salt_user_subdirs.extend( + [str(path.joinpath(sub_dir)) for sub_dir in sub_dirs] + ) + # Individual files owned by salt user + for file in files: + file_path = path.joinpath(file) + if str(file_path) not in pkg_paths_salt_user_exclusions: + assert file_path.owner() == "salt" + # Directories owned by root:root + else: 
+ assert path.owner() == "root" + assert path.group() == "root" + for file in files: + file_path = path.joinpath(file) + # Individual files owned by salt user + if str(file_path) in pkg_paths_salt_user: + assert file_path.owner() == "salt" + else: + assert file_path.owner() == "root" + assert file_path.group() == "root" + + +@pytest.mark.skip_if_binaries_missing("logrotate") +def test_paths_log_rotation( + salt_master, salt_minion, salt_call_cli, install_salt, pkg_tests_account +): + """ + Test the correct ownership is assigned when log rotation occurs + Change the user in the Salt Master, change ownership, force log rotation + Check ownership and permissions. + Assumes test_pkg_paths successful + """ + if packaging.version.parse(install_salt.version) <= packaging.version.parse( + "3006.4" + ): + pytest.skip("Package path ownership was changed in salt 3006.4") + + if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora"): + pytest.skip( + "Only tests RedHat family packages till logrotation paths are resolved on Ubuntu/Debian, see issue 65231" + ) + + # check that the salt_master is running + assert salt_master.is_running() + match = False + for proc in psutil.Process(salt_master.pid).children(): + assert proc.username() == "salt" + match = True + + assert match + + # Paths created by package installs with adjustment for current conf_dir /etc/salt + log_pkg_paths = [ + install_salt.conf_dir, # "bkup0" + "/var/cache/salt", # "bkup1" + "/var/log/salt", # "bkup2" + "/var/run/salt", # "bkup3" + "/opt/saltstack/salt", # "bkup4" + ] + + # backup those about to change + bkup_count = 0 + bkup_count_max = 5 + with temp_directory("bkup0") as temp_dir_path_0: + with temp_directory("bkup1") as temp_dir_path_1: + with temp_directory("bkup2") as temp_dir_path_2: + with temp_directory("bkup3") as temp_dir_path_3: + with temp_directory("bkup4") as temp_dir_path_4: + + assert temp_dir_path_0.is_dir() + assert temp_dir_path_1.is_dir() + assert temp_dir_path_2.is_dir() + 
assert temp_dir_path_3.is_dir() + assert temp_dir_path_4.is_dir() + + # stop the salt_master, so can change user + with salt_master.stopped(): + assert salt_master.is_running() is False + + for _path in log_pkg_paths: + if bkup_count == 0: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_0)}/" + ) + elif bkup_count == 1: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_1)}/" + ) + elif bkup_count == 2: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_2)}/" + ) + elif bkup_count == 3: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_3)}/" + ) + elif bkup_count == 4: + cmd_to_run = ( + f"cp -a {_path}/* {str(temp_dir_path_4)}/" + ) + elif bkup_count > 5: + # force assertion + assert bkup_count < bkup_count_max + + ret = salt_call_cli.run( + "--local", "cmd.run", cmd_to_run + ) + bkup_count += 1 + assert ret.returncode == 0 + + # change the user in the master's config file. + ret = salt_call_cli.run( + "--local", + "file.replace", + f"{install_salt.conf_dir}/master", + "user: salt", + f"user: {pkg_tests_account.username}", + "flags=['IGNORECASE']", + "append_if_not_found=True", + ) + assert ret.returncode == 0 + + # change ownership of appropriate paths to user + for _path in log_pkg_paths: + chg_ownership_cmd = ( + f"chown -R {pkg_tests_account.username} {_path}" + ) + ret = salt_call_cli.run( + "--local", "cmd.run", chg_ownership_cmd + ) + assert ret.returncode == 0 + + # restart the salt_master + with salt_master.started(): + assert salt_master.is_running() is True + + # ensure some data in files + log_files_list = [ + "/var/log/salt/api", + "/var/log/salt/key", + "/var/log/salt/master", + ] + for _path in log_files_list: + log_path = pathlib.Path(_path) + assert log_path.exists() + with log_path.open("a") as f: + f.write("This is a log rotation test\n") + + # force log rotation + logr_conf_file = "/etc/logrotate.d/salt" + logr_conf_path = pathlib.Path(logr_conf_file) + if not logr_conf_path.exists(): + logr_conf_file = 
"/etc/logrotate.conf" + logr_conf_path = pathlib.Path(logr_conf_file) + assert logr_conf_path.exists() + + # force log rotation + log_rotate_cmd = f"logrotate -f {logr_conf_file}" + ret = salt_call_cli.run( + "--local", "cmd.run", log_rotate_cmd + ) + assert ret.returncode == 0 + + for _path in log_files_list: + log_path = pathlib.Path(_path) + assert log_path.exists() + assert ( + log_path.owner() == pkg_tests_account.username + ) + assert log_path.stat().st_mode & 0o7777 == 0o640 + + # cleanup + assert salt_master.is_running() is False + + # change the user in the master's config file. + ret = salt_call_cli.run( + "--local", + "file.replace", + f"{install_salt.conf_dir}/master", + f"user: {pkg_tests_account.username}", + "user: salt", + "flags=['IGNORECASE']", + "append_if_not_found=True", + ) + assert ret.returncode == 0 + + # restore from backed up + bkup_count = 0 + for _path in log_pkg_paths: + if bkup_count == 0: + cmd_to_run = f"cp -a --force {str(temp_dir_path_0)}/* {_path}/" + elif bkup_count == 1: + cmd_to_run = f"cp -a --force {str(temp_dir_path_1)}/* {_path}/" + elif bkup_count == 2: + cmd_to_run = f"cp -a --force {str(temp_dir_path_2)}/* {_path}/" + elif bkup_count == 3: + cmd_to_run = f"cp -a --force {str(temp_dir_path_3)}/* {_path}/" + elif bkup_count == 4: + # use --update since /opt/saltstack/salt and would get SIGSEGV since mucking with running code + cmd_to_run = f"cp -a --update --force {str(temp_dir_path_4)}/* {_path}/" + elif bkup_count > 5: + # force assertion + assert bkup_count < bkup_count_max + + ret = salt_call_cli.run( + "--local", "cmd.run", cmd_to_run + ) + + bkup_count += 1 + assert ret.returncode == 0 diff --git a/tests/pytests/pkg/integration/test_ssm.py b/tests/pytests/pkg/integration/test_ssm.py new file mode 100644 index 000000000000..059766caf17d --- /dev/null +++ b/tests/pytests/pkg/integration/test_ssm.py @@ -0,0 +1,15 @@ +import os + +import pytest + +pytestmark = [ + pytest.mark.skip_unless_on_windows, +] + + +def 
test_ssm_present(install_salt): + """ + The ssm.exe binary needs to be present in both the zip and the exe/msi + builds + """ + assert os.path.exists(install_salt.ssm_bin) diff --git a/tests/pytests/pkg/integration/test_systemd_config.py b/tests/pytests/pkg/integration/test_systemd_config.py new file mode 100644 index 000000000000..6c530b51db24 --- /dev/null +++ b/tests/pytests/pkg/integration/test_systemd_config.py @@ -0,0 +1,42 @@ +import subprocess + +import pytest + +pytestmark = [ + pytest.mark.skip_on_windows(reason="Linux test only"), +] + + +@pytest.mark.usefixtures("salt_minion") +def test_system_config(grains): + """ + Test system config + """ + if grains["os_family"] == "RedHat": + if grains["osfinger"] in ( + "CentOS Stream-8", + "CentOS Linux-8", + "CentOS Stream-9", + "Fedora Linux-36", + "VMware Photon OS-3", + "VMware Photon OS-4", + "VMware Photon OS-5", + "Amazon Linux-2023", + ): + expected_retcode = 0 + else: + expected_retcode = 1 + ret = subprocess.call( + "systemctl show -p ${config} salt-minion.service", shell=True + ) + assert ret == expected_retcode + + elif grains["os_family"] == "Debian": + if grains["osfinger"] == "Debian-9": + expected_retcode = 1 + else: + expected_retcode = 0 + ret = subprocess.call( + "systemctl show -p ${config} salt-minion.service", shell=True + ) + assert ret == expected_retcode diff --git a/pkg/tests/integration/test_version.py b/tests/pytests/pkg/integration/test_version.py similarity index 71% rename from pkg/tests/integration/test_version.py rename to tests/pytests/pkg/integration/test_version.py index 2b45ecbfd7a4..121aa6fd5506 100644 --- a/pkg/tests/integration/test_version.py +++ b/tests/pytests/pkg/integration/test_version.py @@ -23,12 +23,15 @@ def test_salt_versions_report_master(install_salt): """ Test running --versions-report on master """ + if not install_salt.relenv and not install_salt.classic: + pytest.skip("Unable to get the python version dynamically from tiamat builds") test_bin = 
os.path.join(*install_salt.binary_paths["master"]) python_bin = os.path.join(*install_salt.binary_paths["python"]) ret = install_salt.proc.run(test_bin, "--versions-report") ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"]) py_version = subprocess.run( [str(python_bin), "--version"], + check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, ).stdout @@ -41,7 +44,22 @@ def test_salt_versions_report_minion(salt_cli, salt_minion): """ Test running test.versions_report on minion """ - ret = salt_cli.run("test.versions_report", minion_tgt=salt_minion.id) + # Make sure the minion is running + assert salt_minion.is_running() + # Make sure we can ping the minion ... + ret = salt_cli.run( + "--timeout=240", "test.ping", minion_tgt=salt_minion.id, _timeout=240 + ) + assert ret.returncode == 0 + assert ret.data is True + ret = salt_cli.run( + "--hard-crash", + "--failhard", + "--timeout=240", + "test.versions_report", + minion_tgt=salt_minion.id, + _timeout=240, + ) ret.stdout.matcher.fnmatch_lines(["*Salt Version:*"]) @@ -52,17 +70,20 @@ def test_compare_versions(version, binary, install_salt): """ Test compare versions """ - if platform.is_windows() and install_salt.singlebin: - pytest.skip( - "Already tested in `test_salt_version`. No need to repeat for " - "Windows single binary installs." 
- ) - if binary in install_salt.binary_paths: - ret = install_salt.proc.run(*install_salt.binary_paths[binary], "--version") + ret = install_salt.proc.run( + *install_salt.binary_paths[binary], + "--version", + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) ret.stdout.matcher.fnmatch_lines([f"*{version}*"]) else: - pytest.skip(f"Binary not available: {binary}") + if platform.is_windows(): + pytest.skip(f"Binary not available on windows: {binary}") + pytest.fail( + f"Platform is not Windows and yet the binary {binary!r} is not available" + ) @pytest.mark.skip_unless_on_darwin() @@ -89,11 +110,10 @@ def test_symlinks_created(version, symlink, install_salt): """ Test symlinks created """ - if not install_salt.installer_pkg: - pytest.skip( - "This test is for the installer package only (pkg). It does not " - "apply to the tarball" - ) + if install_salt.classic: + pytest.skip("Symlinks not created for classic macos builds, we adjust the path") + if not install_salt.relenv and symlink == "spm": + symlink = "salt-spm" ret = install_salt.proc.run(pathlib.Path("/usr/local/sbin") / symlink, "--version") ret.stdout.matcher.fnmatch_lines([f"*{version}*"]) @@ -106,14 +126,14 @@ def test_compare_pkg_versions_redhat_rc(version, install_salt): package of the same version. For example, v3004~rc1 should be less than v3004. 
""" - if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora"): + if install_salt.distro_id not in ("centos", "redhat", "amzn", "fedora", "photon"): pytest.skip("Only tests rpm packages") pkg = [x for x in install_salt.pkgs if "rpm" in x] if not pkg: pytest.skip("Not testing rpm packages") pkg = pkg[0].split("/")[-1] - if "rc" not in pkg: + if "rc" not in ".".join(pkg.split(".")[:2]): pytest.skip("Not testing an RC package") assert "~" in pkg comp_pkg = pkg.split("~")[0] diff --git a/tests/pytests/functional/utils/pyinstaller/rthooks/__init__.py b/tests/pytests/pkg/upgrade/__init__.py similarity index 100% rename from tests/pytests/functional/utils/pyinstaller/rthooks/__init__.py rename to tests/pytests/pkg/upgrade/__init__.py diff --git a/tests/pytests/pkg/upgrade/test_salt_upgrade.py b/tests/pytests/pkg/upgrade/test_salt_upgrade.py new file mode 100644 index 000000000000..d844b5807da2 --- /dev/null +++ b/tests/pytests/pkg/upgrade/test_salt_upgrade.py @@ -0,0 +1,44 @@ +import packaging.version +import pytest + + +def test_salt_upgrade(salt_call_cli, install_salt): + """ + Test an upgrade of Salt. 
+ """ + if not install_salt.upgrade: + pytest.skip("Not testing an upgrade, do not run") + + if install_salt.relenv: + original_py_version = install_salt.package_python_version() + + # Verify previous install version is setup correctly and works + ret = salt_call_cli.run("test.version") + assert ret.returncode == 0 + assert packaging.version.parse(ret.data) < packaging.version.parse( + install_salt.artifact_version + ) + + # Test pip install before an upgrade + dep = "PyGithub==1.56.0" + install = salt_call_cli.run("--local", "pip.install", dep) + assert install.returncode == 0 + + # Verify we can use the module dependent on the installed package + repo = "https://github.com/saltstack/salt.git" + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) + assert "Authentication information could" in use_lib.stderr + + # Upgrade Salt from previous version and test + install_salt.install(upgrade=True) + ret = salt_call_cli.run("test.version") + assert ret.returncode == 0 + assert packaging.version.parse(ret.data) == packaging.version.parse( + install_salt.artifact_version + ) + + if install_salt.relenv: + new_py_version = install_salt.package_python_version() + if new_py_version == original_py_version: + # test pip install after an upgrade + use_lib = salt_call_cli.run("--local", "github.get_repo_info", repo) diff --git a/tests/pytests/scenarios/compat/test_with_versions.py b/tests/pytests/scenarios/compat/test_with_versions.py index 75a2b87f24c3..ecb3a73de1a8 100644 --- a/tests/pytests/scenarios/compat/test_with_versions.py +++ b/tests/pytests/scenarios/compat/test_with_versions.py @@ -29,7 +29,7 @@ def _get_test_versions_ids(value): - return "SaltMinion~={}".format(value) + return f"SaltMinion~={value}" @pytest.fixture( @@ -41,13 +41,13 @@ def compat_salt_version(request): @pytest.fixture(scope="module") def minion_image_name(compat_salt_version): - return "salt-{}".format(compat_salt_version) + return f"salt-{compat_salt_version}" 
@pytest.fixture(scope="function") def minion_id(compat_salt_version): return random_string( - "salt-{}-".format(compat_salt_version), + f"salt-{compat_salt_version}-", uppercase=False, ) @@ -70,7 +70,10 @@ def salt_minion( config_overrides = { "master": salt_master.config["interface"], "user": False, - "pytest-minion": {"log": {"host": host_docker_network_ip_address}}, + "pytest-minion": { + "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, + }, # We also want to scrutinize the key acceptance "open_mode": False, } diff --git a/tests/pytests/scenarios/failover/multimaster/test_failover_master.py b/tests/pytests/scenarios/failover/multimaster/test_failover_master.py index d811372f4a82..6efecfb83342 100644 --- a/tests/pytests/scenarios/failover/multimaster/test_failover_master.py +++ b/tests/pytests/scenarios/failover/multimaster/test_failover_master.py @@ -6,7 +6,7 @@ import pytest pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.skip_on_freebsd(reason="Processes are not properly killed on FreeBSD"), ] diff --git a/tests/pytests/scenarios/multimaster/test_multimaster.py b/tests/pytests/scenarios/multimaster/test_multimaster.py index 9b239ad4009b..8f8c8cf9055d 100644 --- a/tests/pytests/scenarios/multimaster/test_multimaster.py +++ b/tests/pytests/scenarios/multimaster/test_multimaster.py @@ -9,7 +9,7 @@ ECHO_STR = "The FitnessGram Pacer Test is a multistage aerobic capacity test" pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, ] diff --git a/tests/pytests/scenarios/multimaster/test_offline_master.py b/tests/pytests/scenarios/multimaster/test_offline_master.py index d5995803e513..fe8f6d28bf0b 100644 --- a/tests/pytests/scenarios/multimaster/test_offline_master.py +++ b/tests/pytests/scenarios/multimaster/test_offline_master.py @@ -3,7 +3,7 @@ import pytest pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, ] diff --git 
a/tests/pytests/scenarios/performance/conftest.py b/tests/pytests/scenarios/performance/conftest.py index d12451b123af..13fbb831d7c9 100644 --- a/tests/pytests/scenarios/performance/conftest.py +++ b/tests/pytests/scenarios/performance/conftest.py @@ -5,17 +5,10 @@ import shutil import pytest -from saltfactories.daemons.container import Container +from saltfactories.utils import random_string -import salt.utils.path from tests.support.sminion import create_sminion -docker = pytest.importorskip("docker") -# pylint: disable=3rd-party-module-not-gated,no-name-in-module -from docker.errors import DockerException # isort:skip - -# pylint: enable=3rd-party-module-not-gated,no-name-in-module - pytestmark = [ pytest.mark.slow_test, pytest.mark.skip_if_binaries_missing("docker"), @@ -26,44 +19,29 @@ @pytest.fixture(scope="session") -def docker_client(): - if docker is None: - pytest.skip("The docker python library is not available") - - if salt.utils.path.which("docker") is None: - pytest.skip("The docker binary is not available") - try: - client = docker.from_env() - connectable = Container.client_connectable(client) - if connectable is not True: # pragma: no cover - pytest.skip(connectable) - return client - except DockerException: - pytest.skip("Failed to get a connection to docker running on the system") - - -@pytest.fixture(scope="session") -def network(): - return "salt-performance" +def docker_network_name(): + return random_string("salt-perf-", uppercase=False) @pytest.fixture(scope="session") -def host_docker_network_ip_address(network): +def host_docker_network_ip_address(docker_network_name): sminion = create_sminion() - network_name = network - network_subnet = "10.0.20.0/24" - network_gateway = "10.0.20.1" + network_subnet = "10.0.21.0/24" + network_gateway = "10.0.21.1" try: ret = sminion.states.docker_network.present( - network_name, + docker_network_name, driver="bridge", ipam_pools=[{"subnet": network_subnet, "gateway": network_gateway}], ) assert 
isinstance(ret, dict), ret - assert ret["result"], "Failed to create docker network: {}".format(ret) + try: + assert ret["result"] + except AssertionError: + pytest.skip("Failed to create docker network: {}".format(ret)) yield network_gateway finally: - sminion.states.docker_network.absent(network_name) + sminion.states.docker_network.absent(docker_network_name) @pytest.fixture(scope="session") diff --git a/tests/pytests/scenarios/performance/test_performance.py b/tests/pytests/scenarios/performance/test_performance.py index 8f3fa0ac9881..e9e0d0def658 100644 --- a/tests/pytests/scenarios/performance/test_performance.py +++ b/tests/pytests/scenarios/performance/test_performance.py @@ -1,50 +1,55 @@ +import logging import os import shutil -import time +import sys import pytest from pytestshellutils.utils import ports -from saltfactories.daemons import master -from saltfactories.daemons.container import SaltDaemon, SaltMinion +from saltfactories.daemons.container import SaltMaster, SaltMinion from saltfactories.utils import random_string -from salt.version import SaltVersionsInfo, __version__ +from salt.version import SaltVersionsInfo +from tests.conftest import CODE_DIR -pytestmark = [pytest.mark.skip_if_binaries_missing("docker")] +log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.skip_on_photonos, + pytest.mark.skip_if_binaries_missing("docker"), +] -class ContainerMaster(SaltDaemon, master.SaltMaster): - """ - Containerized salt master that has no check events - """ - - def get_display_name(self): - return master.SaltMaster.get_display_name(self) - - def get_check_events(self): - return [] +@pytest.fixture +def prev_version(): + return str(SaltVersionsInfo.previous_release().info[0]) -class ContainerMinion(SaltMinion): - """ - Containerized salt minion that has no check events - """ - def get_check_events(self): - return [] +@pytest.fixture +def prev_container_image(shell, prev_version): + container = 
f"ghcr.io/saltstack/salt-ci-containers/salt:{prev_version}" + ret = shell.run("docker", "pull", container, check=False) + if ret.returncode: + pytest.skip(f"Failed to pull docker image '{container}':\n{ret}") + return container -# ---------------------- Previous Version Setup ---------------------- +@pytest.fixture +def curr_version(): + return str(SaltVersionsInfo.current_release().info[0]) @pytest.fixture -def prev_version(): - return str(SaltVersionsInfo.previous_release().info[0]) +def curr_container_image(shell): + container = "ghcr.io/saltstack/salt-ci-containers/salt:latest" + ret = shell.run("docker", "pull", container, check=False) + if ret.returncode: + pytest.skip(f"Failed to pull docker image '{container}':\n{ret}") + return container @pytest.fixture def prev_master_id(): - return random_string("master-performance-prev-", uppercase=False) + return random_string("master-perf-prev-", uppercase=False) @pytest.fixture @@ -52,10 +57,10 @@ def prev_master( request, salt_factories, host_docker_network_ip_address, - network, + docker_network_name, prev_version, - docker_client, prev_master_id, + prev_container_image, ): root_dir = salt_factories.get_root_dir_for_daemon(prev_master_id) conf_dir = root_dir / "conf" @@ -64,35 +69,36 @@ def prev_master( config_defaults = { "root_dir": str(root_dir), "transport": request.config.getoption("--transport"), - "user": False, + "user": "root", } - publish_port = ports.get_unused_localhost_port() - ret_port = ports.get_unused_localhost_port() config_overrides = { + "open_mode": True, "interface": "0.0.0.0", - "publish_port": publish_port, - "ret_port": ret_port, + "publish_port": ports.get_unused_localhost_port(), + "ret_port": ports.get_unused_localhost_port(), "log_level_logfile": "quiet", "pytest-master": { "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, }, } factory = salt_factories.salt_master_daemon( prev_master_id, + name=prev_master_id, 
defaults=config_defaults, overrides=config_overrides, - factory_class=ContainerMaster, - image="ghcr.io/saltstack/salt-ci-containers/salt:{}".format(prev_version), + factory_class=SaltMaster, base_script_args=["--log-level=debug"], + image=prev_container_image, container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": prev_master_id, }, - docker_client=docker_client, - name=prev_master_id, start_timeout=120, - max_start_attempts=1, + max_start_attempts=3, + pull_before_start=False, + skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) with factory.started(): @@ -117,7 +123,7 @@ def prev_salt_run_cli(prev_master): @pytest.fixture def prev_minion_id(): return random_string( - "minion-performance-prev-", + "minion-perf-prev-", uppercase=False, ) @@ -126,34 +132,38 @@ def prev_minion_id(): def prev_minion( prev_minion_id, prev_master, - docker_client, prev_version, host_docker_network_ip_address, - network, - prev_master_id, + docker_network_name, + prev_container_image, ): config_overrides = { - "master": prev_master_id, - "user": False, - "pytest-minion": {"log": {"host": host_docker_network_ip_address}}, + "master": prev_master.id, + "open_mode": True, + "user": "root", + "pytest-minion": { + "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, + }, } factory = prev_master.salt_minion_daemon( prev_minion_id, - overrides=config_overrides, - factory_class=ContainerMinion, - # SaltMinion kwargs name=prev_minion_id, - image="ghcr.io/saltstack/salt-ci-containers/salt:{}".format(prev_version), - docker_client=docker_client, - start_timeout=120, - pull_before_start=False, - skip_if_docker_client_not_connectable=True, + overrides=config_overrides, + factory_class=SaltMinion, + base_script_args=["--log-level=debug"], + image=prev_container_image, container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": prev_minion_id, }, - 
max_start_attempts=1, + start_timeout=120, + max_start_attempts=3, + pull_before_start=False, + skip_on_pull_failure=True, + skip_if_docker_client_not_connectable=True, ) + factory.python_executable = "python3" factory.after_terminate( pytest.helpers.remove_stale_minion_key, prev_master, factory.id ) @@ -167,21 +177,38 @@ def prev_sls(sls_contents, state_tree, tmp_path): location = tmp_path / "prev" / "testfile" location.parent.mkdir() with pytest.helpers.temp_file( - "{}.sls".format(sls_name), sls_contents.format(path=str(location)), state_tree + f"{sls_name}.sls", sls_contents.format(path=str(location)), state_tree ): yield sls_name -# ---------------------- Current Version Setup ---------------------- - - -def _install_local_salt(factory): - factory.run("pip install /saltcode") +def _install_salt_in_container(container): + ret = container.run( + "python3", + "-c", + "import sys; sys.stderr.write('{}.{}'.format(*sys.version_info))", + ) + assert ret.returncode == 0 + if not ret.stdout: + requirements_py_version = "{}.{}".format(*sys.version_info) + else: + requirements_py_version = ret.stdout.strip() + + ret = container.run( + "python3", + "-m", + "pip", + "install", + f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt", + "/salt", + ) + log.debug("Install Salt in the container: %s", ret) + assert ret.returncode == 0 @pytest.fixture def curr_master_id(): - return random_string("master-performance-", uppercase=False) + return random_string("master-perf-curr-", uppercase=False) @pytest.fixture @@ -189,10 +216,9 @@ def curr_master( request, salt_factories, host_docker_network_ip_address, - network, - prev_version, - docker_client, + docker_network_name, curr_master_id, + curr_container_image, ): root_dir = salt_factories.get_root_dir_for_daemon(curr_master_id) conf_dir = root_dir / "conf" @@ -201,43 +227,46 @@ def curr_master( config_defaults = { "root_dir": str(root_dir), "transport": request.config.getoption("--transport"), - "user": 
False, + "user": "root", } publish_port = ports.get_unused_localhost_port() ret_port = ports.get_unused_localhost_port() config_overrides = { + "open_mode": True, "interface": "0.0.0.0", "publish_port": publish_port, "ret_port": ret_port, "log_level_logfile": "quiet", "pytest-master": { "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, }, } factory = salt_factories.salt_master_daemon( curr_master_id, + name=curr_master_id, defaults=config_defaults, overrides=config_overrides, - factory_class=ContainerMaster, - image="ghcr.io/saltstack/salt-ci-containers/salt:{}".format(prev_version), + factory_class=SaltMaster, base_script_args=["--log-level=debug"], + image=curr_container_image, container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": curr_master_id, # Bind the current code to a directory for pip installing "volumes": { - os.environ["REPO_ROOT_DIR"]: {"bind": "/saltcode", "mode": "z"} + str(CODE_DIR): {"bind": "/salt", "mode": "z"}, }, }, - docker_client=docker_client, - name=curr_master_id, start_timeout=120, - max_start_attempts=1, + max_start_attempts=3, + pull_before_start=False, + skip_on_pull_failure=True, skip_if_docker_client_not_connectable=True, ) - factory.before_start(_install_local_salt, factory) + factory.before_start(_install_salt_in_container, factory) with factory.started(): yield factory @@ -260,7 +289,7 @@ def curr_salt_key_cli(curr_master): @pytest.fixture def curr_minion_id(): return random_string( - "minion-performance-curr-", + "minion-perf-curr-", uppercase=False, ) @@ -269,39 +298,41 @@ def curr_minion_id(): def curr_minion( curr_minion_id, curr_master, - docker_client, - prev_version, host_docker_network_ip_address, - network, - curr_master_id, + docker_network_name, + curr_container_image, ): config_overrides = { - "master": curr_master_id, - "user": False, - "pytest-minion": {"log": {"host": host_docker_network_ip_address}}, + "master": 
curr_master.id, + "open_mode": True, + "user": "root", + "pytest-minion": { + "log": {"host": host_docker_network_ip_address}, + "returner_address": {"host": host_docker_network_ip_address}, + }, } factory = curr_master.salt_minion_daemon( curr_minion_id, - overrides=config_overrides, - factory_class=ContainerMinion, - # SaltMinion kwargs name=curr_minion_id, - image="ghcr.io/saltstack/salt-ci-containers/salt:{}".format(prev_version), - docker_client=docker_client, - start_timeout=120, - pull_before_start=False, - skip_if_docker_client_not_connectable=True, + overrides=config_overrides, + factory_class=SaltMinion, + base_script_args=["--log-level=debug"], + image=curr_container_image, container_run_kwargs={ - "network": network, + "network": docker_network_name, "hostname": curr_minion_id, # Bind the current code to a directory for pip installing "volumes": { - os.environ["REPO_ROOT_DIR"]: {"bind": "/saltcode", "mode": "z"} + str(CODE_DIR): {"bind": "/salt", "mode": "z"}, }, }, - max_start_attempts=1, + start_timeout=120, + max_start_attempts=3, + pull_before_start=False, + skip_on_pull_failure=True, + skip_if_docker_client_not_connectable=True, ) - factory.before_start(_install_local_salt, factory) + factory.before_start(_install_salt_in_container, factory) factory.after_terminate( pytest.helpers.remove_stale_minion_key, curr_master, factory.id ) @@ -315,25 +346,25 @@ def curr_sls(sls_contents, state_tree, tmp_path): location = tmp_path / "curr" / "testfile" location.parent.mkdir() with pytest.helpers.temp_file( - "{}.sls".format(sls_name), sls_contents.format(path=str(location)), state_tree + f"{sls_name}.sls", sls_contents.format(path=str(location)), state_tree ): yield sls_name -def _wait_for_stdout(expected, func, *args, timeout=120, **kwargs): - start = time.time() - while time.time() < start + timeout: - ret = func(*args, **kwargs) - if ret and ret.stdout and expected in ret.stdout: - break - time.sleep(1) - else: - pytest.skip( - f"Skipping test, one or 
more daemons failed to start: {expected} NOT FOUND IN {ret}" - ) +@pytest.fixture +def perf_state_name(state_tree, curr_master, prev_master): + + # Copy all of the needed files to both master file roots directories + subdir = random_string("perf-state-") + shutil.copytree( + state_tree, os.path.join(curr_master.config["file_roots"]["base"][0], subdir) + ) + shutil.copytree( + state_tree, os.path.join(prev_master.config["file_roots"]["base"][0], subdir) + ) + return subdir -@pytest.mark.flaky(max_runs=4) def test_performance( prev_salt_cli, prev_minion, @@ -349,46 +380,9 @@ def test_performance( curr_minion, prev_sls, curr_sls, + curr_version, + perf_state_name, ): - # Copy all of the needed files to both master file roots directories - subdir = random_string("performance-") - shutil.copytree( - state_tree, os.path.join(curr_master.config["file_roots"]["base"][0], subdir) - ) - shutil.copytree( - state_tree, os.path.join(prev_master.config["file_roots"]["base"][0], subdir) - ) - - # Wait for the old master and minion to start - _wait_for_stdout( - prev_version, prev_master.run, *prev_salt_run_cli.cmdline("--version") - ) - salt_key_cmd = [ - comp - for comp in prev_salt_key_cli.cmdline("-Ay") - if not comp.startswith("--log-level") - ] - _wait_for_stdout(prev_minion.id, prev_master.run, *salt_key_cmd) - _wait_for_stdout( - "Salt: {}".format(prev_version), - prev_master.run, - *prev_salt_cli.cmdline("test.versions", minion_tgt=prev_minion.id), - ) - - # Wait for the new master and minion to start - _wait_for_stdout("3005", curr_master.run, *curr_salt_run_cli.cmdline("--version")) - curr_key_cmd = [ - comp - for comp in curr_salt_key_cli.cmdline("-Ay") - if not comp.startswith("--log-level") - ] - _wait_for_stdout(curr_minion.id, curr_master.run, *curr_key_cmd) - _wait_for_stdout( - "Salt: {}".format("3005"), - curr_master.run, - *curr_salt_cli.cmdline("test.versions", minion_tgt=curr_minion.id), - ) - # Let's now apply the states applies = 
os.environ.get("SALT_PERFORMANCE_TEST_APPLIES", 3) @@ -409,23 +403,30 @@ def _gather_durations(ret, minion_id): break else: return duration - pytest.skip("Something went wrong with the states, skipping.") + pytest.skip("Something went wrong with the states, skipping.") + + prev_duration = 0 + curr_duration = 0 for _ in range(applies): prev_state_ret = prev_master.run( *prev_salt_cli.cmdline( - "state.apply", f"{subdir}.{prev_sls}", minion_tgt=prev_minion.id + "state.apply", + f"{perf_state_name}.{prev_sls}", + minion_tgt=prev_minion.id, ) ) - prev_duration = _gather_durations(prev_state_ret, prev_minion.id) + prev_duration += _gather_durations(prev_state_ret, prev_minion.id) for _ in range(applies): curr_state_ret = curr_master.run( *curr_salt_cli.cmdline( - "state.apply", f"{subdir}.{curr_sls}", minion_tgt=curr_minion.id + "state.apply", + f"{perf_state_name}.{curr_sls}", + minion_tgt=curr_minion.id, ) ) - curr_duration = _gather_durations(curr_state_ret, curr_minion.id) + curr_duration += _gather_durations(curr_state_ret, curr_minion.id) # We account for network slowness, etc... here. # There is a hard balance here as far as a threshold. 
diff --git a/tests/pytests/scenarios/reauth/__init__.py b/tests/pytests/scenarios/reauth/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/pytests/scenarios/reauth/conftest.py b/tests/pytests/scenarios/reauth/conftest.py new file mode 100644 index 000000000000..bbefa71e1818 --- /dev/null +++ b/tests/pytests/scenarios/reauth/conftest.py @@ -0,0 +1,44 @@ +import pytest +from saltfactories.utils import random_string + + +@pytest.fixture(scope="package") +def salt_master_factory(salt_factories): + factory = salt_factories.salt_master_daemon( + random_string("reauth-master-"), + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + ) + return factory + + +@pytest.fixture(scope="package") +def salt_master(salt_master_factory): + with salt_master_factory.started(): + yield salt_master_factory + + +@pytest.fixture(scope="package") +def salt_minion_factory(salt_master): + factory = salt_master.salt_minion_daemon( + random_string("reauth-minion-"), + extra_cli_arguments_after_first_start_failure=["--log-level=info"], + ) + return factory + + +@pytest.fixture(scope="package") +def salt_minion(salt_minion_factory): + with salt_minion_factory.started(): + yield salt_minion_factory + + +@pytest.fixture(scope="package") +def salt_key_cli(salt_master): + assert salt_master.is_running() + return salt_master.salt_key_cli() + + +@pytest.fixture(scope="package") +def salt_cli(salt_master): + assert salt_master.is_running() + return salt_master.salt_cli() diff --git a/tests/pytests/scenarios/reauth/test_reauth.py b/tests/pytests/scenarios/reauth/test_reauth.py new file mode 100644 index 000000000000..c9ccb14c0e10 --- /dev/null +++ b/tests/pytests/scenarios/reauth/test_reauth.py @@ -0,0 +1,61 @@ +import logging +import os +import threading +import time + +import pytest + +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.windows_whitelisted, +] + +log = logging.getLogger(__name__) + + +def minion_func(salt_minion, event_listener, 
salt_master, timeout): + start = time.time() + with salt_minion.started(start_timeout=timeout * 2, max_start_attempts=1): + new_start = time.time() + while time.time() < new_start + (timeout * 2): + if event_listener.get_events( + [(salt_master.id, f"salt/job/*/ret/{salt_minion.id}")], + after_time=start, + ): + break + time.sleep(5) + + +@pytest.fixture(scope="module") +def timeout(): + return int(os.environ.get("SALT_CI_REAUTH_MASTER_WAIT", 150)) + + +def test_reauth(salt_cli, salt_minion, salt_master, timeout, event_listener): + # Make sure they can communicate + assert salt_cli.run("test.ping", minion_tgt=salt_minion.id).data is True + # Stop the master and minion + salt_master.terminate() + salt_minion.terminate() + log.debug( + "Master and minion stopped for reauth test, waiting for %s seconds", timeout + ) + log.debug("Restarting the reauth minion") + + # We need to have the minion attempting to start in a different process + # when we try to start the master + minion_proc = threading.Thread( + target=minion_func, args=(salt_minion, event_listener, salt_master, timeout) + ) + minion_proc.start() + time.sleep(timeout) + log.debug("Restarting the reauth master") + start = time.time() + salt_master.start() + event_listener.wait_for_events( + [(salt_master.id, f"salt/minion/{salt_minion.id}/start")], + after_time=start, + timeout=timeout * 2, + ) + assert salt_cli.run("test.ping", minion_tgt=salt_minion.id).data is True + minion_proc.join() diff --git a/tests/pytests/scenarios/setup/test_install.py b/tests/pytests/scenarios/setup/test_install.py index ba14c7600582..9c506b56cab3 100644 --- a/tests/pytests/scenarios/setup/test_install.py +++ b/tests/pytests/scenarios/setup/test_install.py @@ -3,12 +3,14 @@ """ import json import logging +import os import pathlib import re import sys import pytest +import salt.utils.files import salt.utils.path import salt.utils.platform import salt.version @@ -17,7 +19,7 @@ log = logging.getLogger(__name__) pytestmark = [ - 
pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.windows_whitelisted, pytest.mark.skip_initial_onedir_failure, pytest.mark.skip_if_binaries_missing(*KNOWN_BINARY_NAMES, check_all=False), @@ -457,3 +459,38 @@ def test_setup_install(virtualenv, cache_dir, use_static_requirements, src_dir): installed_salt_path = installed_salt_path[0] / "salt" salt_generated_version_file_path = installed_salt_path / "_version.txt" assert salt_generated_version_file_path.is_file() + + +def test_salt_install_args( + virtualenv, cache_dir, use_static_requirements, src_dir, tmp_path +): + """ + test building with `install` command with --salt-* + args. For example, --salt-config-dir and --salt-cache-dir. + """ + cache_dir = tmp_path / "cache_dir" + config_dir = tmp_path / "config_dir" + # Let's create the testing virtualenv + with virtualenv as venv: + venv.run(venv.venv_python, "setup.py", "clean", cwd=src_dir) + env = os.environ.copy() + env["GENERATE_SALT_SYSPATHS"] = "True" + ret = venv.run( + venv.venv_python, + "setup.py", + "--salt-config-dir", + str(config_dir), + "--salt-cache-dir", + str(cache_dir), + "install", + cwd=src_dir, + env=env, + ) + assert ret.returncode == 0 + syspath = pathlib.Path(src_dir, "build", "lib", "salt", "_syspaths.py") + assert syspath.exists() + with salt.utils.files.fopen(syspath) as fp: + data = fp.read() + assert str(cache_dir) in data + assert str(config_dir) in data + venv.run(venv.venv_python, "setup.py", "clean", cwd=src_dir) diff --git a/tests/pytests/scenarios/setup/test_man.py b/tests/pytests/scenarios/setup/test_man.py index aafb55f8160c..28f0d6285a35 100644 --- a/tests/pytests/scenarios/setup/test_man.py +++ b/tests/pytests/scenarios/setup/test_man.py @@ -11,7 +11,7 @@ from salt.modules.virtualenv_mod import KNOWN_BINARY_NAMES pytestmark = [ - pytest.mark.slow_test, + pytest.mark.core_test, pytest.mark.skip_on_windows, pytest.mark.skip_on_aix, pytest.mark.skip_initial_onedir_failure, diff --git 
a/tests/pytests/scenarios/swarm/conftest.py b/tests/pytests/scenarios/swarm/conftest.py index 95a4fe58a35c..8147eb8049de 100644 --- a/tests/pytests/scenarios/swarm/conftest.py +++ b/tests/pytests/scenarios/swarm/conftest.py @@ -41,7 +41,7 @@ def salt_cli(salt_master): @pytest.fixture(scope="package") def minion_count(): # Allow this to be changed via an environment variable if needed - return int(os.environ.get("SALT_CI_MINION_SWARM_COUNT", 20)) + return int(os.environ.get("SALT_CI_MINION_SWARM_COUNT", 15)) @pytest.fixture(scope="package") @@ -53,7 +53,7 @@ def minion_swarm(salt_master, minion_count): with ExitStack() as stack: for idx in range(minion_count): minion_factory = salt_master.salt_minion_daemon( - random_string("swarm-minion-{}-".format(idx)), + random_string(f"swarm-minion-{idx}-"), extra_cli_arguments_after_first_start_failure=["--log-level=info"], ) stack.enter_context(minion_factory.started()) diff --git a/tests/pytests/unit/beacons/test_inotify.py b/tests/pytests/unit/beacons/test_inotify.py index 30a9a91db4b5..16b5f9e840bc 100644 --- a/tests/pytests/unit/beacons/test_inotify.py +++ b/tests/pytests/unit/beacons/test_inotify.py @@ -26,6 +26,14 @@ def configure_loader_modules(): return {inotify: {}} +@pytest.fixture(autouse=True) +def _close_inotify(configure_loader_modules): + try: + yield + finally: + inotify.close({}) + + def test_non_list_config(): config = {} diff --git a/tests/pytests/unit/beacons/test_watchdog.py b/tests/pytests/unit/beacons/test_watchdog.py index df150a028ef5..de622b5ea0d8 100644 --- a/tests/pytests/unit/beacons/test_watchdog.py +++ b/tests/pytests/unit/beacons/test_watchdog.py @@ -45,6 +45,14 @@ def configure_loader_modules(): return {watchdog: {}} +@pytest.fixture(autouse=True) +def _close_watchdog(configure_loader_modules): + try: + yield + finally: + watchdog.close({}) + + def assertValid(config): ret = watchdog.validate(config) assert ret == (True, "Valid beacon configuration") diff --git 
a/tests/pytests/unit/cli/test_ssh.py b/tests/pytests/unit/cli/test_ssh.py new file mode 100644 index 000000000000..3cc4a5c04192 --- /dev/null +++ b/tests/pytests/unit/cli/test_ssh.py @@ -0,0 +1,16 @@ +from salt.cli.ssh import SaltSSH +from tests.support.mock import MagicMock, call, patch + + +def test_fsclient_destroy_called(minion_opts): + """ + Test that `salt.client.ssh.SSH.fsclient.destroy()` is called. + """ + ssh_mock = MagicMock() + with patch( + "salt.utils.parsers.SaltSSHOptionParser.parse_args", return_value=MagicMock() + ), patch("salt.client.ssh.SSH", return_value=ssh_mock): + parser = SaltSSH() + parser.config = minion_opts + parser.run() + assert ssh_mock.fsclient.mock_calls == [call.destroy()] diff --git a/tests/pytests/unit/client/ssh/test_password.py b/tests/pytests/unit/client/ssh/test_password.py index 8a7794d2f4a8..711306918125 100644 --- a/tests/pytests/unit/client/ssh/test_password.py +++ b/tests/pytests/unit/client/ssh/test_password.py @@ -30,13 +30,16 @@ def test_password_failure(temp_salt_master, tmp_path): opts["arg"] = [] roster = str(tmp_path / "roster") handle_ssh_ret = [ - { - "localhost": { - "retcode": 255, - "stderr": "Permission denied (publickey).\r\n", - "stdout": "", - } - }, + ( + { + "localhost": { + "retcode": 255, + "stderr": "Permission denied (publickey).\r\n", + "stdout": "", + } + }, + 1, + ) ] expected = {"localhost": "Permission denied (publickey)"} display_output = MagicMock() @@ -50,4 +53,4 @@ def test_password_failure(temp_salt_master, tmp_path): with pytest.raises(SystemExit): client.run() display_output.assert_called_once_with(expected, "nested", opts) - assert ret is handle_ssh_ret[0] + assert ret is handle_ssh_ret[0][0] diff --git a/tests/pytests/unit/client/ssh/test_return_events.py b/tests/pytests/unit/client/ssh/test_return_events.py index 1f0b0dbf335f..382a7b2e57ae 100644 --- a/tests/pytests/unit/client/ssh/test_return_events.py +++ b/tests/pytests/unit/client/ssh/test_return_events.py @@ -26,9 +26,7 @@ def 
test_not_missing_fun_calling_wfuncs(temp_salt_master, tmp_path): opts["tgt"] = "localhost" opts["arg"] = [] roster = str(tmp_path / "roster") - handle_ssh_ret = [ - {"localhost": {}}, - ] + handle_ssh_ret = [({"localhost": {}}, 0)] expected = {"localhost": {}} display_output = MagicMock() @@ -44,7 +42,7 @@ def test_not_missing_fun_calling_wfuncs(temp_salt_master, tmp_path): assert "fun" in ret["localhost"] client.run() display_output.assert_called_once_with(expected, "nested", opts) - assert ret is handle_ssh_ret[0] + assert ret is handle_ssh_ret[0][0] assert len(client.event.fire_event.call_args_list) == 2 assert "fun" in client.event.fire_event.call_args_list[0][0][0] assert "fun" in client.event.fire_event.call_args_list[1][0][0] diff --git a/tests/pytests/unit/client/ssh/test_shell.py b/tests/pytests/unit/client/ssh/test_shell.py index 37065c4c1876..96bc776106b8 100644 --- a/tests/pytests/unit/client/ssh/test_shell.py +++ b/tests/pytests/unit/client/ssh/test_shell.py @@ -4,7 +4,7 @@ import pytest import salt.client.ssh.shell as shell -from tests.support.mock import patch +from tests.support.mock import MagicMock, PropertyMock, patch @pytest.fixture @@ -52,3 +52,49 @@ def test_ssh_shell_exec_cmd(caplog): ret = _shell.exec_cmd("ls {}".format(passwd)) assert not any([x for x in ret if passwd in str(x)]) assert passwd not in caplog.text + + +def test_ssh_shell_exec_cmd_waits_for_term_close_before_reading_exit_status(): + """ + Ensure that the terminal is always closed before accessing its exitstatus. 
+ """ + term = MagicMock() + has_unread_data = PropertyMock(side_effect=(True, True, False)) + exitstatus = PropertyMock( + side_effect=lambda *args: 0 if term._closed is True else None + ) + term.close.side_effect = lambda *args, **kwargs: setattr(term, "_closed", True) + type(term).has_unread_data = has_unread_data + type(term).exitstatus = exitstatus + term.recv.side_effect = (("hi ", ""), ("there", ""), (None, None), (None, None)) + shl = shell.Shell({}, "localhost") + with patch("salt.utils.vt.Terminal", autospec=True, return_value=term): + stdout, stderr, retcode = shl.exec_cmd("do something") + assert stdout == "hi there" + assert stderr == "" + assert retcode == 0 + + +def test_ssh_shell_exec_cmd_returns_status_code_with_highest_bit_set_if_process_dies(): + """ + Ensure that if a child process dies as the result of a signal instead of exiting + regularly, the shell returns the signal code encoded in the lowest seven bits with + the highest one set, not None. + """ + term = MagicMock() + term.exitstatus = None + term.signalstatus = 9 + has_unread_data = PropertyMock(side_effect=(True, True, False)) + type(term).has_unread_data = has_unread_data + term.recv.side_effect = ( + ("", "leave me alone"), + ("", " please"), + (None, None), + (None, None), + ) + shl = shell.Shell({}, "localhost") + with patch("salt.utils.vt.Terminal", autospec=True, return_value=term): + stdout, stderr, retcode = shl.exec_cmd("do something") + assert stdout == "" + assert stderr == "leave me alone please" + assert retcode == 137 diff --git a/tests/pytests/unit/client/ssh/test_single.py b/tests/pytests/unit/client/ssh/test_single.py index f97519d5cc22..c5b733372af4 100644 --- a/tests/pytests/unit/client/ssh/test_single.py +++ b/tests/pytests/unit/client/ssh/test_single.py @@ -1,6 +1,5 @@ -import os +import logging import re -import tempfile from textwrap import dedent import pytest @@ -16,19 +15,17 @@ from salt.client import ssh from tests.support.mock import MagicMock, call, patch 
+log = logging.getLogger(__name__) + @pytest.fixture -def opts(tmp_path): - return { - "argv": [ - "ssh.set_auth_key", - "root", - "hobn+amNAXSBTiOXEqlBjGB...rsa root@master", - ], - "__role": "master", - "cachedir": str(tmp_path), - "extension_modules": str(tmp_path / "extmods"), - } +def opts(master_opts): + master_opts["argv"] = [ + "ssh.set_auth_key", + "root", + "hobn+amNAXSBTiOXEqlBjGB...rsa root@master", + ] + return master_opts @pytest.fixture @@ -59,7 +56,7 @@ def test_single_opts(opts, target): fsclient=None, thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, - **target + **target, ) assert single.shell._ssh_opts() == "" @@ -87,7 +84,7 @@ def test_run_with_pre_flight(opts, target, tmp_path): fsclient=None, thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, - **target + **target, ) cmd_ret = ("Success", "", 0) @@ -122,7 +119,7 @@ def test_run_with_pre_flight_with_args(opts, target, tmp_path): fsclient=None, thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, - **target + **target, ) cmd_ret = ("Success", "foobar", 0) @@ -156,7 +153,7 @@ def test_run_with_pre_flight_stderr(opts, target, tmp_path): fsclient=None, thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, - **target + **target, ) cmd_ret = ("", "Error running script", 1) @@ -190,7 +187,7 @@ def test_run_with_pre_flight_script_doesnot_exist(opts, target, tmp_path): fsclient=None, thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, - **target + **target, ) cmd_ret = ("Success", "", 0) @@ -224,7 +221,7 @@ def test_run_with_pre_flight_thin_dir_exists(opts, target, tmp_path): fsclient=None, thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, - **target + **target, ) cmd_ret = ("", "", 0) @@ -242,6 +239,39 @@ def test_run_with_pre_flight_thin_dir_exists(opts, target, tmp_path): assert ret == cmd_ret +def test_run_ssh_pre_flight(opts, target, tmp_path): + """ + test Single.run_ssh_pre_flight function + """ + target["ssh_pre_flight"] = 
str(tmp_path / "script.sh") + single = ssh.Single( + opts, + opts["argv"], + "localhost", + mods={}, + fsclient=None, + thin=salt.utils.thin.thin_path(opts["cachedir"]), + mine=False, + **target, + ) + + cmd_ret = ("Success", "", 0) + mock_flight = MagicMock(return_value=cmd_ret) + mock_cmd = MagicMock(return_value=cmd_ret) + patch_flight = patch("salt.client.ssh.Single.run_ssh_pre_flight", mock_flight) + patch_cmd = patch("salt.client.ssh.Single.cmd_block", mock_cmd) + patch_exec_cmd = patch( + "salt.client.ssh.shell.Shell.exec_cmd", return_value=("", "", 1) + ) + patch_os = patch("os.path.exists", side_effect=[True]) + + with patch_os, patch_flight, patch_cmd, patch_exec_cmd: + ret = single.run() + mock_cmd.assert_called() + mock_flight.assert_called() + assert ret == cmd_ret + + def test_execute_script(opts, target, tmp_path): """ test Single.execute_script() @@ -255,7 +285,7 @@ def test_execute_script(opts, target, tmp_path): thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, winrm=False, - **target + **target, ) exp_ret = ("Success", "", 0) @@ -273,7 +303,7 @@ def test_execute_script(opts, target, tmp_path): ] == mock_cmd.call_args_list -def test_shim_cmd(opts, target): +def test_shim_cmd(opts, target, tmp_path): """ test Single.shim_cmd() """ @@ -287,7 +317,7 @@ def test_shim_cmd(opts, target): mine=False, winrm=False, tty=True, - **target + **target, ) exp_ret = ("Success", "", 0) @@ -295,21 +325,24 @@ def test_shim_cmd(opts, target): patch_cmd = patch("salt.client.ssh.shell.Shell.exec_cmd", mock_cmd) patch_send = patch("salt.client.ssh.shell.Shell.send", return_value=("", "", 0)) patch_rand = patch("os.urandom", return_value=b"5\xd9l\xca\xc2\xff") + tmp_file = tmp_path / "tmp_file" + mock_tmp = MagicMock() + patch_tmp = patch("tempfile.NamedTemporaryFile", mock_tmp) + mock_tmp.return_value.__enter__.return_value.name = tmp_file - with patch_cmd, patch_rand, patch_send: + with patch_cmd, patch_tmp, patch_send: ret = single.shim_cmd(cmd_str="echo 
test") assert ret == exp_ret assert [ - call("/bin/sh '.35d96ccac2ff.py'"), - call("rm '.35d96ccac2ff.py'"), + call(f"/bin/sh '.{tmp_file.name}'"), + call(f"rm '.{tmp_file.name}'"), ] == mock_cmd.call_args_list -def test_run_ssh_pre_flight(opts, target, tmp_path): +def test_shim_cmd_copy_fails(opts, target, caplog): """ - test Single.run_ssh_pre_flight + test Single.shim_cmd() when copying the file fails """ - target["ssh_pre_flight"] = str(tmp_path / "script.sh") single = ssh.Single( opts, opts["argv"], @@ -320,24 +353,209 @@ def test_run_ssh_pre_flight(opts, target, tmp_path): mine=False, winrm=False, tty=True, - **target + **target, ) - exp_ret = ("Success", "", 0) - mock_cmd = MagicMock(return_value=exp_ret) + ret_cmd = ("Success", "", 0) + mock_cmd = MagicMock(return_value=ret_cmd) patch_cmd = patch("salt.client.ssh.shell.Shell.exec_cmd", mock_cmd) - patch_send = patch("salt.client.ssh.shell.Shell.send", return_value=exp_ret) - exp_tmp = os.path.join( - tempfile.gettempdir(), os.path.basename(target["ssh_pre_flight"]) + ret_send = ("", "General error in file copy", 1) + patch_send = patch("salt.client.ssh.shell.Shell.send", return_value=ret_send) + patch_rand = patch("os.urandom", return_value=b"5\xd9l\xca\xc2\xff") + + with patch_cmd, patch_rand, patch_send: + ret = single.shim_cmd(cmd_str="echo test") + assert ret == ret_send + assert "Could not copy the shim script to target" in caplog.text + mock_cmd.assert_not_called() + + +def test_run_ssh_pre_flight_no_connect(opts, target, tmp_path, caplog): + """ + test Single.run_ssh_pre_flight when you + cannot connect to the target + """ + pre_flight = tmp_path / "script.sh" + pre_flight.write_text("") + target["ssh_pre_flight"] = str(pre_flight) + single = ssh.Single( + opts, + opts["argv"], + "localhost", + mods={}, + fsclient=None, + thin=salt.utils.thin.thin_path(opts["cachedir"]), + mine=False, + winrm=False, + tty=True, + **target, + ) + mock_exec_cmd = MagicMock(return_value=("", "", 1)) + patch_exec_cmd = 
patch("salt.client.ssh.shell.Shell.exec_cmd", mock_exec_cmd) + tmp_file = tmp_path / "tmp_file" + mock_tmp = MagicMock() + patch_tmp = patch("tempfile.NamedTemporaryFile", mock_tmp) + mock_tmp.return_value.__enter__.return_value.name = tmp_file + ret_send = ( + "", + "ssh: connect to host 192.168.1.186 port 22: No route to host\nscp: Connection closed\n", + 255, ) + send_mock = MagicMock(return_value=ret_send) + patch_send = patch("salt.client.ssh.shell.Shell.send", send_mock) + + with caplog.at_level(logging.TRACE): + with patch_send, patch_exec_cmd, patch_tmp: + ret = single.run_ssh_pre_flight() + + # Flush the logging handler just to be sure + caplog.handler.flush() + + assert "Copying the pre flight script" in caplog.text + assert "Could not copy the pre flight script to target" in caplog.text + assert ret == ret_send + assert send_mock.call_args_list[0][0][0] == tmp_file + target_script = send_mock.call_args_list[0][0][1] + assert re.search(r".[a-z0-9]+", target_script) + mock_exec_cmd.assert_not_called() - with patch_cmd, patch_send: + +def test_run_ssh_pre_flight_permission_denied(opts, target, tmp_path): + """ + test Single.run_ssh_pre_flight when you + cannot copy script to the target due to + a permission denied error + """ + pre_flight = tmp_path / "script.sh" + pre_flight.write_text("") + target["ssh_pre_flight"] = str(pre_flight) + single = ssh.Single( + opts, + opts["argv"], + "localhost", + mods={}, + fsclient=None, + thin=salt.utils.thin.thin_path(opts["cachedir"]), + mine=False, + winrm=False, + tty=True, + **target, + ) + mock_exec_cmd = MagicMock(return_value=("", "", 1)) + patch_exec_cmd = patch("salt.client.ssh.shell.Shell.exec_cmd", mock_exec_cmd) + tmp_file = tmp_path / "tmp_file" + mock_tmp = MagicMock() + patch_tmp = patch("tempfile.NamedTemporaryFile", mock_tmp) + mock_tmp.return_value.__enter__.return_value.name = tmp_file + ret_send = ( + "", + 'scp: dest open "/tmp/preflight.sh": Permission denied\nscp: failed to upload file 
/etc/salt/preflight.sh to /tmp/preflight.sh\n', + 255, + ) + send_mock = MagicMock(return_value=ret_send) + patch_send = patch("salt.client.ssh.shell.Shell.send", send_mock) + + with patch_send, patch_exec_cmd, patch_tmp: ret = single.run_ssh_pre_flight() - assert ret == exp_ret - assert [ - call("/bin/sh '{}'".format(exp_tmp)), - call("rm '{}'".format(exp_tmp)), - ] == mock_cmd.call_args_list + assert ret == ret_send + assert send_mock.call_args_list[0][0][0] == tmp_file + target_script = send_mock.call_args_list[0][0][1] + assert re.search(r".[a-z0-9]+", target_script) + mock_exec_cmd.assert_not_called() + + +def test_run_ssh_pre_flight_connect(opts, target, tmp_path, caplog): + """ + test Single.run_ssh_pre_flight when you + can connect to the target + """ + pre_flight = tmp_path / "script.sh" + pre_flight.write_text("") + target["ssh_pre_flight"] = str(pre_flight) + single = ssh.Single( + opts, + opts["argv"], + "localhost", + mods={}, + fsclient=None, + thin=salt.utils.thin.thin_path(opts["cachedir"]), + mine=False, + winrm=False, + tty=True, + **target, + ) + ret_exec_cmd = ("", "", 1) + mock_exec_cmd = MagicMock(return_value=ret_exec_cmd) + patch_exec_cmd = patch("salt.client.ssh.shell.Shell.exec_cmd", mock_exec_cmd) + tmp_file = tmp_path / "tmp_file" + mock_tmp = MagicMock() + patch_tmp = patch("tempfile.NamedTemporaryFile", mock_tmp) + mock_tmp.return_value.__enter__.return_value.name = tmp_file + ret_send = ( + "", + "\rroot@192.168.1.187's password: \n\rpreflight.sh 0% 0 0.0KB/s --:-- ETA\rpreflight.sh 100% 20 2.7KB/s 00:00 \n", + 0, + ) + send_mock = MagicMock(return_value=ret_send) + patch_send = patch("salt.client.ssh.shell.Shell.send", send_mock) + + with caplog.at_level(logging.TRACE): + with patch_send, patch_exec_cmd, patch_tmp: + ret = single.run_ssh_pre_flight() + + # Flush the logging handler just to be sure + caplog.handler.flush() + + assert "Executing the pre flight script on target" in caplog.text + assert ret == ret_exec_cmd + assert 
send_mock.call_args_list[0][0][0] == tmp_file + target_script = send_mock.call_args_list[0][0][1] + assert re.search(r".[a-z0-9]+", target_script) + mock_exec_cmd.assert_called() + + +def test_run_ssh_pre_flight_shutil_fails(opts, target, tmp_path): + """ + test Single.run_ssh_pre_flight when cannot + copyfile with shutil + """ + pre_flight = tmp_path / "script.sh" + pre_flight.write_text("") + target["ssh_pre_flight"] = str(pre_flight) + single = ssh.Single( + opts, + opts["argv"], + "localhost", + mods={}, + fsclient=None, + thin=salt.utils.thin.thin_path(opts["cachedir"]), + mine=False, + winrm=False, + tty=True, + **target, + ) + ret_exec_cmd = ("", "", 1) + mock_exec_cmd = MagicMock(return_value=ret_exec_cmd) + patch_exec_cmd = patch("salt.client.ssh.shell.Shell.exec_cmd", mock_exec_cmd) + tmp_file = tmp_path / "tmp_file" + mock_tmp = MagicMock() + patch_tmp = patch("tempfile.NamedTemporaryFile", mock_tmp) + mock_tmp.return_value.__enter__.return_value.name = tmp_file + send_mock = MagicMock() + mock_shutil = MagicMock(side_effect=IOError("Permission Denied")) + patch_shutil = patch("shutil.copyfile", mock_shutil) + patch_send = patch("salt.client.ssh.shell.Shell.send", send_mock) + + with patch_send, patch_exec_cmd, patch_tmp, patch_shutil: + ret = single.run_ssh_pre_flight() + + assert ret == ( + "", + "Could not copy pre flight script to temporary path", + 1, + ) + mock_exec_cmd.assert_not_called() + send_mock.assert_not_called() @pytest.mark.skip_on_windows(reason="SSH_PY_SHIM not set on windows") @@ -355,7 +573,7 @@ def test_cmd_run_set_path(opts, target): fsclient=None, thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, - **target + **target, ) ret = single._cmd_str() @@ -376,7 +594,7 @@ def test_cmd_run_not_set_path(opts, target): fsclient=None, thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, - **target + **target, ) ret = single._cmd_str() @@ -395,7 +613,7 @@ def test_cmd_block_python_version_error(opts, target): 
thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, winrm=False, - **target + **target, ) mock_shim = MagicMock( return_value=(("", "ERROR: Unable to locate appropriate python command\n", 10)) @@ -434,7 +652,9 @@ def test_run_with_pre_flight_args(opts, target, test_opts, tmp_path): and script successfully runs """ opts["ssh_run_pre_flight"] = True - target["ssh_pre_flight"] = str(tmp_path / "script.sh") + pre_flight_script = tmp_path / "script.sh" + pre_flight_script.write_text("") + target["ssh_pre_flight"] = str(pre_flight_script) if test_opts[0] is not None: target["ssh_pre_flight_args"] = test_opts[0] @@ -448,7 +668,7 @@ def test_run_with_pre_flight_args(opts, target, test_opts, tmp_path): fsclient=None, thin=salt.utils.thin.thin_path(opts["cachedir"]), mine=False, - **target + **target, ) cmd_ret = ("Success", "", 0) @@ -456,14 +676,15 @@ def test_run_with_pre_flight_args(opts, target, test_opts, tmp_path): mock_exec_cmd = MagicMock(return_value=("", "", 0)) patch_cmd = patch("salt.client.ssh.Single.cmd_block", mock_cmd) patch_exec_cmd = patch("salt.client.ssh.shell.Shell.exec_cmd", mock_exec_cmd) - patch_shell_send = patch("salt.client.ssh.shell.Shell.send", return_value=None) + patch_shell_send = patch( + "salt.client.ssh.shell.Shell.send", return_value=("", "", 0) + ) patch_os = patch("os.path.exists", side_effect=[True]) with patch_os, patch_cmd, patch_exec_cmd, patch_shell_send: - ret = single.run() - assert mock_exec_cmd.mock_calls[0].args[ - 0 - ] == "/bin/sh '/tmp/script.sh'{}".format(expected_args) + single.run() + script_args = mock_exec_cmd.mock_calls[0].args[0] + assert re.search(r"\/bin\/sh '.[a-z0-9]+", script_args) @pytest.mark.slow_test diff --git a/tests/pytests/unit/client/ssh/test_ssh.py b/tests/pytests/unit/client/ssh/test_ssh.py index 9eb3899fab57..e3baf3f5d357 100644 --- a/tests/pytests/unit/client/ssh/test_ssh.py +++ b/tests/pytests/unit/client/ssh/test_ssh.py @@ -3,10 +3,11 @@ import salt.client.ssh.client import 
salt.utils.msgpack from salt.client import ssh -from tests.support.mock import MagicMock, patch +from tests.support.mock import MagicMock, Mock, patch pytestmark = [ pytest.mark.skip_if_binaries_missing("ssh", "ssh-keygen", check_all=True), + pytest.mark.slow_test, ] @@ -338,3 +339,170 @@ def test_extra_filerefs(tmp_path, opts): with patch("salt.roster.get_roster_file", MagicMock(return_value=roster)): ssh_obj = client._prep_ssh(**ssh_opts) assert ssh_obj.opts.get("extra_filerefs", None) == "salt://foobar" + + +def test_key_deploy_permission_denied_scp(tmp_path, opts): + """ + test "key_deploy" function when + permission denied authentication error + when attempting to use scp to copy file + to target + """ + host = "localhost" + passwd = "password" + usr = "ssh-usr" + opts["ssh_user"] = usr + opts["tgt"] = host + + ssh_ret = { + host: { + "stdout": "\rroot@192.168.1.187's password: \n\rroot@192.168.1.187's password: \n\rroot@192.168.1.187's password: \n", + "stderr": "Permission denied, please try again.\nPermission denied, please try again.\nroot@192.168.1.187: Permission denied (publickey,gssapi-keyex,gssapi-with-mic,password).\nscp: Connection closed\n", + "retcode": 255, + } + } + key_run_ret = { + "localhost": { + "jid": "20230922155652279959", + "return": "test", + "retcode": 0, + "id": "test", + "fun": "cmd.run", + "fun_args": ["echo test"], + } + } + patch_roster_file = patch("salt.roster.get_roster_file", MagicMock(return_value="")) + with patch_roster_file: + client = ssh.SSH(opts) + patch_input = patch("builtins.input", side_effect=["y"]) + patch_getpass = patch("getpass.getpass", return_value=["password"]) + mock_key_run = MagicMock(return_value=key_run_ret) + patch_key_run = patch("salt.client.ssh.SSH._key_deploy_run", mock_key_run) + with patch_input, patch_getpass, patch_key_run: + ret = client.key_deploy(host, ssh_ret) + assert mock_key_run.call_args_list[0][0] == ( + host, + {"passwd": [passwd], "host": host, "user": usr}, + True,
+ ) + assert ret == key_run_ret + assert mock_key_run.call_count == 1 + + +def test_key_deploy_permission_denied_file_scp(tmp_path, opts): + """ + test "key_deploy" function when permission denied + due to not having access to copy the file to the target + We do not want to deploy the key, because this is not + an authentication to the target error. + """ + host = "localhost" + passwd = "password" + usr = "ssh-usr" + opts["ssh_user"] = usr + opts["tgt"] = host + + mock_key_run = MagicMock(return_value=False) + patch_key_run = patch("salt.client.ssh.SSH._key_deploy_run", mock_key_run) + + ssh_ret = { + "localhost": { + "stdout": "", + "stderr": 'scp: dest open "/tmp/preflight.sh": Permission denied\nscp: failed to upload file /etc/salt/preflight.sh to /tmp/preflight.sh\n', + "retcode": 1, + } + } + patch_roster_file = patch("salt.roster.get_roster_file", MagicMock(return_value="")) + with patch_roster_file: + client = ssh.SSH(opts) + ret = client.key_deploy(host, ssh_ret) + assert ret == ssh_ret + assert mock_key_run.call_count == 0 + + +def test_key_deploy_no_permission_denied(tmp_path, opts): + """ + test "key_deploy" function when no permission denied + is returned + """ + host = "localhost" + passwd = "password" + usr = "ssh-usr" + opts["ssh_user"] = usr + opts["tgt"] = host + + mock_key_run = MagicMock(return_value=False) + patch_key_run = patch("salt.client.ssh.SSH._key_deploy_run", mock_key_run) + ssh_ret = { + "localhost": { + "jid": "20230922161937998385", + "return": "test", + "retcode": 0, + "id": "test", + "fun": "cmd.run", + "fun_args": ["echo test"], + } + } + patch_roster_file = patch("salt.roster.get_roster_file", MagicMock(return_value="")) + with patch_roster_file: + client = ssh.SSH(opts) + ret = client.key_deploy(host, ssh_ret) + assert ret == ssh_ret + assert mock_key_run.call_count == 0 + + +@pytest.mark.parametrize("retcode,expected", [("null", None), ('"foo"', "foo")]) +def test_handle_routine_remote_invalid_retcode(opts, target, retcode, 
expected, caplog): + """ + Ensure that if a remote returns an invalid retcode as part of the return dict, + the final exit code is still an integer and set to 1 at least. + """ + single_ret = (f'{{"local": {{"retcode": {retcode}, "return": "foo"}}}}', "", 0) + opts["tgt"] = "localhost" + single = MagicMock(spec=ssh.Single) + single.id = "localhost" + single.run.return_value = single_ret + que = Mock() + + with patch("salt.roster.get_roster_file", MagicMock(return_value="")), patch( + "salt.client.ssh.Single", autospec=True, return_value=single + ): + client = ssh.SSH(opts) + client.handle_routine(que, opts, "localhost", target) + que.put.assert_called_once_with( + ({"id": "localhost", "ret": {"retcode": expected, "return": "foo"}}, 1) + ) + assert f"Host 'localhost' reported an invalid retcode: '{expected}'" in caplog.text + + +def test_handle_routine_single_run_invalid_retcode(opts, target, caplog): + """ + Ensure that if Single.run() call returns an invalid retcode, + the final exit code is still an integer and set to 1 at least. 
+ """ + single_ret = ("", "Something went seriously wrong", None) + opts["tgt"] = "localhost" + single = MagicMock(spec=ssh.Single) + single.id = "localhost" + single.run.return_value = single_ret + que = Mock() + + with patch("salt.roster.get_roster_file", MagicMock(return_value="")), patch( + "salt.client.ssh.Single", autospec=True, return_value=single + ): + client = ssh.SSH(opts) + client.handle_routine(que, opts, "localhost", target) + que.put.assert_called_once_with( + ( + { + "id": "localhost", + "ret": { + "stdout": "", + "stderr": "Something went seriously wrong", + "retcode": 1, + }, + }, + 1, + ) + ) + assert "Got an invalid retcode for host 'localhost': 'None'" in caplog.text diff --git a/tests/pytests/unit/client/ssh/test_ssh_classes.py b/tests/pytests/unit/client/ssh/test_ssh_classes.py new file mode 100644 index 000000000000..cabd4ff17224 --- /dev/null +++ b/tests/pytests/unit/client/ssh/test_ssh_classes.py @@ -0,0 +1,82 @@ +import logging + +import pytest +from saltfactories.utils.tempfiles import temp_directory + +import salt.client.ssh.__init__ as dunder_ssh +from salt.exceptions import SaltClientError, SaltSystemExit +from tests.support.mock import MagicMock, patch + +pytestmark = [pytest.mark.skip_unless_on_linux(reason="Test ssh only run on Linux")] + + +log = logging.getLogger(__name__) + + +def test_salt_refs(): + data_strg_cats = "cats" + ret = dunder_ssh.salt_refs(data_strg_cats) + assert ret == [] + + data_strg_proto = "salt://test_salt_ref" + ret = dunder_ssh.salt_refs(data_strg_proto) + assert ret == [data_strg_proto] + + data_list_no_proto = ["cats"] + ret = dunder_ssh.salt_refs(data_list_no_proto) + assert ret == [] + + data_list_proto = ["salt://test_salt_ref1", "salt://test_salt_ref2", "cats"] + ret = dunder_ssh.salt_refs(data_list_proto) + assert ret == ["salt://test_salt_ref1", "salt://test_salt_ref2"] + + +def test_convert_args(): + test_args = [ + "arg1", + {"key1": "value1", "key2": "value2", "__kwarg__": "kwords"}, + "dog1", + 
] + expected = ["arg1", "key1=value1", "key2=value2", "dog1"] + ret = dunder_ssh._convert_args(test_args) + assert ret == expected + + +def test_ssh_class(): + + with temp_directory() as temp_dir: + assert temp_dir.is_dir() + opts = { + "sock_dir": temp_dir, + "regen_thin": False, + "__master_opts__": {"pki_dir": "pki"}, + "selected_target_option": None, + "tgt": "*", + "tgt_type": "glob", + "fileserver_backend": ["roots"], + "cachedir": "/tmp", + "thin_extra_mods": "", + "ssh_ext_alternatives": None, + } + + with patch("salt.utils.path.which", return_value=""), pytest.raises( + SaltSystemExit + ) as err: + test_ssh = dunder_ssh.SSH(opts) + assert ( + "salt-ssh could not be run because it could not generate keys." + in str(err.value) + ) + + with patch("salt.utils.path.which", return_value="/usr/bin/ssh"), patch( + "os.path.isfile", return_value=False + ), patch( + "salt.client.ssh.shell.gen_key", MagicMock(side_effect=OSError()) + ), pytest.raises( + SaltClientError + ) as err: + test_ssh = dunder_ssh.SSH(opts) + assert ( + "salt-ssh could not be run because it could not generate keys." 
+ in err.value + ) diff --git a/tests/pytests/unit/client/test_init.py b/tests/pytests/unit/client/test_init.py new file mode 100644 index 000000000000..90fb91b0070b --- /dev/null +++ b/tests/pytests/unit/client/test_init.py @@ -0,0 +1,275 @@ +import pytest + +import salt.client +from salt.exceptions import SaltInvocationError + + +@pytest.fixture +def local_client(): + return salt.client.get_local_client() + + +def test_get_local_client(local_client): + """ + Test that a local client is created + """ + assert isinstance(local_client, salt.client.LocalClient) + + +def test_get_local_client_mopts(master_opts): + master_opts["rest_cherrypy"] = {"port": 8000} + local_client = salt.client.get_local_client(mopts=master_opts) + assert isinstance(local_client, salt.client.LocalClient) + assert local_client.opts == master_opts + + +@pytest.mark.parametrize( + "val, expected", + ((None, 5), (7, 7), ("9", 9), ("eleven", 5), (["13"], 5)), +) +def test_local_client_get_timeout(local_client, val, expected): + assert local_client._get_timeout(timeout=val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("group1", ["L@spongebob,patrick"]), + ("group2", ["G@os:squidward"]), + ("group3", ["(", "G@os:plankton", "and", "(", "L@spongebob,patrick", ")", ")"]), + ), +) +def test_resolve_nodegroup(master_opts, val, expected): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + assert local_client._resolve_nodegroup(val) == expected + + +def test_resolve_nodegroup_error(master_opts): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + with pytest.raises(SaltInvocationError): + local_client._resolve_nodegroup("missing") + + +def test_prep_pub(local_client): + 
result = local_client._prep_pub( + tgt="*", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "*", + "tgt_type": "glob", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_kwargs(local_client): + result = local_client._prep_pub( + tgt="*", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + some_kwarg="spongebob", + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "*", + "tgt_type": "glob", + "user": local_client.salt_user, + "kwargs": { + "some_kwarg": "spongebob", + }, + } + assert result == expected + + +def test_prep_pub_order_masters(master_opts): + master_opts["order_masters"] = True + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="*", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "*", + "tgt_type": "glob", + "to": 7, + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_nodegroup(master_opts): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="group1", + fun="test.ping", + arg="", + tgt_type="nodegroup", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "L@spongebob,patrick", + "tgt_type": "compound", + "user": local_client.salt_user, + } + assert result == expected + + +def 
test_prep_pub_compound(local_client): + result = local_client._prep_pub( + tgt="spongebob,patrick", + fun="test.ping", + arg="", + tgt_type="compound", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "spongebob,patrick", + "tgt_type": "compound", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_compound_nodegroup(master_opts): + master_opts["nodegroups"] = { + "group1": "L@spongebob,patrick", + "group2": "G@os:squidward", + "group3": "G@os:plankton and N@group1", + } + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="N@group1", + fun="test.ping", + arg="", + tgt_type="compound", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "", + "tgt": "L@spongebob,patrick", + "tgt_type": "compound", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_ext_job_cache(master_opts): + master_opts["ext_job_cache"] = "mysql" + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="spongebob,patrick", + fun="test.ping", + arg="", + tgt_type="glob", + ret="", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": "test.ping", + "jid": "123", + "key": "", + "ret": "mysql", + "tgt": "spongebob,patrick", + "tgt_type": "glob", + "user": local_client.salt_user, + } + assert result == expected + + +def test_prep_pub_ext_job_cache_existing(master_opts): + master_opts["ext_job_cache"] = "mysql" + local_client = salt.client.get_local_client(mopts=master_opts) + result = local_client._prep_pub( + tgt="spongebob,patrick", + fun="test.ping", + arg="", + tgt_type="glob", + ret="postgres", + jid="123", + timeout=7, + ) + expected = { + "arg": "", + "cmd": "publish", + "fun": 
"test.ping", + "jid": "123", + "key": "", + "ret": "postgres,mysql", + "tgt": "spongebob,patrick", + "tgt_type": "glob", + "user": local_client.salt_user, + } + assert result == expected diff --git a/tests/pytests/unit/cloud/test_cloud.py b/tests/pytests/unit/cloud/test_cloud.py index 303374a37153..ecdab4de5758 100644 --- a/tests/pytests/unit/cloud/test_cloud.py +++ b/tests/pytests/unit/cloud/test_cloud.py @@ -1,6 +1,7 @@ import pytest from salt.cloud import Cloud +from salt.exceptions import SaltCloudSystemExit from tests.support.mock import MagicMock, patch @@ -123,3 +124,26 @@ def test_vm_config_merger(): } vm = Cloud.vm_config("test_vm", main, provider, profile, {}) assert expected == vm + + +@pytest.mark.skip_on_fips_enabled_platform +def test_cloud_run_profile_create_returns_boolean(master_config): + + master_config["profiles"] = {"test_profile": {"provider": "test_provider:saltify"}} + master_config["providers"] = { + "test_provider": { + "saltify": {"profiles": {"provider": "test_provider:saltify"}} + } + } + master_config["show_deploy_args"] = False + + cloud = Cloud(master_config) + with patch.object(cloud, "create", return_value=True): + ret = cloud.run_profile("test_profile", ["test_vm"]) + assert ret == {"test_vm": True} + + cloud = Cloud(master_config) + with patch.object(cloud, "create", return_value=False): + with pytest.raises(SaltCloudSystemExit): + ret = cloud.run_profile("test_profile", ["test_vm"]) + assert ret == {"test_vm": False} diff --git a/tests/pytests/unit/cloud/test_map.py b/tests/pytests/unit/cloud/test_map.py index 7a6d3f5fdca8..ce2999003e71 100644 --- a/tests/pytests/unit/cloud/test_map.py +++ b/tests/pytests/unit/cloud/test_map.py @@ -18,7 +18,7 @@ "vmware": { "driver": "vmware", "password": "123456", - "url": "vca1.saltstack.com", + "url": "vca1.localhost", "minion": {"master": "providermaster", "grains": {"providergrain": True}}, "profiles": {}, "user": "root", @@ -31,7 +31,7 @@ "profiles": {}, "minion": {"master": 
"providermaster", "grains": {"providergrain": True}}, "image": "rhel6_64prod", - "url": "vca2.saltstack.com", + "url": "vca2.localhost", "user": "root", } }, @@ -99,10 +99,14 @@ def salt_cloud_config_file(salt_master_factory): return os.path.join(salt_master_factory.config_dir, "cloud") -def test_cloud_map_merge_conf(salt_cloud_config_file): +# The cloud map merge uses python's multiprocessing manager which authenticates using HMAC and MD5 +@pytest.mark.skip_on_fips_enabled_platform +def test_cloud_map_merge_conf(salt_cloud_config_file, grains): """ Ensure that nested values can be selectivly overridden in a map file """ + if grains["os"] == "VMware Photon OS" and grains["osmajorrelease"] == 3: + pytest.skip("Test hangs on PhotonOS 3") with patch( "salt.config.check_driver_dependencies", MagicMock(return_value=True) ), patch("salt.cloud.Map.read", MagicMock(return_value=EXAMPLE_MAP)): @@ -158,7 +162,7 @@ def test_cloud_map_merge_conf(salt_cloud_config_file): "profile": "nyc-vm", "provider": "nyc_vcenter:vmware", "resourcepool": "Resources", - "url": "vca1.saltstack.com", + "url": "vca1.localhost", "user": "root", }, "db2": { @@ -196,7 +200,7 @@ def test_cloud_map_merge_conf(salt_cloud_config_file): "profile": "nyc-vm", "provider": "nj_vcenter:vmware", "resourcepool": "Resources", - "url": "vca2.saltstack.com", + "url": "vca2.localhost", "user": "root", }, "db3": { @@ -216,7 +220,7 @@ def test_cloud_map_merge_conf(salt_cloud_config_file): "profile": "nj-vm", "provider": "nj_vcenter:vmware", "resourcepool": "Resources", - "url": "vca2.saltstack.com", + "url": "vca2.localhost", "user": "root", }, } diff --git a/tests/pytests/unit/config/test__validate_opts.py b/tests/pytests/unit/config/test__validate_opts.py index 3359a55bd85c..741631e6f818 100644 --- a/tests/pytests/unit/config/test__validate_opts.py +++ b/tests/pytests/unit/config/test__validate_opts.py @@ -401,3 +401,26 @@ def test_dict_bool_none_types(option_value, expected): """ result = 
salt.config._validate_opts({"ssl": option_value}) assert result is expected + + +@pytest.mark.parametrize( + "option_value,expected", + [ + ([1, 2, 3], False), # list + ((1, 2, 3), False), # tuple + ({"key": "value"}, False), # dict + ("str", False), # str + (True, True), # bool + (1, True), # int + (0.123, False), # float + (None, False), # None + ], +) +def test_bool_int_types(option_value, expected): + """ + Some config settings have two types, bool and int. In that case, bool and + int should evaluate as True. All others should return False. + state_queue is a bool/int config option + """ + result = salt.config._validate_opts({"state_queue": option_value}) + assert result is expected diff --git a/tests/pytests/unit/conftest.py b/tests/pytests/unit/conftest.py index 43deeaa618ef..587fc43babc1 100644 --- a/tests/pytests/unit/conftest.py +++ b/tests/pytests/unit/conftest.py @@ -12,6 +12,9 @@ def minion_opts(tmp_path): opts = salt.config.DEFAULT_MINION_OPTS.copy() opts["__role"] = "minion" opts["root_dir"] = str(root_dir) + opts["master_uri"] = "tcp://{ip}:{port}".format( + ip="127.0.0.1", port=opts["master_port"] + ) for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"): dirpath = root_dir / name dirpath.mkdir(parents=True) @@ -35,3 +38,21 @@ def master_opts(tmp_path): opts[name] = str(dirpath) opts["log_file"] = "logs/master.log" return opts + + +@pytest.fixture +def syndic_opts(tmp_path): + """ + Default master configuration with relative temporary paths to not require root permissions. 
+ """ + root_dir = tmp_path / "syndic" + opts = salt.config.DEFAULT_MINION_OPTS.copy() + opts["syndic_master"] = "127.0.0.1" + opts["__role"] = "minion" + opts["root_dir"] = str(root_dir) + for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"): + dirpath = root_dir / name + dirpath.mkdir(parents=True) + opts[name] = str(dirpath) + opts["log_file"] = "logs/syndic.log" + return opts diff --git a/tests/pytests/unit/daemons/masterapi/test_local_funcs.py b/tests/pytests/unit/daemons/masterapi/test_local_funcs.py index cc4e7255b13b..06e80f7be83b 100644 --- a/tests/pytests/unit/daemons/masterapi/test_local_funcs.py +++ b/tests/pytests/unit/daemons/masterapi/test_local_funcs.py @@ -5,15 +5,30 @@ import salt.utils.platform from tests.support.mock import MagicMock, patch -pytestmark = [pytest.mark.slow_test] +pytestmark = [ + pytest.mark.slow_test, +] @pytest.fixture -def local_funcs(): +def check_keys(): + return { + "test": "mGXdurU1c8lXt5cmpbGq4rWvrOvDXxkwI9gbkP5CBBjpyGWuB8vkgz9r+sjjG0wVDL9/uFuREtk=", + "root": "2t5HHv/ek2wIFh8tTX2c3hdt+6V+93xKlcXb7IlGLIszOeCVv2NuH38LyCw9UwQTfUFTeseXhSs=", + } + + +@pytest.fixture +def local_funcs(master_opts): opts = salt.config.master_config(None) return masterapi.LocalFuncs(opts, "test-key") +@pytest.fixture +def check_local_funcs(master_opts, check_keys): + return masterapi.LocalFuncs(master_opts, check_keys) + + # runner tests @@ -508,3 +523,52 @@ def test_publish_user_authorization_error(local_funcs): "salt.utils.minions.CkMinions.auth_check", MagicMock(return_value=False) ): assert mock_ret == local_funcs.publish(load) + + +def test_dual_key_auth(check_local_funcs): + """ + Test for check for presented dual keys (salt, root) are authenticated + """ + load = { + "user": "test", + "fun": "test.arg", + "tgt": "test_minion", + "kwargs": {"user": "test"}, + "arg": "foo", + "key": "mGXdurU1c8lXt5cmpbGq4rWvrOvDXxkwI9gbkP5CBBjpyGWuB8vkgz9r+sjjG0wVDL9/uFuREtk=", + } + with patch( + "salt.acl.PublisherACL.user_is_blacklisted", 
MagicMock(return_value=False) + ), patch( + "salt.acl.PublisherACL.cmd_is_blacklisted", MagicMock(return_value=False) + ), patch( + "salt.utils.master.get_values_of_matching_keys", + MagicMock(return_value=["test"]), + ): + results = check_local_funcs.publish(load) + assert results == {"enc": "clear", "load": {"jid": None, "minions": []}} + + +def test_dual_key_auth_sudo(check_local_funcs): + """ + Test for check for presented dual keys (salt, root) are authenticated + with a sudo user + """ + load = { + "user": "sudo_test", + "fun": "test.arg", + "tgt": "test_minion", + "kwargs": {"user": "sudo_test"}, + "arg": "foo", + "key": "mGXdurU1c8lXt5cmpbGq4rWvrOvDXxkwI9gbkP5CBBjpyGWuB8vkgz9r+sjjG0wVDL9/uFuREtk=", + } + with patch( + "salt.acl.PublisherACL.user_is_blacklisted", MagicMock(return_value=False) + ), patch( + "salt.acl.PublisherACL.cmd_is_blacklisted", MagicMock(return_value=False) + ), patch( + "salt.utils.master.get_values_of_matching_keys", + MagicMock(return_value=["test"]), + ): + results = check_local_funcs.publish(load) + assert results == {"enc": "clear", "load": {"jid": None, "minions": []}} diff --git a/tests/pytests/unit/daemons/masterapi/test_remote_funcs.py b/tests/pytests/unit/daemons/masterapi/test_remote_funcs.py index 3a18f10e20d8..8151dcaf006e 100644 --- a/tests/pytests/unit/daemons/masterapi/test_remote_funcs.py +++ b/tests/pytests/unit/daemons/masterapi/test_remote_funcs.py @@ -5,7 +5,9 @@ import salt.utils.platform from tests.support.mock import MagicMock, patch -pytestmark = [pytest.mark.slow_test] +pytestmark = [ + pytest.mark.slow_test, +] class FakeCache: diff --git a/tests/pytests/unit/fileclient/test_fileclient.py b/tests/pytests/unit/fileclient/test_fileclient.py new file mode 100644 index 000000000000..0e072e5c36f4 --- /dev/null +++ b/tests/pytests/unit/fileclient/test_fileclient.py @@ -0,0 +1,223 @@ +""" +Tests for the salt fileclient +""" +import errno +import logging +import os + +import pytest + +import salt.utils.files +from 
salt import fileclient +from tests.support.mock import AsyncMock, MagicMock, Mock, patch + +log = logging.getLogger(__name__) + + +@pytest.fixture +def mocked_opts(tmp_path): + fs_root = os.path.join(tmp_path, "fileclient_fs_root") + cache_root = os.path.join(tmp_path, "fileclient_cache_root") + return { + "file_roots": {x: [os.path.join(fs_root, x)] for x in ("base", "dev")}, + "fileserver_backend": ["roots"], + "cachedir": cache_root, + "file_client": "local", + } + + +@pytest.fixture +def configure_loader_modules(tmp_path, mocked_opts): + return {fileclient: {"__opts__": mocked_opts}} + + +@pytest.fixture +def file_client(mocked_opts): + client = fileclient.Client(mocked_opts) + try: + yield client + finally: + del client + + +@pytest.fixture +def client_opts(): + return { + "extension_modules": "", + "cachedir": "/__test__", + } + + +def _fake_makedir(num=errno.EEXIST): + def _side_effect(*args, **kwargs): + raise OSError(num, "Errno {}".format(num)) + + return Mock(side_effect=_side_effect) + + +class MockReqChannel: + def factory(self, opts): + return self + + def close(self): + return True + + def send(self, load): + return self + + +def test_fileclient_context_manager_closes(minion_opts, master_opts): + """ + ensure fileclient channel closes + when used with a context manager + """ + minion_opts.update( + { + "id": "root", + "transport": "zeromq", + "auth_tries": 1, + "auth_timeout": 5, + "master_ip": "127.0.0.1", + "master_port": master_opts["ret_port"], + "master_uri": f"tcp://127.0.0.1:{master_opts['ret_port']}", + "request_channel_timeout": 1, + "request_channel_tries": 1, + } + ) + mock_reqchannel = MockReqChannel() + patch_reqchannel = patch.object( + salt.channel.client, "ReqChannel", return_value=mock_reqchannel + ) + with patch_reqchannel: + with fileclient.get_file_client(minion_opts) as client: + client.master_opts() + assert not client._closing + + assert client._closing + assert client.channel.close.called + + +@pytest.mark.slow_test +def 
test_fileclient_timeout(minion_opts, master_opts): + """ + ensure fileclient channel closes + when used with a context manager + """ + minion_opts.update( + { + "id": "root", + "transport": "zeromq", + "auth_tries": 1, + "auth_timeout": 5, + "master_ip": "127.0.0.1", + "master_port": master_opts["ret_port"], + "master_uri": f"tcp://127.0.0.1:{master_opts['ret_port']}", + "request_channel_timeout": 1, + "request_channel_tries": 1, + } + ) + + def mock_dumps(*args): + return b"meh" + + with fileclient.get_file_client(minion_opts) as client: + # Authenticate must return true + with patch.object(client.auth, "authenticate", AsyncMock(return_value=True)): + # Crypticle must return bytes to pass to transport.RequestClient.send + client.auth._crypticle = Mock() + client.auth._crypticle.dumps = mock_dumps + with pytest.raises(salt.exceptions.SaltClientError): + client.file_list() + + +def test_cache_skips_makedirs_on_race_condition(client_opts): + """ + If cache contains already a directory, do not raise an exception. + """ + with patch("os.path.isfile", return_value=False): + for exists in range(2): + with patch("os.makedirs", _fake_makedir()): + with fileclient.Client(client_opts)._cache_loc("testfile") as c_ref_itr: + assert c_ref_itr == os.sep + os.sep.join( + ["__test__", "files", "base", "testfile"] + ) + + +def test_cache_raises_exception_on_non_eexist_ioerror(client_opts): + """ + If makedirs raises other than EEXIST errno, an exception should be raised. 
+ """ + with patch("os.path.isfile", return_value=False): + with patch("os.makedirs", _fake_makedir(num=errno.EROFS)): + with pytest.raises(OSError): + with fileclient.Client(client_opts)._cache_loc("testfile") as c_ref_itr: + assert c_ref_itr == "/__test__/files/base/testfile" + + +def test_cache_extrn_path_valid(client_opts): + """ + Tests for extrn_filepath for a given url + """ + file_name = "http://localhost:8000/test/location/src/dev/usr/file" + + ret = fileclient.Client(client_opts)._extrn_path(file_name, "base") + assert ":" not in ret + assert ret == os.path.join("__test__", "extrn_files", "base", ret) + + +def test_cache_extrn_path_invalid(client_opts): + """ + Tests for extrn_filepath for a given url + """ + file_name = "http://localhost:8000/../../../../../usr/bin/bad" + + ret = fileclient.Client(client_opts)._extrn_path(file_name, "base") + assert ret == "Invalid path" + + +def test_extrn_path_with_long_filename(client_opts): + safe_file_name = os.path.split( + fileclient.Client(client_opts)._extrn_path( + "https://test.com/" + ("A" * 254), "base" + ) + )[-1] + assert safe_file_name == "A" * 254 + + oversized_file_name = os.path.split( + fileclient.Client(client_opts)._extrn_path( + "https://test.com/" + ("A" * 255), "base" + ) + )[-1] + assert len(oversized_file_name) < 256 + assert oversized_file_name != "A" * 255 + + oversized_file_with_query_params = os.path.split( + fileclient.Client(client_opts)._extrn_path( + "https://test.com/file?" 
+ ("A" * 255), "base" + ) + )[-1] + assert len(oversized_file_with_query_params) < 256 + + +def test_file_list_emptydirs(file_client): + """ + Ensure that the fileclient class won't allow a direct call to file_list_emptydirs() + """ + with pytest.raises(NotImplementedError): + file_client.file_list_emptydirs() + + +def test_get_file(file_client): + """ + Ensure that the fileclient class won't allow a direct call to get_file() + """ + with pytest.raises(NotImplementedError): + file_client.get_file(None) + + +def test_get_file_client(file_client): + minion_opts = {} + minion_opts["file_client"] = "remote" + with patch("salt.fileclient.RemoteClient", MagicMock(return_value="remote_client")): + ret = fileclient.get_file_client(minion_opts) + assert "remote_client" == ret diff --git a/tests/pytests/unit/fileclient/test_fileclient_cache.py b/tests/pytests/unit/fileclient/test_fileclient_cache.py new file mode 100644 index 000000000000..81501d4dc404 --- /dev/null +++ b/tests/pytests/unit/fileclient/test_fileclient_cache.py @@ -0,0 +1,342 @@ +import errno +import logging +import os +import shutil + +import pytest + +import salt.utils.files +from salt import fileclient +from tests.support.mock import patch + +log = logging.getLogger(__name__) + + +SUBDIR = "subdir" + + +def _saltenvs(): + return ("base", "dev") + + +def _subdir_files(): + return ("foo.txt", "bar.txt", "baz.txt") + + +def _get_file_roots(fs_root): + return {x: [os.path.join(fs_root, x)] for x in _saltenvs()} + + +@pytest.fixture +def fs_root(tmp_path): + return os.path.join(tmp_path, "fileclient_fs_root") + + +@pytest.fixture +def cache_root(tmp_path): + return os.path.join(tmp_path, "fileclient_cache_root") + + +@pytest.fixture +def mocked_opts(tmp_path, fs_root, cache_root): + return { + "file_roots": _get_file_roots(fs_root), + "fileserver_backend": ["roots"], + "cachedir": cache_root, + "file_client": "local", + } + + +@pytest.fixture +def configure_loader_modules(tmp_path, mocked_opts): + return 
{fileclient: {"__opts__": mocked_opts}} + + +@pytest.fixture(autouse=True) +def _setup(fs_root, cache_root): + """ + No need to add a dummy foo.txt to muddy up the github repo, just make + our own fileserver root on-the-fly. + """ + + def _new_dir(path): + """ + Add a new dir at ``path`` using os.makedirs. If the directory + already exists, remove it recursively and then try to create it + again. + """ + try: + os.makedirs(path) + except OSError as exc: + if exc.errno == errno.EEXIST: + # Just in case a previous test was interrupted, remove the + # directory and try adding it again. + shutil.rmtree(path) + os.makedirs(path) + else: + raise + + # Crete the FS_ROOT + for saltenv in _saltenvs(): + saltenv_root = os.path.join(fs_root, saltenv) + # Make sure we have a fresh root dir for this saltenv + _new_dir(saltenv_root) + + path = os.path.join(saltenv_root, "foo.txt") + with salt.utils.files.fopen(path, "w") as fp_: + fp_.write("This is a test file in the '{}' saltenv.\n".format(saltenv)) + + subdir_abspath = os.path.join(saltenv_root, SUBDIR) + os.makedirs(subdir_abspath) + for subdir_file in _subdir_files(): + path = os.path.join(subdir_abspath, subdir_file) + with salt.utils.files.fopen(path, "w") as fp_: + fp_.write( + "This is file '{}' in subdir '{} from saltenv '{}'".format( + subdir_file, SUBDIR, saltenv + ) + ) + + # Create the CACHE_ROOT + _new_dir(cache_root) + + +def test_cache_dir(mocked_opts, minion_opts): + """ + Ensure entire directory is cached to correct location + """ + patched_opts = minion_opts.copy() + patched_opts.update(mocked_opts) + + with patch.dict(fileclient.__opts__, patched_opts): + client = fileclient.get_file_client(fileclient.__opts__, pillar=False) + for saltenv in _saltenvs(): + assert client.cache_dir("salt://{}".format(SUBDIR), saltenv, cachedir=None) + for subdir_file in _subdir_files(): + cache_loc = os.path.join( + fileclient.__opts__["cachedir"], + "files", + saltenv, + SUBDIR, + subdir_file, + ) + # Double check that the 
content of the cached file + # identifies it as being from the correct saltenv. The + # setUp function creates the file with the name of the + # saltenv mentioned in the file, so a simple 'in' check is + # sufficient here. If opening the file raises an exception, + # this is a problem, so we are not catching the exception + # and letting it be raised so that the test fails. + with salt.utils.files.fopen(cache_loc) as fp_: + content = fp_.read() + log.debug("cache_loc = %s", cache_loc) + log.debug("content = %s", content) + assert subdir_file in content + assert SUBDIR in content + assert saltenv in content + + +def test_cache_dir_with_alternate_cachedir_and_absolute_path( + mocked_opts, minion_opts, tmp_path +): + """ + Ensure entire directory is cached to correct location when an alternate + cachedir is specified and that cachedir is an absolute path + """ + patched_opts = minion_opts.copy() + patched_opts.update(mocked_opts) + alt_cachedir = os.path.join(tmp_path, "abs_cachedir") + + with patch.dict(fileclient.__opts__, patched_opts): + client = fileclient.get_file_client(fileclient.__opts__, pillar=False) + for saltenv in _saltenvs(): + assert client.cache_dir( + "salt://{}".format(SUBDIR), saltenv, cachedir=alt_cachedir + ) + for subdir_file in _subdir_files(): + cache_loc = os.path.join( + alt_cachedir, "files", saltenv, SUBDIR, subdir_file + ) + # Double check that the content of the cached file + # identifies it as being from the correct saltenv. The + # setUp function creates the file with the name of the + # saltenv mentioned in the file, so a simple 'in' check is + # sufficient here. If opening the file raises an exception, + # this is a problem, so we are not catching the exception + # and letting it be raised so that the test fails. 
+ with salt.utils.files.fopen(cache_loc) as fp_: + content = fp_.read() + log.debug("cache_loc = %s", cache_loc) + log.debug("content = %s", content) + assert subdir_file in content + assert SUBDIR in content + assert saltenv in content + + +def test_cache_dir_with_alternate_cachedir_and_relative_path(mocked_opts, minion_opts): + """ + Ensure entire directory is cached to correct location when an alternate + cachedir is specified and that cachedir is a relative path + """ + patched_opts = minion_opts.copy() + patched_opts.update(mocked_opts) + alt_cachedir = "foo" + + with patch.dict(fileclient.__opts__, patched_opts): + client = fileclient.get_file_client(fileclient.__opts__, pillar=False) + for saltenv in _saltenvs(): + assert client.cache_dir( + "salt://{}".format(SUBDIR), saltenv, cachedir=alt_cachedir + ) + for subdir_file in _subdir_files(): + cache_loc = os.path.join( + fileclient.__opts__["cachedir"], + alt_cachedir, + "files", + saltenv, + SUBDIR, + subdir_file, + ) + # Double check that the content of the cached file + # identifies it as being from the correct saltenv. The + # setUp function creates the file with the name of the + # saltenv mentioned in the file, so a simple 'in' check is + # sufficient here. If opening the file raises an exception, + # this is a problem, so we are not catching the exception + # and letting it be raised so that the test fails. 
+ with salt.utils.files.fopen(cache_loc) as fp_: + content = fp_.read() + log.debug("cache_loc = %s", cache_loc) + log.debug("content = %s", content) + assert subdir_file in content + assert SUBDIR in content + assert saltenv in content + + +def test_cache_file(mocked_opts, minion_opts): + """ + Ensure file is cached to correct location + """ + patched_opts = minion_opts.copy() + patched_opts.update(mocked_opts) + + with patch.dict(fileclient.__opts__, patched_opts): + client = fileclient.get_file_client(fileclient.__opts__, pillar=False) + for saltenv in _saltenvs(): + assert client.cache_file("salt://foo.txt", saltenv, cachedir=None) + cache_loc = os.path.join( + fileclient.__opts__["cachedir"], "files", saltenv, "foo.txt" + ) + # Double check that the content of the cached file identifies + # it as being from the correct saltenv. The setUp function + # creates the file with the name of the saltenv mentioned in + # the file, so a simple 'in' check is sufficient here. If + # opening the file raises an exception, this is a problem, so + # we are not catching the exception and letting it be raised so + # that the test fails. 
+ with salt.utils.files.fopen(cache_loc) as fp_: + content = fp_.read() + log.debug("cache_loc = %s", cache_loc) + log.debug("content = %s", content) + assert saltenv in content + + +def test_cache_file_with_alternate_cachedir_and_absolute_path( + mocked_opts, minion_opts, tmp_path +): + """ + Ensure file is cached to correct location when an alternate cachedir is + specified and that cachedir is an absolute path + """ + patched_opts = minion_opts.copy() + patched_opts.update(mocked_opts) + alt_cachedir = os.path.join(tmp_path, "abs_cachedir") + + with patch.dict(fileclient.__opts__, patched_opts): + client = fileclient.get_file_client(fileclient.__opts__, pillar=False) + for saltenv in _saltenvs(): + assert client.cache_file("salt://foo.txt", saltenv, cachedir=alt_cachedir) + cache_loc = os.path.join(alt_cachedir, "files", saltenv, "foo.txt") + # Double check that the content of the cached file identifies + # it as being from the correct saltenv. The setUp function + # creates the file with the name of the saltenv mentioned in + # the file, so a simple 'in' check is sufficient here. If + # opening the file raises an exception, this is a problem, so + # we are not catching the exception and letting it be raised so + # that the test fails. 
+ with salt.utils.files.fopen(cache_loc) as fp_: + content = fp_.read() + log.debug("cache_loc = %s", cache_loc) + log.debug("content = %s", content) + assert saltenv in content + + +def test_cache_file_with_alternate_cachedir_and_relative_path(mocked_opts, minion_opts): + """ + Ensure file is cached to correct location when an alternate cachedir is + specified and that cachedir is a relative path + """ + patched_opts = minion_opts.copy() + patched_opts.update(mocked_opts) + alt_cachedir = "foo" + + with patch.dict(fileclient.__opts__, patched_opts): + client = fileclient.get_file_client(fileclient.__opts__, pillar=False) + for saltenv in _saltenvs(): + assert client.cache_file("salt://foo.txt", saltenv, cachedir=alt_cachedir) + cache_loc = os.path.join( + fileclient.__opts__["cachedir"], + alt_cachedir, + "files", + saltenv, + "foo.txt", + ) + # Double check that the content of the cached file identifies + # it as being from the correct saltenv. The setUp function + # creates the file with the name of the saltenv mentioned in + # the file, so a simple 'in' check is sufficient here. If + # opening the file raises an exception, this is a problem, so + # we are not catching the exception and letting it be raised so + # that the test fails. 
+ with salt.utils.files.fopen(cache_loc) as fp_: + content = fp_.read() + log.debug("cache_loc = %s", cache_loc) + log.debug("content = %s", content) + assert saltenv in content + + +def test_cache_dest(mocked_opts, minion_opts): + """ + Tests functionality for cache_dest + """ + patched_opts = minion_opts.copy() + patched_opts.update(mocked_opts) + + relpath = "foo.com/bar.txt" + cachedir = minion_opts["cachedir"] + + def _external(saltenv="base"): + return salt.utils.path.join( + patched_opts["cachedir"], "extrn_files", saltenv, relpath + ) + + def _salt(saltenv="base"): + return salt.utils.path.join(patched_opts["cachedir"], "files", saltenv, relpath) + + def _check(ret, expected): + assert ret == expected, "{} != {}".format(ret, expected) + + with patch.dict(fileclient.__opts__, patched_opts): + client = fileclient.get_file_client(fileclient.__opts__, pillar=False) + + _check(client.cache_dest(f"https://{relpath}"), _external()) + + _check(client.cache_dest(f"https://{relpath}", "dev"), _external("dev")) + + _check(client.cache_dest(f"salt://{relpath}"), _salt()) + + _check(client.cache_dest(f"salt://{relpath}", "dev"), _salt("dev")) + + _check(client.cache_dest(f"salt://{relpath}?saltenv=dev"), _salt("dev")) + + _check("/foo/bar", "/foo/bar") diff --git a/tests/pytests/unit/grains/test_core.py b/tests/pytests/unit/grains/test_core.py index 993c72395087..8fb93e63fb33 100644 --- a/tests/pytests/unit/grains/test_core.py +++ b/tests/pytests/unit/grains/test_core.py @@ -7,6 +7,7 @@ """ import errno +import locale import logging import os import pathlib @@ -15,6 +16,7 @@ import sys import tempfile import textwrap +import uuid from collections import namedtuple import pytest @@ -177,6 +179,15 @@ def test_network_grains_secondary_ip(tmp_path): "2001:4860:4860::8888", ] + with patch("salt.utils.platform.is_proxy", return_value=True): + assert core.ip6_interfaces() == {} + + with patch("salt.utils.platform.is_proxy", return_value=True): + assert core.ip4_interfaces() == 
{} + + with patch("salt.utils.platform.is_proxy", return_value=True): + assert core.ip_interfaces() == {} + def test_network_grains_cache(tmp_path): """ @@ -335,6 +346,16 @@ def test_parse_cpe_name_wfn(cpe, cpe_ret): "part": None, }, ), + ( + "cpe:2.3:o:microsoft:windows_xp:5.1.601", + { + "phase": None, + "version": "5.1.601", + "product": "windows_xp", + "vendor": "microsoft", + "part": "operating system", + }, + ), ), ) def test_parse_cpe_name_v23(cpe, cpe_ret): @@ -890,6 +911,36 @@ def test_debian_11_os_grains(): _run_os_grains_tests(_os_release_data, {}, expectation) +@pytest.mark.skip_unless_on_linux +def test_debian_12_os_grains(): + """ + Test if OS grains are parsed correctly in Debian 12 "bookworm" + """ + # /etc/os-release data taken from base-files 12.4 + _os_release_data = { + "PRETTY_NAME": "Debian GNU/Linux 12 (bookworm)", + "NAME": "Debian GNU/Linux", + "VERSION_ID": "12", + "VERSION": "12 (bookworm)", + "VERSION_CODENAME": "bookworm", + "ID": "debian", + "HOME_URL": "https://www.debian.org/", + "SUPPORT_URL": "https://www.debian.org/support", + "BUG_REPORT_URL": "https://bugs.debian.org/", + } + expectation = { + "os": "Debian", + "os_family": "Debian", + "oscodename": "bookworm", + "osfullname": "Debian GNU/Linux", + "osrelease": "12", + "osrelease_info": (12,), + "osmajorrelease": 12, + "osfinger": "Debian-12", + } + _run_os_grains_tests(_os_release_data, {}, expectation) + + @pytest.mark.skip_unless_on_linux def test_centos_8_os_grains(): """ @@ -1601,6 +1652,17 @@ def test_linux_memdata(): assert memdata.get("mem_total") == 15895 assert memdata.get("swap_total") == 4676 + _proc_meminfo = textwrap.dedent( + """\ + MemTotal: 16277028 kB + + SwapTotal: 4789244 kB""" + ) + with patch("salt.utils.files.fopen", mock_open(read_data=_proc_meminfo)): + memdata = core._linux_memdata() + assert memdata.get("mem_total") == 15895 + assert memdata.get("swap_total") == 4676 + @pytest.mark.skip_on_windows def test_bsd_memdata(): @@ -1963,6 +2025,16 @@ def 
test_fqdn_return(ipv4_tuple, ipv6_tuple): ) +@pytest.mark.skip_unless_on_linux +def test_fqdn_proxy_return_empty(): + """ + test ip_fqdn returns empty for proxy minions + """ + + with patch.object(salt.utils.platform, "is_proxy", MagicMock(return_value=True)): + assert core.ip_fqdn() == {} + + @pytest.mark.skip_unless_on_linux def test_fqdn6_empty(ipv4_tuple, ipv6_tuple): """ @@ -2108,6 +2180,19 @@ def test_dns_return(ipv4_tuple, ipv6_tuple): ): assert core.dns() == ret + with patch("os.path.exists", return_value=False), patch.object( + salt.utils.dns, "parse_resolv", MagicMock(return_value=resolv_mock) + ): + assert core.dns() == ret + + with patch.object(salt.utils.platform, "is_windows", MagicMock(return_value=True)): + assert core.dns() == {} + + with patch.object( + salt.utils.platform, "is_windows", MagicMock(return_value=True) + ), patch("salt.grains.core.__opts__", {"proxyminion": True}): + assert core.dns() == {} + def test_enable_fqdns_false(): """ @@ -2541,19 +2626,52 @@ def test_osx_memdata(): test osx memdata """ - def _cmd_side_effect(cmd): + def _cmd_side_effect_megabyte(cmd): if "hw.memsize" in cmd: return "4294967296" elif "vm.swapusage" in cmd: return "total = 0.00M used = 0.00M free = 0.00M (encrypted)" with patch.dict( - core.__salt__, {"cmd.run": MagicMock(side_effect=_cmd_side_effect)} + core.__salt__, {"cmd.run": MagicMock(side_effect=_cmd_side_effect_megabyte)} + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/sbin/sysctl")): + ret = core._osx_memdata() + assert ret["swap_total"] == 0 + assert ret["mem_total"] == 4096 + + def _cmd_side_effect_kilobyte(cmd): + if "hw.memsize" in cmd: + return "4294967296" + elif "vm.swapusage" in cmd: + return "total = 0.00K used = 0.00K free = 0.00K (encrypted)" + + with patch.dict( + core.__salt__, {"cmd.run": MagicMock(side_effect=_cmd_side_effect_kilobyte)} ), patch("salt.utils.path.which", MagicMock(return_value="/usr/sbin/sysctl")): ret = core._osx_memdata() assert ret["swap_total"] == 0 
assert ret["mem_total"] == 4096 + def _cmd_side_effect_gigabyte(cmd): + if "hw.memsize" in cmd: + return "4294967296" + elif "vm.swapusage" in cmd: + return "total = 0.00G used = 0.00G free = 0.00G (encrypted)" + + with patch.dict( + core.__salt__, {"cmd.run": MagicMock(side_effect=_cmd_side_effect_gigabyte)} + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/sbin/sysctl")): + ret = core._osx_memdata() + assert ret["swap_total"] == 0 + assert ret["mem_total"] == 4096 + + with patch.dict( + core.__salt__, {"cmd.run": MagicMock(side_effect=_cmd_side_effect_gigabyte)} + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/sbin/sysctl")): + ret = core._memdata({"kernel": "Darwin"}) + assert ret["swap_total"] == 0 + assert ret["mem_total"] == 4096 + @pytest.mark.skipif(not core._DATEUTIL_TZ, reason="Missing dateutil.tz") def test_locale_info_tzname(): @@ -2640,6 +2758,36 @@ def test_locale_info_no_tz_tzname(): assert ret["locale_info"]["timezone"] == "unknown" +def test_locale_info_proxy_empty(): + with patch.object(salt.utils.platform, "is_proxy", return_value=True): + ret = core.locale_info() + assert ret == {"locale_info": {}} + + +@pytest.mark.skipif(not core._DATEUTIL_TZ, reason="Missing dateutil.tz") +def test_locale_getlocale_exception(): + # mock datetime.now().tzname() + # cant just mock now because it is read only + tzname = Mock(return_value="MDT_FAKE") + now_ret_object = Mock(tzname=tzname) + now = Mock(return_value=now_ret_object) + datetime = Mock(now=now) + + with patch.object( + core, "datetime", datetime=datetime + ) as datetime_module, patch.object( + core.dateutil.tz, "tzlocal", return_value=object + ) as tzlocal, patch.object( + salt.utils.platform, "is_proxy", return_value=False + ) as is_proxy, patch.object( + locale, "getlocale", side_effect=Exception() + ): + ret = core.locale_info() + + assert ret["locale_info"]["defaultlanguage"] == "unknown" + assert ret["locale_info"]["defaultencoding"] == "unknown" + + def 
test_cwd_exists(): cwd_grain = core.cwd() @@ -2770,6 +2918,10 @@ def test_virtual_has_virtual_grain(): {"kernel": "Windows", "manufacturer": "Parallels Software"}, {"virtual": "Parallels"}, ), + ( + {"kernel": "Windows", "manufacturer": None, "productname": None}, + {"virtual": "physical"}, + ), ], ) def test__windows_virtual(osdata, expected): @@ -2861,7 +3013,7 @@ def test_osdata_virtual_key_win(): @pytest.mark.skip_unless_on_linux -def test_linux_cpu_data_num_cpus(): +def test_linux_cpu_data(): cpuinfo_list = [] for i in range(0, 20): cpuinfo_dict = { @@ -2885,6 +3037,60 @@ def test_linux_cpu_data_num_cpus(): ret = core._linux_cpudata() assert "num_cpus" in ret assert len(cpuinfo_list) == ret["num_cpus"] + assert "cpu_flags" in ret + assert "cpu_model" in ret + + cpuinfo_list = [] + cpuinfo_dict = { + "processors": 20, + "cpu_family": 6, + "model_name": "Intel(R) Core(TM) i7-7700HQ CPU @ 2.80GHz", + "Features": "fpu vme de pse tsc msr pae mce cx8 apic sep mtrr", + } + + cpuinfo_list.append(cpuinfo_dict) + cpuinfo_content = "" + for item in cpuinfo_list: + cpuinfo_content += ( + "# processors: {}\n" "cpu family: {}\n" "vendor_id: {}\n" "Features: {}\n\n" + ).format( + item["processors"], item["cpu_family"], item["model_name"], item["Features"] + ) + + with patch.object(os.path, "isfile", MagicMock(return_value=True)), patch( + "salt.utils.files.fopen", mock_open(read_data=cpuinfo_content) + ): + ret = core._linux_cpudata() + assert "num_cpus" in ret + assert "cpu_flags" in ret + assert "cpu_model" in ret + + cpuinfo_dict = { + "Processor": "ARMv6-compatible processor rev 7 (v6l)", + "BogoMIPS": "697.95", + "Features": "swp half thumb fastmult vfp edsp java tls", + "CPU implementer": "0x41", + "CPU architecture": "7", + "CPU variant": "0x0", + "CPU part": "0xb76", + "CPU revision": "7", + "Hardware": "BCM2708", + "Revision": "0002", + "Serial": "00000000", + } + + cpuinfo_content = "" + for item in cpuinfo_dict: + cpuinfo_content += f"{item}: 
{cpuinfo_dict[item]}\n" + cpuinfo_content += "\n\n" + + with patch.object(os.path, "isfile", MagicMock(return_value=True)), patch( + "salt.utils.files.fopen", mock_open(read_data=cpuinfo_content) + ): + ret = core._linux_cpudata() + assert "num_cpus" in ret + assert "cpu_flags" in ret + assert "cpu_model" in ret @pytest.mark.skip_on_windows @@ -2969,6 +3175,16 @@ def test_saltversioninfo(): assert all([isinstance(x, int) for x in info]) +def test_saltversion(): + """ + test saltversion core grain. + """ + ret = core.saltversion() + info = ret["saltversion"] + assert isinstance(ret, dict) + assert isinstance(info, str) + + def test_path(): comps = ["foo", "bar", "baz"] path = os.path.pathsep.join(comps) @@ -3112,7 +3328,25 @@ def _open_mock(file_name, *args, **kwargs): assert core.kernelparams() == expected -def test_linux_gpus(): +@pytest.mark.skip_unless_on_linux +def test_kernelparams_file_not_found_error(): + with patch("salt.utils.files.fopen", MagicMock()) as fopen_mock: + fopen_mock.side_effect = FileNotFoundError() + ret = core.kernelparams() + assert ret == {} + + +@pytest.mark.skip_unless_on_linux +def test_kernelparams_oserror(caplog): + with patch("salt.utils.files.fopen", MagicMock()) as fopen_mock: + with caplog.at_level(logging.DEBUG): + fopen_mock.side_effect = OSError() + ret = core.kernelparams() + assert ret == {} + assert "Failed to read /proc/cmdline: " in caplog.messages + + +def test_linux_gpus(caplog): """ Test GPU detection on Linux systems """ @@ -3177,6 +3411,15 @@ def _cmd_side_effect(cmd): "intel", ], # Display controller ] + + with patch("salt.grains.core.__opts__", {"enable_lspci": False}): + ret = core._linux_gpu_data() + assert ret == {} + + with patch("salt.grains.core.__opts__", {"enable_gpu_grains": False}): + ret = core._linux_gpu_data() + assert ret == {} + with patch( "salt.utils.path.which", MagicMock(return_value="/usr/sbin/lspci") ), patch.dict(core.__salt__, {"cmd.run": MagicMock(side_effect=_cmd_side_effect)}): @@ -3189,6 
+3432,37 @@ def _cmd_side_effect(cmd): assert ret[count]["vendor"] == device[3] count += 1 + with patch( + "salt.utils.path.which", MagicMock(return_value="/usr/sbin/lspci") + ), patch.dict(core.__salt__, {"cmd.run": MagicMock(side_effect=OSError)}): + ret = core._linux_gpu_data() + assert ret == {"num_gpus": 0, "gpus": []} + + bad_gpu_data = textwrap.dedent( + """ + Class: VGA compatible controller + Vendor: Advanced Micro Devices, Inc. [AMD/ATI] + Device: Vega [Radeon RX Vega]] + SVendor; Evil Corp. + SDevice: Graphics XXL + Rev: c1 + NUMANode: 0""" + ) + + with patch( + "salt.utils.path.which", MagicMock(return_value="/usr/sbin/lspci") + ), patch.dict( + core.__salt__, {"cmd.run": MagicMock(return_value=bad_gpu_data)} + ), caplog.at_level( + logging.WARN + ): + core._linux_gpu_data() + assert ( + "Error loading grains, unexpected linux_gpu_data output, " + "check that you have a valid shell configured and permissions " + "to run lspci command" in caplog.messages + ) + def test_get_server_id(): expected = {"server_id": 94889706} @@ -3198,6 +3472,9 @@ def test_get_server_id(): with patch.dict(core.__opts__, {"id": "otherid"}): assert core.get_server_id() != expected + with patch.object(salt.utils.platform, "is_proxy", MagicMock(return_value=True)): + assert core.get_server_id() == {} + def test_linux_cpudata_ppc64le(): cpuinfo = """processor : 0 @@ -3593,3 +3870,1169 @@ def _mock_is_file(filename): assert virtual_grains["virtual"] == "Nitro" assert virtual_grains["virtual_subtype"] == "Amazon EC2" + + +def test_append_domain(): + """ + test append_domain + """ + + assert core.append_domain() == {} + + with patch.object(salt.utils.platform, "is_proxy", MagicMock(return_value=True)): + assert core.append_domain() == {} + + with patch("salt.grains.core.__opts__", {"append_domain": "example.com"}): + assert core.append_domain() == {"append_domain": "example.com"} + + +def test_hostname(): + """ + test append_domain + """ + + with patch.object(salt.utils.platform, 
"is_proxy", MagicMock(return_value=True)): + assert core.hostname() == {} + + with patch("salt.grains.core.__FQDN__", None), patch( + "socket.gethostname", MagicMock(return_value=None) + ), patch("salt.utils.network.get_fqhostname", MagicMock(return_value=None)): + assert core.hostname() == { + "localhost": None, + "fqdn": "localhost.localdomain", + "host": "localhost", + "domain": "localdomain", + } + + +def test_zmqversion(): + """ + test zmqversion + """ + + ret = core.zmqversion() + assert "zmqversion" in ret + + with patch.dict("sys.modules", {"zmq": None}): + ret = core.zmqversion() + assert "zmqversion" not in ret + + +def test_saltpath(): + """ + test saltpath + """ + + ret = core.saltpath() + assert "saltpath" in ret + + +def test_pythonexecutable(): + """ + test pythonexecutable + """ + python_executable = sys.executable + + ret = core.pythonexecutable() + assert "pythonexecutable" in ret + assert ret["pythonexecutable"] == python_executable + + +def test_pythonpath(): + """ + test pythonpath + """ + python_path = sys.path + + ret = core.pythonpath() + assert "pythonpath" in ret + assert ret["pythonpath"] == python_path + + +def test_pythonversion(): + """ + test pythonversion + """ + python_version = [*sys.version_info] + + ret = core.pythonversion() + assert "pythonversion" in ret + assert ret["pythonversion"] == python_version + + +@pytest.mark.skip_unless_on_linux +def test_get_machine_id(): + """ + test get_machine_id + """ + + ret = core.get_machine_id() + assert "machine_id" in ret + + with patch.object(os.path, "exists", return_value=False): + ret = core.get_machine_id() + assert ret == {} + + with patch.object(platform, "system", return_value="AIX"): + with patch.object(core, "_aix_get_machine_id", return_value="AIX-MACHINE-ID"): + ret = core.get_machine_id() + assert ret == "AIX-MACHINE-ID" + + +def test_hwaddr_interfaces(): + """ + test hwaddr_interfaces + """ + + mock_get_interfaces = { + "lo": { + "up": True, + "hwaddr": "00:00:00:00:00:00", 
+ "inet": [ + { + "address": "127.0.0.1", + "netmask": "255.0.0.0", + "broadcast": None, + "label": "lo", + } + ], + "inet6": [], + }, + "eth1": { + "up": True, + "hwaddr": "00:00:00:00:00:00", + "inet": [ + { + "address": "0.0.0.0", + "netmask": "255.255.255.0", + "broadcast": "0.0.0.0", + "label": "wlo1", + } + ], + "inet6": [], + }, + } + with patch.object(core, "_get_interfaces", return_value=mock_get_interfaces): + ret = core.hwaddr_interfaces() + assert "hwaddr_interfaces" in ret + assert ret["hwaddr_interfaces"] == { + "lo": "00:00:00:00:00:00", + "eth1": "00:00:00:00:00:00", + } + + +def test_id(): + """ + test id + """ + ret = core.id_() + assert "id" in ret + + with patch("salt.grains.core.__opts__", {"id": "test_id_minion_id"}): + ret = core.id_() + assert "id" in ret + assert ret["id"] == "test_id_minion_id" + + +def test__linux_bin_exists(): + """ + test __linux_bin_exists + """ + mock_retcode = [salt.exceptions.CommandExecutionError, 0] + with patch.dict( + core.__salt__, {"cmd.retcode": MagicMock(side_effect=mock_retcode)} + ): + ret = core._linux_bin_exists("ls") + assert ret + + mock_retcode = salt.exceptions.CommandExecutionError + mock_runall = [ + {"pid": 100, "retcode": 0, "stdout": "ls: /usr/bin/ls", "stderr": ""} + ] + with patch.dict( + core.__salt__, {"cmd.retcode": MagicMock(side_effect=mock_retcode)} + ): + with patch.dict( + core.__salt__, {"cmd.run_all": MagicMock(side_effect=mock_runall)} + ): + ret = core._linux_bin_exists("ls") + assert ret + + mock_retcode = salt.exceptions.CommandExecutionError + mock_runall = salt.exceptions.CommandExecutionError + + with patch.dict( + core.__salt__, {"cmd.retcode": MagicMock(side_effect=mock_retcode)} + ): + with patch.dict( + core.__salt__, {"cmd.run_all": MagicMock(side_effect=mock_runall)} + ): + ret = core._linux_bin_exists("ls") + assert not ret + + +def test__parse_lsb_release(): + """ + test __parse_lsb_release + """ + mock_lsb_file = """ +DISTRIB_ID="ManjaroLinux" 
+DISTRIB_RELEASE="23.0.2" +DISTRIB_CODENAME="Uranos" +DISTRIB_DESCRIPTION="Manjaro Linux" +""" + + with patch("salt.utils.files.fopen", mock_open(read_data=mock_lsb_file)): + ret = core._parse_lsb_release() + assert ret == { + "lsb_distrib_id": "ManjaroLinux", + "lsb_distrib_release": "23.0.2", + "lsb_distrib_codename": "Uranos", + "lsb_distrib_description": "Manjaro Linux", + } + + with patch("salt.utils.files.fopen", side_effect=OSError): + ret = core._parse_lsb_release() + assert ret == {} + + +def test__osx_gpudata(): + """ + test __osx_gpudata + """ + mock_gpudata = """ +Graphics/Displays: + + NVIDIA GeForce 320M: + + Chipset Model: NVIDIA GeForce 320M + Type: GPU + VRAM (Total): 256 MB + Vendor: NVIDIA (0x10de) + Device ID: 0x08a0 + Revision ID: 0x00a2 + ROM Revision: 3533 + Displays: + Color LCD: + Display Type: LCD + Resolution: 1280 x 800 + UI Looks like: 1280 x 800 + Framebuffer Depth: 24-Bit Color (ARGB8888) + Main Display: Yes + Mirror: Off + Online: Yes + Automatically Adjust Brightness: Yes + Connection Type: Internal + +""" + with patch.dict(core.__salt__, {"cmd.run": MagicMock(return_value=mock_gpudata)}): + ret = core._osx_gpudata() + assert ret["num_gpus"] == 1 + assert ret["gpus"] == [{"vendor": "nvidia", "model": "GeForce 320M"}] + + with patch.dict(core.__salt__, {"cmd.run": MagicMock(side_effect=OSError)}): + ret = core._osx_gpudata() + assert ret == {"num_gpus": 0, "gpus": []} + + +def test_get_master(): + """ + test get_master + """ + ret = core.get_master() + assert "master" in ret + + with patch("salt.grains.core.__opts__", {"master": "test_master_id"}): + ret = core.get_master() + assert "master" in ret + assert ret["master"] == "test_master_id" + + +def test__selinux(): + """ + test _selinux + """ + with patch.dict( + core.__salt__, + { + "cmd.run": MagicMock(return_value="Enforcing"), + "cmd.retcode": MagicMock(return_value=1), + }, + ), patch.object(core, "_linux_bin_exists", MagicMock(return_value=False)): + ret = core._selinux() + 
assert ret == {"enabled": False} + + with patch.dict( + core.__salt__, + { + "cmd.run": MagicMock(return_value="Enforcing"), + "cmd.retcode": MagicMock(return_value=0), + }, + ), patch.object(core, "_linux_bin_exists", MagicMock(return_value=True)): + ret = core._selinux() + assert ret == {"enabled": True, "enforced": "Enforcing"} + + with patch.dict( + core.__salt__, + { + "cmd.run": MagicMock(return_value="Disabled"), + "cmd.retcode": MagicMock(return_value=0), + }, + ), patch.object(core, "_linux_bin_exists", MagicMock(return_value=True)): + ret = core._selinux() + assert ret == {"enabled": True, "enforced": "Disabled"} + + +def test__systemd(): + """ + test _systemd + """ + with patch.dict( + core.__salt__, + { + "cmd.run": MagicMock( + return_value=( + "systemd 254 (254.3-1)\n+PAM +AUDIT -SELINUX -APPARMOR -IMA +SMACK " + "+SECCOMP +GCRYPT +GNUTLS +OPENSSL +ACL +BLKID +CURL +ELFUTILS " + "+FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP +LIBFDISK +PCRE2 " + "-PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ +ZLIB +ZSTD " + "+BPF_FRAMEWORK +XKBCOMMON +UTMP -SYSVINIT default-hierarchy=unified" + ) + ), + }, + ): + ret = core._systemd() + assert "version" in ret + assert "features" in ret + assert ret["version"] == "254" + assert ret["features"] == ( + "+PAM +AUDIT -SELINUX -APPARMOR -IMA +SMACK +SECCOMP +GCRYPT +GNUTLS +OPENSSL " + "+ACL +BLKID +CURL +ELFUTILS +FIDO2 +IDN2 -IDN +IPTC +KMOD +LIBCRYPTSETUP " + "+LIBFDISK +PCRE2 -PWQUALITY +P11KIT -QRENCODE +TPM2 +BZIP2 +LZ4 +XZ " + "+ZLIB +ZSTD +BPF_FRAMEWORK +XKBCOMMON +UTMP -SYSVINIT default-hierarchy=unified" + ) + + +def test__clean_value_uuid(caplog): + """ + test _clean_value uuid + """ + ret = core._clean_value("key", None) + assert not ret + + ret = core._clean_value("uuid", "49e40e2a-63b4-11ee-8c99-0242ac120002") + assert ret == "49e40e2a-63b4-11ee-8c99-0242ac120002" + + with patch.object(uuid, "UUID", MagicMock()) as mock_uuid: + with caplog.at_level(logging.TRACE): + mock_uuid.side_effect = ValueError() + 
ret = core._clean_value("uuid", "49e40e2a-63b4-11ee-8c99-0242ac120002") + assert not ret + assert ( + "HW uuid value 49e40e2a-63b4-11ee-8c99-0242ac120002 is an invalid UUID" + in caplog.messages + ) + + +@pytest.mark.parametrize( + "grain,value,expected", + ( + ("kernelrelease", "10.0.14393", "10.0.14393"), + ("kernelversion", "10.0.14393", "10.0.14393"), + ("osversion", "10.0.14393", "10.0.14393"), + ("osrelease", "2016Server", "2016Server"), + ("osrelease", "to be filled", None), + ("osmanufacturer", "Microsoft Corporation", "Microsoft Corporation"), + ("manufacturer", "innotek GmbH", "innotek GmbH"), + ("manufacturer", "to be filled", None), + ("productname", "VirtualBox", "VirtualBox"), + ("biosversion", "Default System BIOS", "Default System BIOS"), + ("serialnumber", "0", None), + ( + "osfullname", + "Microsoft Windows Server 2016 Datacenter", + "Microsoft Windows Server 2016 Datacenter", + ), + ( + "timezone", + "(UTC-08:00) Pacific Time (US & Canada)", + "(UTC-08:00) Pacific Time (US & Canada)", + ), + ( + "uuid", + "d013f373-7331-4a9f-848b-72e379fbe7bf", + "d013f373-7331-4a9f-848b-72e379fbe7bf", + ), + ("windowsdomain", "WORKGROUP", "WORKGROUP"), + ("windowsdomaintype", "Workgroup", "Workgroup"), + ("motherboard.productname", "VirtualBox", "VirtualBox"), + ("motherboard.serialnumber", "0", None), + ("model_name", "Macbook Pro", "Macbook Pro"), + ("system_serialnumber", "W80322MWATM", "W80322MWATM"), + ), +) +def test__clean_value_multiple_values(grain, value, expected): + """ + test _clean_value multiple values + """ + ret = core._clean_value(grain, value) + assert ret == expected + + +def test__linux_init_system(caplog): + """ + test _linux_init_system + """ + with patch("os.stat", MagicMock()) as mock_os_stat: + mock_os_stat.side_effect = OSError() + with patch("salt.utils.files.fopen", MagicMock()) as mock_fopen: + mock_fopen.side_effect = OSError() + ret = core._linux_init_system() + assert ret == "unknown" + + with patch("os.stat", MagicMock()) as 
mock_os_stat: + mock_os_stat.side_effect = OSError() + with patch("salt.utils.files.fopen", mock_open(read_data="init-not-found")): + mock_fopen.side_effect = OSError() + ret = core._linux_init_system() + assert ret == "unknown" + + with patch("os.stat", MagicMock()) as mock_os_stat: + mock_os_stat.side_effect = OSError() + with patch( + "salt.utils.files.fopen", mock_open(read_data="/usr/sbin/supervisord") + ): + with patch("salt.utils.path.which", return_value="/usr/sbin/supervisord"): + ret = core._linux_init_system() + assert ret == "supervisord" + + with patch("os.stat", MagicMock()) as mock_os_stat: + mock_os_stat.side_effect = OSError() + with patch( + "salt.utils.files.fopen", mock_open(read_data="/usr/sbin/dumb-init") + ): + with patch( + "salt.utils.path.which", + side_effect=["/usr/sbin/dumb-init", "", "/usr/sbin/dumb-init"], + ): + ret = core._linux_init_system() + assert ret == "dumb-init" + + with patch("os.stat", MagicMock()) as mock_os_stat: + mock_os_stat.side_effect = OSError() + with patch("salt.utils.files.fopen", mock_open(read_data="/usr/sbin/tini")): + with patch( + "salt.utils.path.which", + side_effect=["/usr/sbin/tini", "", "", "/usr/sbin/tini"], + ): + ret = core._linux_init_system() + assert ret == "tini" + + with patch("os.stat", MagicMock()) as mock_os_stat: + mock_os_stat.side_effect = OSError() + with patch("salt.utils.files.fopen", mock_open(read_data="runit")): + with patch("salt.utils.path.which", side_effect=["", "", "", ""]): + ret = core._linux_init_system() + assert ret == "runit" + + with patch("os.stat", MagicMock()) as mock_os_stat: + mock_os_stat.side_effect = OSError() + with patch("salt.utils.files.fopen", mock_open(read_data="/sbin/my_init")): + with patch("salt.utils.path.which", side_effect=["", "", "", ""]): + ret = core._linux_init_system() + assert ret == "runit" + + with patch("os.stat", MagicMock()) as mock_os_stat: + mock_os_stat.side_effect = OSError() + with patch("salt.utils.files.fopen", 
mock_open(read_data="systemd")): + with patch("salt.utils.path.which", side_effect=[IndexError(), "", "", ""]): + with caplog.at_level(logging.WARNING): + ret = core._linux_init_system() + assert ret == "unknown" + assert ( + "Unable to fetch data from /proc/1/cmdline" in caplog.messages + ) + + +def test_default_gateway(): + """ + test default_gateway + """ + + with patch("salt.utils.path.which", return_value=""): + ret = core.default_gateway() + assert ret == {} + + with patch("salt.utils.path.which", return_value="/usr/sbin/ip"): + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(return_value="")}, + ): + + ret = core.default_gateway() + assert ret == {"ip_gw": False, "ip4_gw": False, "ip6_gw": False} + + with patch("salt.utils.path.which", return_value="/usr/sbin/ip"): + ip4_route = """default via 172.23.5.3 dev enp7s0u2u4 proto dhcp src 172.23.5.173 metric 100 +172.17.0.0/16 dev docker0 proto kernel scope link src 172.17.0.1 +172.19.0.0/16 dev docker_gwbridge proto kernel scope link src 172.19.0.1 +172.23.5.0/24 dev enp7s0u2u4 proto kernel scope link src 172.23.5.173 metric 100 +192.168.56.0/24 dev vboxnet0 proto kernel scope link src 192.168.56.1""" + + ip6_route = """2603:8001:b402:cc00::/64 dev enp7s0u2u4 proto ra metric 100 pref medium +fe80::/64 dev enp7s0u2u4 proto kernel metric 1024 pref medium +default via fe80::20d:b9ff:fe37:e65c dev enp7s0u2u4 proto ra metric 100 pref medium""" + + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=[ip4_route, ip6_route])}, + ): + + ret = core.default_gateway() + assert ret == { + "ip4_gw": "172.23.5.3", + "ip6_gw": "fe80::20d:b9ff:fe37:e65c", + "ip_gw": True, + } + + with patch("salt.utils.path.which", return_value="/usr/sbin/ip"): + + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=[ip4_route, ip6_route])}, + ): + + ret = core.default_gateway() + assert ret == { + "ip4_gw": "172.23.5.3", + "ip6_gw": "fe80::20d:b9ff:fe37:e65c", + "ip_gw": True, + } + + with 
patch("salt.utils.path.which", return_value="/usr/sbin/ip"): + ip_route = """default +172.17.0.0/16 dev docker0 proto kernel scope link src 172.17.0.1 +172.19.0.0/16 dev docker_gwbridge proto kernel scope link src 172.19.0.1 +172.23.5.0/24 dev enp7s0u2u4 proto kernel scope link src 172.23.5.173 metric 100 +192.168.56.0/24 dev vboxnet0 proto kernel scope link src 192.168.56.1""" + + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=[ip_route])}, + ): + + ret = core.default_gateway() + assert ret == {"ip_gw": True, "ip4_gw": True, "ip6_gw": False} + + +def test__osx_platform_data(): + """ + test _osx_platform_data + """ + osx_platform_data = """Hardware: + + Hardware Overview: + + Model Name: MacBook Pro + Model Identifier: MacBookPro7,1 + Processor Name: Intel Core 2 Duo + Processor Speed: 2.4 GHz + Number of Processors: 1 + Total Number of Cores: 2 + L2 Cache: 3 MB + Memory: 16 GB + System Firmware Version: 68.0.0.0.0 + OS Loader Version: 540.120.3~22 + SMC Version (system): 1.62f7 + Serial Number (system): W80322MWATM + Hardware UUID: 3FA5BDA2-A740-5DF3-8A97-D9D4DB1CE24A + Provisioning UDID: 3FA5BDA2-A740-5DF3-8A97-D9D4DB1CE24A + Sudden Motion Sensor: + State: Enabled""" + + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(return_value=osx_platform_data)}, + ): + + ret = core._osx_platform_data() + assert ret == { + "model_name": "MacBook Pro", + "smc_version": "1.62f7", + "system_serialnumber": "W80322MWATM", + } + + osx_platform_data = """Hardware: + + Hardware Overview: + + Model Name: MacBook Pro + Model Identifier: MacBookPro7,1 + Processor Name: Intel Core 2 Duo + Processor Speed: 2.4 GHz + Number of Processors: 1 + Total Number of Cores: 2 + L2 Cache: 3 MB + Memory: 16 GB + System Firmware Version: 68.0.0.0.0 + Boot ROM Version: 139.0.0.0.0 + OS Loader Version: 540.120.3~22 + SMC Version (system): 1.62f7 + Serial Number (system): W80322MWATM + Hardware UUID: 3FA5BDA2-A740-5DF3-8A97-D9D4DB1CE24A + Provisioning UDID: 
3FA5BDA2-A740-5DF3-8A97-D9D4DB1CE24A + Sudden Motion Sensor: + State: Enabled""" + + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(return_value=osx_platform_data)}, + ): + + ret = core._osx_platform_data() + assert ret == { + "model_name": "MacBook Pro", + "smc_version": "1.62f7", + "system_serialnumber": "W80322MWATM", + "boot_rom_version": "139.0.0.0.0", + } + + +def test__parse_junos_showver(): + """ + test _parse_junos_showver + """ + + txt = b"""Hostname: R1-MX960-re0 +Model: mx960 +Junos: 18.2R3-S2.9 +JUNOS Software Release [18.2R3-S2.9]""" + + ret = core._parse_junos_showver(txt) + assert ret == { + "model": "mx960", + "osrelease": "18.2R3-S2.9", + "osmajorrelease": "Junos: 18", + "osrelease_info": ["Junos: 18", "2R3-S2", "9"], + } + + txt = b"""Model: mx240 +Junos: 15.1F2.8 +JUNOS OS Kernel 64-bit [20150814.313820_builder_stable_10] +JUNOS OS runtime [20150814.313820_builder_stable_10] +JUNOS OS time zone information [20150814.313820_builder_stable_10] +JUNOS OS 32-bit compatibility [20150814.313820_builder_stable_10] +JUNOS py base [20150814.204717_builder_junos_151_f2] +JUNOS OS crypto [20150814.313820_builder_stable_10] +JUNOS network stack and utilities [20150814.204717_builder_junos_151_f2] +JUNOS libs compat32 [20150814.204717_builder_junos_151_f2] +JUNOS runtime [20150814.204717_builder_junos_151_f2] +JUNOS platform support [20150814.204717_builder_junos_151_f2] +JUNOS modules [20150814.204717_builder_junos_151_f2] +JUNOS libs [20150814.204717_builder_junos_151_f2] +JUNOS daemons [20150814.204717_builder_junos_151_f2] +JUNOS FIPS mode utilities [20150814.204717_builder_junos_151_f2]""" + + ret = core._parse_junos_showver(txt) + assert ret == { + "model": "mx240", + "osrelease": "15.1F2.8", + "osmajorrelease": "Junos: 15", + "osrelease_info": ["Junos: 15", "1F2", "8"], + "kernelversion": "JUNOS OS Kernel 64-bit [20150814.313820_builder_stable_10]", + "kernelrelease": "20150814.313820_builder_stable_10", + } + + +def 
test__bsd_cpudata_freebsd(): + """ + test _bsd_cpudata for FreeBSD + """ + osdata = {"kernel": "FreeBSD"} + mock_cmd_run = ["1", "amd64", "Intel(R) Core(TM) i7-10850H CPU @ 2.7.0GHz"] + + dmesg_mock = """Copyright (c) 1992-2021 The FreeBSD Project. +Copyright (c) 1979, 1980, 1983, 1986, 1988, 1989, 1991, 1992, 1993, 1994 + The Regents of the University of California. All rights reserved. +FreeBSD is a registered trademark of The FreeBSD Foundation. +FreeBSD 13.2-RELEASE releng/13.2-n254617-525ecfdad597 GENERIC amd64 +FreeBSD clang version 14.0.5 (https://github.com/llvm/llvm-project.git llvmorg-14.0.5-0-gc12386ae247c) +VT(vga): text 80x25 +CPU: Intel(R) Core(TM) i7-10850H CPU @ 2.70GHz (2712.13-MHz K8-class CPU) + Origin="GenuineIntel" Id=0xa0652 Family=0x6 Model=0xa5 Stepping=2 + Features=0x1783fbff + Features2=0x5eda220b + AMD Features=0x28100800 + AMD Features2=0x121 + Structured Extended Features=0x842529 + Structured Extended Features3=0x30000400 + TSC: P-state invariant +real memory = 1073676288 (1023 MB) +avail memory = 995774464 (949 MB) +Event timer "LAPIC" quality 100 +ACPI APIC Table: +random: registering fast source Intel Secure Key RNG +random: fast provider: "Intel Secure Key RNG" +random: unblocking device. 
+ioapic0: MADT APIC ID 1 != hw id 0 +ioapic0 irqs 0-23 +random: entropy device external interface +kbd1 at kbdmux0 +vtvga0: +smbios0: at iomem 0xfff60-0xfff7e +smbios0: Version: 2.5, BCD Revision: 2.5 +aesni0: +acpi0: +acpi0: Power Button (fixed) +acpi0: Sleep Button (fixed) +cpu0: on acpi0 +attimer0: port 0x40-0x43,0x50-0x53 on acpi0 +Timecounter "i8254" frequency 1193182 Hz quality 0 +Event timer "i8254" frequency 1193182 Hz quality 100 +Timecounter "ACPI-fast" frequency 3579545 Hz quality 900 +acpi_timer0: <32-bit timer at 3.579545MHz> port 0x4008-0x400b on acpi0 +pcib0: port 0xcf8-0xcff on acpi0 +pci0: on pcib0 +isab0: at device 1.0 on pci0 +isa0: on isab0 +atapci0: port 0x1f0-0x1f7,0x3f6,0x170-0x177,0x376,0xd000-0xd00f at device 1.1 on pci0 +ata0: at channel 0 on atapci0 +ata1: at channel 1 on atapci0 +vgapci0: port 0xd010-0xd01f mem 0xe0000000-0xe3ffffff,0xf0000000-0xf01fffff irq 18 at device 2.0 on pci0 +vgapci0: Boot video device +em0: port 0xd020-0xd027 mem 0xf0200000-0xf021ffff irq 19 at device 3.0 on pci0 +em0: Using 1024 TX descriptors and 1024 RX descriptors +em0: Ethernet address: 08:00:27:ae:76:42 +em0: netmap queues/slots: TX 1/1024, RX 1/1024 +pcm0: port 0xd100-0xd1ff,0xd200-0xd23f irq 21 at device 5.0 on pci0 +pcm0: +ohci0: mem 0xf0804000-0xf0804fff irq 22 at device 6.0 on pci0 +usbus0 on ohci0 +pci0: at device 7.0 (no driver attached) +ehci0: mem 0xf0805000-0xf0805fff irq 19 at device 11.0 on pci0 +usbus1: EHCI version 1.0 +usbus1 on ehci0 +battery0: on acpi0 +acpi_acad0: on acpi0 +atkbdc0: port 0x60,0x64 irq 1 on acpi0 +atkbd0: irq 1 on atkbdc0 +kbd0 at atkbd0 +atkbd0: [GIANT-LOCKED] +psm0: irq 12 on atkbdc0 +psm0: [GIANT-LOCKED] +WARNING: Device "psm" is Giant locked and may be deleted before FreeBSD 14.0. 
+psm0: model IntelliMouse Explorer, device ID 4 +orm0: at iomem 0xc0000-0xc7fff pnpid ORM0000 on isa0 +vga0: at port 0x3c0-0x3df iomem 0xa0000-0xbffff pnpid PNP0900 on isa0 +atrtc0: at port 0x70 irq 8 on isa0 +atrtc0: registered as a time-of-day clock, resolution 1.000000s +Event timer "RTC" frequency 32768 Hz quality 0 +atrtc0: non-PNP ISA device will be removed from GENERIC in FreeBSD 14. +Timecounter "TSC-low" frequency 1356006904 Hz quality 1000 +Timecounters tick every 10.000 msec +ZFS filesystem version: 5 +ZFS storage pool version: features support (5000) +usbus0: 12Mbps Full Speed USB v1.0 +usbus1: 480Mbps High Speed USB v2.0 +pcm0: measured ac97 link rate at 44717 Hz +ugen1.1: at usbus1 +uhub0 on usbus1 +uhub0: on usbus1 +ugen0.1: at usbus0 +uhub1 on usbus0 +uhub1: on usbus0 +Trying to mount root from zfs:zroot/ROOT/default []... +uhub1: 12 ports with 12 removable, self powered +ada0 at ata0 bus 0 scbus0 target 0 lun 0 +ada0: ATA-6 device +ada0: Serial Number VBf824a3f1-4ad9d778 +ada0: 33.300MB/s transfers (UDMA2, PIO 65536bytes) +ada0: 16384MB (33554432 512 byte sectors) +Root mount waiting for: usbus1 +Root mount waiting for: usbus1 +Root mount waiting for: usbus1 +Root mount waiting for: usbus1 +Root mount waiting for: usbus1 +uhub0: 12 ports with 12 removable, self powered +intsmb0: irq 23 at device 7.0 on pci0 +intsmb0: intr IRQ 9 enabled revision 0 +smbus0: on intsmb0 +lo0: link state changed to UP +em0: link state changed to UP""" + + with patch("salt.utils.path.which", return_value="/sbin/sysctl"): + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=mock_cmd_run)}, + ): + with patch("os.path.isfile", return_value=True): + with patch("salt.utils.files.fopen", mock_open(read_data=dmesg_mock)): + ret = core._bsd_cpudata(osdata) + assert "num_cpus" in ret + assert ret["num_cpus"] == 1 + + assert "cpuarch" in ret + assert ret["cpuarch"] == "amd64" + + assert "cpu_model" in ret + assert ( + ret["cpu_model"] == "Intel(R) Core(TM) 
i7-10850H CPU @ 2.7.0GHz" + ) + + assert "cpu_flags" in ret + assert ret["cpu_flags"] == [ + "FPU", + "VME", + "DE", + "PSE", + "TSC", + "MSR", + "PAE", + "MCE", + "CX8", + "APIC", + "SEP", + "MTRR", + "PGE", + "MCA", + "CMOV", + "PAT", + "PSE36", + "MMX", + "FXSR", + "SSE", + "SSE2", + "HTT", + "SSE3", + "PCLMULQDQ", + "MON", + "SSSE3", + "CX16", + "PCID", + "SSE4.1", + "SSE4.2", + "MOVBE", + "POPCNT", + "AESNI", + "XSAVE", + "OSXSAVE", + "AVX", + "RDRAND", + "SYSCALL", + "NX", + "RDTSCP", + "LM", + "LAHF", + "ABM", + "Prefetch", + "FSGSBASE", + "BMI1", + "AVX2", + "BMI2", + "INVPCID", + "NFPUSG", + "RDSEED", + "CLFLUSHOPT", + "MD_CLEAR", + "L1DFL", + "ARCH_CAP", + ] + + +def test__bsd_cpudata_netbsd(): + """ + test _bsd_cpudata for NetBSD + """ + osdata = {"kernel": "NetBSD"} + mock_cpuctl_identify = """cpu0: highest basic info 00000016 +cpu0: highest extended info 80000008 +cpu0: "Intel(R) Core(TM) i7-10850H CPU @ 2.70GHz" +cpu0: Intel 10th gen Core (Comet Lake) (686-class), 2753.71 MHz +cpu0: family 0x6 model 0xa5 stepping 0x2 (id 0xa0652) +cpu0: features 0x178bfbff +cpu0: features 0x178bfbff +cpu0: features1 0x5eda220b +cpu0: features1 0x5eda220b +cpu0: features2 0x28100800 +cpu0: features3 0x121 +cpu0: features5 0x842529 +cpu0: features5 0x842529 +cpu0: features7 0x30000400 +cpu0: xsave features 0x7 +cpu0: xsave area size: current 832, maximum 832, xgetbv enabled +cpu0: enabled xsave 0x7 +cpu0: I-cache: 32KB 64B/line 8-way, D-cache: 32KB 64B/line 8-way +cpu0: L2 cache: 256KB 64B/line 4-way +cpu0: L3 cache: 12MB 64B/line 16-way +cpu0: 64B prefetching +cpu0: ITLB: 64 4KB entries 8-way, 8 2M/4M entries +cpu0: DTLB: 64 4KB entries 4-way, 4 1GB entries 4-way +cpu0: L2 STLB: 1536 4KB entries 6-way +cpu0: Initial APIC ID 0 +cpu0: Cluster/Package ID 0 +cpu0: Core ID 0 +cpu0: SMT ID 0 +cpu0: monitor-line size 64 +cpu0: SEF highest subleaf 00000000 +cpu0: Power Management features: 0x100 +cpu0: microcode version 0x0, platform ID 0""" + mock_cmd_run = [ + "1", + 
"amd64", + "Intel(R) Core(TM) i7-10850H CPU @ 2.7.0GHz", + mock_cpuctl_identify, + ] + + with patch("salt.utils.path.which", return_value="/sbin/sysctl"): + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=mock_cmd_run)}, + ): + ret = core._bsd_cpudata(osdata) + assert "num_cpus" in ret + assert ret["num_cpus"] == 1 + + assert "cpuarch" in ret + assert ret["cpuarch"] == "amd64" + + assert "cpu_model" in ret + assert ret["cpu_model"] == "Intel(R) Core(TM) i7-10850H CPU @ 2.7.0GHz" + + +def test__bsd_cpudata_darwin(): + """ + test _bsd_cpudata for Darwin + """ + osdata = {"kernel": "Darwin"} + mock_cmd_run = [ + "1", + "x86_64", + "Intel(R) Core(TM)2 Duo CPU P8600 @ 2.40GHz", + "FPU VME DE PSE TSC MSR PAE MCE CX8 APIC SEP MTRR PGE MCA CMOV PAT PSE36 CLFSH DS ACPI MMX FXSR SSE SSE2 SS HTT TM PBE SSE3 DTES64 MON DSCPL VMX SMX EST TM2 SSSE3 CX16 TPR PDCM SSE4.1 XSAVE", + ] + + with patch("salt.utils.path.which", return_value="/sbin/sysctl"): + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=mock_cmd_run)}, + ): + ret = core._bsd_cpudata(osdata) + assert "num_cpus" in ret + assert ret["num_cpus"] == 1 + + assert "cpuarch" in ret + assert ret["cpuarch"] == "x86_64" + + assert "cpu_model" in ret + assert ret["cpu_model"] == "Intel(R) Core(TM)2 Duo CPU P8600 @ 2.40GHz" + + assert "cpu_flags" in ret + assert ret["cpu_flags"] == [ + "FPU", + "VME", + "DE", + "PSE", + "TSC", + "MSR", + "PAE", + "MCE", + "CX8", + "APIC", + "SEP", + "MTRR", + "PGE", + "MCA", + "CMOV", + "PAT", + "PSE36", + "CLFSH", + "DS", + "ACPI", + "MMX", + "FXSR", + "SSE", + "SSE2", + "SS", + "HTT", + "TM", + "PBE", + "SSE3", + "DTES64", + "MON", + "DSCPL", + "VMX", + "SMX", + "EST", + "TM2", + "SSSE3", + "CX16", + "TPR", + "PDCM", + "SSE4.1", + "XSAVE", + ] + + +def test__bsd_cpudata_openbsd(): + """ + test _bsd_cpudata for OpenBSD + """ + osdata = {"kernel": "OpenBSD"} + mock_cmd_run = ["1", "amd64", "Intel(R) Core(TM) i7-10850H CPU @ 2.7.0GHz", "amd64"] + + with 
patch("salt.utils.path.which", return_value="/sbin/sysctl"): + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=mock_cmd_run)}, + ): + ret = core._bsd_cpudata(osdata) + assert "num_cpus" in ret + assert ret["num_cpus"] == 1 + + assert "cpuarch" in ret + assert ret["cpuarch"] == "amd64" + + assert "cpu_model" in ret + assert ret["cpu_model"] == "Intel(R) Core(TM) i7-10850H CPU @ 2.7.0GHz" + + +def test__netbsd_gpu_data(): + """ + test _netbsd_gpu_data + """ + mock_pcictl = """000:00:0: Intel 82441FX (PMC) PCI and Memory Controller (host bridge, revision 0x02) +000:01:0: Intel 82371SB (PIIX3) PCI-ISA Bridge (ISA bridge) +000:01:1: Intel 82371AB (PIIX4) IDE Controller (IDE mass storage, interface 0x8a, revision 0x01) +000:02.0: VGA compatible controller: Intel Corporation CometLake-H GT2 [UHD Graphics] (rev 05) +000:02:0: Intel CometLake-H GT2 [UHD Graphics] (VGA display) +000:03:0: Intel i82540EM 1000baseT Ethernet (ethernet network, revision 0x02) +000:04:0: VirtualBox Guest Service (miscellaneous system) +000:05:0: Intel 82801AA AC-97 Audio Controller (audio multimedia, revision 0x01) +000:06:0: Apple Computer Intrepid USB Controller (USB serial bus, OHCI) +000:07:0: Intel 82371AB (PIIX4) Power Management Controller (miscellaneous bridge, revision 0x08) +000:11:0: Intel 82801FB/FR USB EHCI Controller (USB serial bus, EHCI)""" + + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(return_value=mock_pcictl)}, + ): + ret = core._netbsd_gpu_data() + assert ret == { + "num_gpus": 1, + "gpus": [{"vendor": "Intel", "model": "CometLake-H GT2 [UHD Graphics]"}], + } + + with patch.dict(core.__salt__, {"cmd.run": MagicMock(side_effect=OSError)}): + ret = core._netbsd_gpu_data() + assert ret == {"gpus": [], "num_gpus": 0} + + +def test__bsd_memdata(): + """ + test _bsd_memdata + """ + osdata = {"kernel": "OpenBSD"} + + with patch("salt.utils.path.which", side_effect=["/sbin/sysctl", "/sbin/swapctl"]): + + mock_cmd_run = [ + "1073278976", + "total: 
1048559 KBytes allocated, 0 KBytes used, 1048559 KBytes available", + ] + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=mock_cmd_run)}, + ): + ret = core._bsd_memdata(osdata) + assert ret == {"mem_total": 1023, "swap_total": 0} + + osdata = {"kernel": "NetBSD"} + + with patch("salt.utils.path.which", side_effect=["/sbin/sysctl", "/sbin/swapctl"]): + + mock_cmd_run = [ + "1073278976", + "total: 1048559 KBytes allocated, 0 KBytes used, 1048559 KBytes available", + ] + + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=mock_cmd_run)}, + ): + ret = core._bsd_memdata(osdata) + assert ret == {"mem_total": 1023, "swap_total": 0} + + with patch("salt.utils.path.which", side_effect=["/sbin/sysctl", "/sbin/swapctl"]): + + mock_cmd_run = [ + "-", + "1073278976", + "total: 1048559 KBytes allocated, 0 KBytes used, 1048559 KBytes available", + ] + + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=mock_cmd_run)}, + ): + ret = core._bsd_memdata(osdata) + assert ret == {"mem_total": 1023, "swap_total": 0} + + with patch("salt.utils.path.which", side_effect=["/sbin/sysctl", "/sbin/swapctl"]): + + mock_cmd_run = ["-", "1073278976", "no swap devices configured"] + + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=mock_cmd_run)}, + ): + ret = core._bsd_memdata(osdata) + assert ret == {"mem_total": 1023, "swap_total": 0} + + with patch("salt.utils.path.which", side_effect=["/sbin/sysctl", "/sbin/swapctl"]): + + mock_cmd_run = ["-", "1073278976", "no swap devices configured"] + + with patch.dict( + core.__salt__, + {"cmd.run": MagicMock(side_effect=mock_cmd_run)}, + ): + ret = core._memdata(osdata) + assert ret == {"mem_total": 1023, "swap_total": 0} + + +def test__ps(): + """ + test _ps + """ + osdata = {"os_family": ""} + + for bsd in ["FreeBSD", "NetBSD", "OpenBSD", "MacOS"]: + osdata = {"os": bsd} + ret = core._ps(osdata) + assert ret == {"ps": "ps auxwww"} + + osdata = {"os_family": "Solaris", 
"os": ""} + ret = core._ps(osdata) + assert ret == {"ps": "/usr/ucb/ps auxwww"} + + osdata = {"os": "Windows", "os_family": ""} + ret = core._ps(osdata) + assert ret == {"ps": "tasklist.exe"} + + osdata = {"os": "", "os_family": "AIX"} + ret = core._ps(osdata) + assert ret == {"ps": "/usr/bin/ps auxww"} + + osdata = {"os": "", "os_family": "NILinuxRT"} + ret = core._ps(osdata) + assert ret == {"ps": "ps -o user,pid,ppid,tty,time,comm"} + + osdata = {"os": "", "os_family": "", "virtual": "openvzhn"} + ret = core._ps(osdata) + assert ret == { + "ps": ( + 'ps -fH -p $(grep -l "^envID:[[:space:]]*0\\$" ' + '/proc/[0-9]*/status | sed -e "s=/proc/\\([0-9]*\\)/.*=\\1=") ' + "| awk '{ $7=\"\"; print }'" + ) + } diff --git a/tests/pytests/unit/modules/dockermod/test_module.py b/tests/pytests/unit/modules/dockermod/test_module.py index 8fb780649767..abfd101540df 100644 --- a/tests/pytests/unit/modules/dockermod/test_module.py +++ b/tests/pytests/unit/modules/dockermod/test_module.py @@ -9,14 +9,19 @@ import salt.loader import salt.modules.dockermod as docker_mod import salt.utils.platform +import salt.utils.versions from salt.exceptions import CommandExecutionError, SaltInvocationError from tests.support.mock import MagicMock, Mock, call, patch log = logging.getLogger(__name__) -pytest.importorskip( +docker = pytest.importorskip( "docker", reason="The python 'docker' package must be installed to run these tests" ) +docker_older_than_1_5_0_skip_marker = pytest.mark.skipif( + salt.utils.versions.Version(docker.__version__) < "1.5.0", + reason="docker module must be installed to run this test or is too old. <=1.5.0", +) @pytest.fixture @@ -354,10 +359,7 @@ def config_get_enabled(val, default): mine_mock.assert_called_once() -@pytest.mark.skipif( - docker_mod.docker.version_info < (1, 5, 0), - reason="docker module must be installed to run this test or is too old. >=1.5.0", -) +@docker_older_than_1_5_0_skip_marker def test_list_networks(): """ test list networks. 
@@ -378,10 +380,7 @@ def test_list_networks(): client.networks.assert_called_once_with(names=["foo"], ids=["01234"]) -@pytest.mark.skipif( - docker_mod.docker.version_info < (1, 5, 0), - reason="docker module must be installed to run this test or is too old. >=1.5.0", -) +@docker_older_than_1_5_0_skip_marker def test_create_network(): """ test create network. @@ -422,10 +421,7 @@ def test_create_network(): ) -@pytest.mark.skipif( - docker_mod.docker.version_info < (1, 5, 0), - reason="docker module must be installed to run this test or is too old. >=1.5.0", -) +@docker_older_than_1_5_0_skip_marker def test_remove_network(): """ test remove network. @@ -444,10 +440,7 @@ def test_remove_network(): client.remove_network.assert_called_once_with("foo") -@pytest.mark.skipif( - docker_mod.docker.version_info < (1, 5, 0), - reason="docker module must be installed to run this test or is too old. >=1.5.0", -) +@docker_older_than_1_5_0_skip_marker def test_inspect_network(): """ test inspect network. @@ -466,10 +459,7 @@ def test_inspect_network(): client.inspect_network.assert_called_once_with("foo") -@pytest.mark.skipif( - docker_mod.docker.version_info < (1, 5, 0), - reason="docker module must be installed to run this test or is too old. >=1.5.0", -) +@docker_older_than_1_5_0_skip_marker def test_connect_container_to_network(): """ test connect_container_to_network @@ -491,10 +481,7 @@ def test_connect_container_to_network(): client.connect_container_to_network.assert_called_once_with("container", "foo") -@pytest.mark.skipif( - docker_mod.docker.version_info < (1, 5, 0), - reason="docker module must be installed to run this test or is too old. 
>=1.5.0", -) +@docker_older_than_1_5_0_skip_marker def test_disconnect_container_from_network(): """ test disconnect_container_from_network @@ -513,10 +500,7 @@ def test_disconnect_container_from_network(): client.disconnect_container_from_network.assert_called_once_with("container", "foo") -@pytest.mark.skipif( - docker_mod.docker.version_info < (1, 5, 0), - reason="docker module must be installed to run this test or is too old. >=1.5.0", -) +@docker_older_than_1_5_0_skip_marker def test_list_volumes(): """ test list volumes. @@ -539,10 +523,7 @@ def test_list_volumes(): ) -@pytest.mark.skipif( - docker_mod.docker.version_info < (1, 5, 0), - reason="docker module must be installed to run this test or is too old. >=1.5.0", -) +@docker_older_than_1_5_0_skip_marker def test_create_volume(): """ test create volume. @@ -569,10 +550,7 @@ def test_create_volume(): ) -@pytest.mark.skipif( - docker_mod.docker.version_info < (1, 5, 0), - reason="docker module must be installed to run this test or is too old. >=1.5.0", -) +@docker_older_than_1_5_0_skip_marker def test_remove_volume(): """ test remove volume. @@ -591,10 +569,7 @@ def test_remove_volume(): client.remove_volume.assert_called_once_with("foo") -@pytest.mark.skipif( - docker_mod.docker.version_info < (1, 5, 0), - reason="docker module must be installed to run this test or is too old. >=1.5.0", -) +@docker_older_than_1_5_0_skip_marker def test_inspect_volume(): """ test inspect volume. 
diff --git a/tests/pytests/unit/modules/file/test_file_block_replace.py b/tests/pytests/unit/modules/file/test_file_block_replace.py index 577e6004a427..71e2d9708950 100644 --- a/tests/pytests/unit/modules/file/test_file_block_replace.py +++ b/tests/pytests/unit/modules/file/test_file_block_replace.py @@ -57,7 +57,10 @@ def configure_loader_modules(): ret.update( { win_dacl: {"__opts__": opts}, - win_file: {"__utils__": {"dacl.check_perms": win_dacl.check_perms}}, + win_file: { + "__utils__": {"dacl.check_perms": win_dacl.check_perms}, + "__opts__": opts, + }, } ) diff --git a/tests/pytests/unit/modules/file/test_file_selinux.py b/tests/pytests/unit/modules/file/test_file_selinux.py index fea2e7df1c9b..144b7c93e3c5 100644 --- a/tests/pytests/unit/modules/file/test_file_selinux.py +++ b/tests/pytests/unit/modules/file/test_file_selinux.py @@ -101,6 +101,20 @@ def test_selinux_setcontext_persist(tfile2): assert result == "system_u:object_r:user_tmp_t:s0" +def test_selinux_setcontext_persist_change(tfile2): + """ + Test set selinux context with persist=True + Assumes default selinux attributes on temporary files + """ + result = filemod.set_selinux_context(tfile2, user="system_u", persist=True) + assert result == "system_u:object_r:user_tmp_t:s0" + + result = filemod.set_selinux_context( + tfile2, user="unconfined_u", type="net_conf_t", persist=True + ) + assert result == "unconfined_u:object_r:net_conf_t:s0" + + def test_file_check_perms(tfile3): expected_result = ( { diff --git a/tests/pytests/unit/modules/test_aptpkg.py b/tests/pytests/unit/modules/test_aptpkg.py index ff3023678ada..7958a206a368 100644 --- a/tests/pytests/unit/modules/test_aptpkg.py +++ b/tests/pytests/unit/modules/test_aptpkg.py @@ -1,11 +1,3 @@ -""" - :synopsis: Unit Tests for Advanced Packaging Tool module 'module.aptpkg' - :platform: Linux - :maturity: develop - versionadded:: 2017.7.0 -""" - - import copy import importlib import logging @@ -23,7 +15,8 @@ CommandNotFoundError, 
SaltInvocationError, ) -from tests.support.mock import MagicMock, Mock, call, mock_open, patch +from salt.utils.odict import OrderedDict +from tests.support.mock import MagicMock, Mock, call, patch try: from aptsources.sourceslist import ( # pylint: disable=unused-import @@ -275,6 +268,41 @@ def test_add_repo_key(repo_keys_var): ) +def test_add_repo_key_none_specified(repo_keys_var): + """ + Test - Add a repo key when we do not specify any arguments + """ + with patch( + "salt.modules.aptpkg.get_repo_keys", MagicMock(return_value=repo_keys_var) + ): + mock = MagicMock(return_value={"retcode": 0, "stdout": "OK"}) + with patch.dict(aptpkg.__salt__, {"cmd.run_all": mock}): + with pytest.raises(TypeError) as err: + aptpkg.add_repo_key() + assert err.value.args[0] == "add_repo_key() takes at least 1 argument (0 given)" + + +def test_add_repo_key_no_keyfile(repo_keys_var, caplog, tmp_path): + """ + Test - Add a repo key when aptkey is false + and keyfile not specified when using a keyserver + """ + with patch("salt.modules.aptpkg.get_repo_keys", MagicMock(return_value={})): + mock = MagicMock(return_value={"retcode": 0, "stdout": "OK"}) + with patch.dict(aptpkg.__salt__, {"cmd.run_all": mock}): + ret = aptpkg.add_repo_key( + keyserver="keyserver.ubuntu.com", + keyid="FBB75451", + keydir=tmp_path, + aptkey=False, + ) + assert ret is False + assert ( + "You must define the name of the key file to save the key" + in caplog.text + ) + + def test_add_repo_key_failed(repo_keys_var): """ Test - Add a repo key using incomplete input data. 
@@ -289,6 +317,56 @@ def test_add_repo_key_failed(repo_keys_var): aptpkg.add_repo_key(**kwargs) +def test_add_repo_key_keydir_not_exists(repo_keys_var, tmp_path, caplog): + """ + Test - Add a repo key when aptkey is False + and the keydir does not exist + """ + with patch( + "salt.modules.aptpkg.get_repo_keys", MagicMock(return_value=repo_keys_var) + ): + mock = MagicMock(return_value={"retcode": 0, "stdout": "OK"}) + with patch.dict(aptpkg.__salt__, {"cmd.run_all": mock}): + ret = aptpkg.add_repo_key( + keyserver="keyserver.ubuntu.com", + keyid="FBB75451", + keyfile="test-key.gpg", + aptkey=False, + keydir=str(tmp_path / "doesnotexist"), + ) + assert "does not exist. Please create this directory" in caplog.text + assert ret is False + + +@pytest.mark.parametrize( + "kwargs, err_msg", + [ + ( + {"keyid": "FBB75451", "keyfile": "test-key.gpg"}, + "No keyserver specified for keyid", + ), + ( + {"keyserver": "keyserver.ubuntu.com", "keyfile": "test-key.gpg"}, + "No keyid or keyid too short for keyserver", + ), + ], +) +def test_add_repo_key_keyserver_keyid_not_sepcified( + repo_keys_var, tmp_path, caplog, kwargs, err_msg +): + """ + Test - Add a repo key when and keyid is set without a keyserver + Also test when keyserver is set but without keyid + """ + short_key = list(repo_keys_var.keys())[0][-8:] + with patch("salt.modules.aptpkg.get_repo_keys", MagicMock(return_value={})): + mock = MagicMock(return_value={"retcode": 0, "stdout": "OK"}) + with patch.dict(aptpkg.__salt__, {"cmd.run_all": mock}): + with pytest.raises(SaltInvocationError) as err: + aptpkg.add_repo_key(**kwargs) + assert err_msg in err.value.message + + def test_get_repo_keys(repo_keys_var): """ Test - List known repo key details. 
@@ -370,6 +448,103 @@ def test_owner(): assert aptpkg.owner(*paths) == "wget" +def test_owner_no_path(): + """ + Test owner when path is not passed + """ + ret = aptpkg.owner() + assert ret == "" + + +def test_owner_doesnotexist(): + """ + Test owner when the path does not exist + """ + mock = MagicMock(return_value="") + with patch.dict(aptpkg.__salt__, {"cmd.run_stdout": mock}): + ret = aptpkg.owner("/doesnotexist") + assert ret == "" + + +def test_get_http_proxy_url_username_passwd(): + """ + Test _get_http_proxy_url when username and passwod set + """ + host = "repo.saltproject.io" + port = "888" + user = "user" + passwd = "password" + mock_conf = MagicMock() + mock_conf.side_effect = [host, port, user, passwd] + patch_conf = patch.dict(aptpkg.__salt__, {"config.option": mock_conf}) + with patch_conf: + ret = aptpkg._get_http_proxy_url() + assert ret == f"http://{user}:{passwd}@{host}:{port}" + + +def test_get_http_proxy_url(): + """ + Test basic functionality for _get_http_proxy_url + """ + host = "repo.saltproject.io" + port = "888" + user = "" + passwd = "" + mock_conf = MagicMock() + mock_conf.side_effect = [host, port, user, passwd] + patch_conf = patch.dict(aptpkg.__salt__, {"config.option": mock_conf}) + with patch_conf: + ret = aptpkg._get_http_proxy_url() + assert ret == f"http://{host}:{port}" + + +def test_get_http_proxy_url_empty(): + """ + Test _get_http_proxy_Url when host and port are empty + """ + host = "" + port = "" + user = "" + passwd = "" + mock_conf = MagicMock() + mock_conf.side_effect = [host, port, user, passwd] + patch_conf = patch.dict(aptpkg.__salt__, {"config.option": mock_conf}) + with patch_conf: + ret = aptpkg._get_http_proxy_url() + assert ret == "" + + +def test_list_upgrades(): + """ + Test basic functinoality for list_upgrades + """ + patch_data = patch("salt.utils.data.is_true", return_value=True) + patch_refresh = patch("salt.modules.aptpkg.refresh_db") + apt_ret = { + "pid": 2791, + "retcode": 0, + "stdout": "Reading 
package lists...\nBuilding dependency tree...\nReading state information...\nCalculating upgrade...\nThe following NEW packages will be installed:\n linux-cloud-tools-5.15.0-86 linux-cloud-tools-5.15.0-86-generic\n linux-headers-5.15.0-86 linux-headers-5.15.0-86-generic\n linux-image-5.15.0-86-generic linux-modules-5.15.0-86-generic\n linux-modules-extra-5.15.0-86-generic\nThe following packages have been kept back:\n libnetplan0 libsgutils2-2 netplan. io sg3-utils sg3-utils-udev\nThe following packages will be upgraded:\n linux-cloud-tools-virtual linux-generic linux-headers-generic\n linux-image-generic\n4 upgraded, 7 newly installed, 0 to remove and 5 not upgraded.\nInst linux-cloud-tools-5.15.0-86 (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nInst linux-cloud-tools-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nInst linux-cloud-tools-virtual [5.15.0.69.67] (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nInst linux-modules-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64]) []\nInst linux-image-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nInst linux-modules-extra-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nInst linux-generic [5.15.0.69.67] (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64]) []\nInst linux-image-generic [5.15.0.69.67] (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64]) []\nInst linux-headers-5.15.0-86 (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [all]) []\nInst linux-headers-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64]) []\nInst linux-headers-generic [5.15.0.69.67] (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf 
linux-cloud-tools-5.15.0-86 (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-cloud-tools-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-cloud-tools-virtual (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-modules-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-image-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-modules-extra-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-generic (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-image-generic (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-headers-5.15.0-86 (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [all])\nConf linux-headers-5.15.0-86-generic (5.15.0-86.96 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])\nConf linux-headers-generic (5.15.0.86.83 Ubuntu:22.04/jammy-updates, Ubuntu:22.04/jammy-security [amd64])", + "stderr": "Running scope as unit: run-r014f3eae66364254b1cdacf701f1ab73.scope", + } + mock_apt = MagicMock(return_value=apt_ret) + patch_apt = patch("salt.modules.aptpkg._call_apt", mock_apt) + with patch_data, patch_refresh, patch_apt: + ret = aptpkg.list_upgrades(dist_upgrade=False) + assert ret == { + "linux-cloud-tools-5.15.0-86": "5.15.0-86.96", + "linux-cloud-tools-5.15.0-86-generic": "5.15.0-86.96", + "linux-cloud-tools-virtual": "5.15.0.86.83", + "linux-modules-5.15.0-86-generic": "5.15.0-86.96", + "linux-image-5.15.0-86-generic": "5.15.0-86.96", + "linux-modules-extra-5.15.0-86-generic": "5.15.0-86.96", + "linux-generic": "5.15.0.86.83", + "linux-image-generic": "5.15.0.86.83", + "linux-headers-5.15.0-86": "5.15.0-86.96", + 
"linux-headers-5.15.0-86-generic": "5.15.0-86.96", + "linux-headers-generic": "5.15.0.86.83", + } + + def test_refresh_db(apt_q_update_var): """ Test - Updates the APT database to latest packages based upon repositories. @@ -434,6 +609,45 @@ def test_install(install_var): kwargs = {"force_conf_new": True} assert aptpkg.install(name="tmux", **kwargs) == install_var + patch_kwargs = { + "__salt__": { + "pkg_resource.parse_targets": MagicMock( + return_value=({"tmux": None}, "repository") + ), + "pkg_resource.sort_pkglist": MagicMock(), + "pkg_resource.stringify": MagicMock(), + "cmd.run_stdout": MagicMock(return_value="install ok installed python3\n"), + } + } + mock_call_apt_ret = { + "pid": 48174, + "retcode": 0, + "stdout": "Reading package lists...\nBuilding dependency tree...\nReading state information...\nvim is already the newest version (2:8.2.3995-1ubuntu2.4).\n", + "stderr": "Running scope as unit: run-rc2803bccd0b445a5b00788cd74b4e635.scope", + } + mock_call_apt = MagicMock(return_value=mock_call_apt_ret) + expected_call = call( + [ + "apt-get", + "-q", + "-y", + "-o", + "DPkg::Options::=--force-confold", + "-o", + "DPkg::Options::=--force-confdef", + "install", + "tmux", + ], + scope=True, + ) + with patch.multiple(aptpkg, **patch_kwargs): + with patch( + "salt.modules.aptpkg.get_selections", MagicMock(return_value={"hold": []}) + ): + with patch("salt.modules.aptpkg._call_apt", mock_call_apt): + ret = aptpkg.install(name="tmux", scope=True) + assert expected_call in mock_call_apt.mock_calls + def test_remove(uninstall_var): """ @@ -693,6 +907,12 @@ def test_mod_repo_enabled(): """ Checks if a repo is enabled or disabled depending on the passed kwargs. 
""" + source_type = "deb" + source_uri = "http://cdn-aws.deb.debian.org/debian/" + source_line = "deb http://cdn-aws.deb.debian.org/debian/ stretch main\n" + + mock_source = MockSourceEntry(source_uri, source_type, source_line, False) + with patch.dict( aptpkg.__salt__, {"config.option": MagicMock(), "no_proxy": MagicMock(return_value=False)}, @@ -703,7 +923,9 @@ def test_mod_repo_enabled(): ) as data_is_true: with patch("salt.modules.aptpkg.SourcesList", MagicMock(), create=True): with patch( - "salt.modules.aptpkg.SourceEntry", MagicMock(), create=True + "salt.modules.aptpkg.SourceEntry", + MagicMock(return_value=mock_source), + create=True, ): with patch("pathlib.Path", MagicMock()): repo = aptpkg.mod_repo("foo", enabled=False) @@ -751,14 +973,14 @@ def test_mod_repo_match(): with patch( "salt.modules.aptpkg._split_repo_str", MagicMock( - return_value=( - "deb", - [], - "http://cdn-aws.deb.debian.org/debian/", - "stretch", - ["main"], - "", - ) + return_value={ + "type": "deb", + "architectures": [], + "uri": "http://cdn-aws.deb.debian.org/debian/", + "dist": "stretch", + "comps": ["main"], + "signedby": "", + } ), ): source_line_no_slash = ( @@ -1073,6 +1295,20 @@ def test_expand_repo_def_cdrom(): ) +def test__expand_repo_def_not_repo(): + """ + Checks results from _expand_repo_def + when repo is not in kwargs + """ + with pytest.raises(SaltInvocationError) as err: + aptpkg._expand_repo_def( + os_name="debian", + os_codename="stretch", + architectures="amd64", + ) + assert err.value.message == "missing 'repo' argument" + + def test_list_pkgs(): """ Test packages listing. 
@@ -1251,17 +1487,17 @@ def test_call_apt_dpkg_lock(): ] cmd_mock = MagicMock(side_effect=cmd_side_effect) - cmd_call = ( + cmd_call = [ call( ["dpkg", "-l", "python"], - env={}, - ignore_retcode=False, output_loglevel="quiet", python_shell=True, + env={}, + ignore_retcode=False, username="Darth Vader", ), - ) - expected_calls = [cmd_call * 5] + ] + expected_calls = cmd_call * 5 with patch.dict( aptpkg.__salt__, @@ -1281,7 +1517,7 @@ def test_call_apt_dpkg_lock(): # We should attempt to call the cmd 5 times assert cmd_mock.call_count == 5 - cmd_mock.has_calls(expected_calls) + cmd_mock.assert_has_calls(expected_calls) def test_services_need_restart_checkrestart_missing(): @@ -1312,31 +1548,35 @@ def test_services_need_restart(): ] +@pytest.fixture +def _test_sourceslist_multiple_comps_fs(fs): + fs.create_dir("/etc/apt/sources.list.d") + fs.create_file( + "/etc/apt/sources.list", + contents="deb http://archive.ubuntu.com/ubuntu/ focal-updates main restricted", + ) + yield + + @pytest.mark.skipif( HAS_APTSOURCES is True, reason="Only run test with python3-apt library is missing." 
) +@pytest.mark.usefixtures("_test_sourceslist_multiple_comps_fs") def test_sourceslist_multiple_comps(): """ Test SourcesList when repo has multiple comps """ - repo_line = "deb http://archive.ubuntu.com/ubuntu/ focal-updates main restricted" with patch.object(aptpkg, "HAS_APT", return_value=True): - with patch("salt.utils.files.fopen", mock_open(read_data=repo_line)): - with patch("pathlib.Path.is_file", side_effect=[True, False]): - sources = aptpkg.SourcesList() - for source in sources: - assert source.type == "deb" - assert source.uri == "http://archive.ubuntu.com/ubuntu/" - assert source.comps == ["main", "restricted"] - assert source.dist == "focal-updates" + sources = aptpkg.SourcesList() + for source in sources: + assert source.type == "deb" + assert source.uri == "http://archive.ubuntu.com/ubuntu/" + assert source.comps == ["main", "restricted"] + assert source.dist == "focal-updates" -@pytest.mark.skipif( - HAS_APTSOURCES is True, reason="Only run test with python3-apt library is missing." -) -@pytest.mark.parametrize( - "repo_line", - [ +@pytest.fixture( + params=( "deb [ arch=amd64 ] http://archive.ubuntu.com/ubuntu/ focal-updates main restricted", "deb [arch=amd64 ] http://archive.ubuntu.com/ubuntu/ focal-updates main restricted", "deb [arch=amd64 test=one ] http://archive.ubuntu.com/ubuntu/ focal-updates main restricted", @@ -1344,21 +1584,722 @@ def test_sourceslist_multiple_comps(): "deb [ arch=amd64,armel test=one ] http://archive.ubuntu.com/ubuntu/ focal-updates main restricted", "deb [ arch=amd64,armel test=one] http://archive.ubuntu.com/ubuntu/ focal-updates main restricted", "deb [arch=amd64] http://archive.ubuntu.com/ubuntu/ focal-updates main restricted", - ], + ) +) +def repo_line(request, fs): + fs.create_dir("/etc/apt/sources.list.d") + fs.create_file("/etc/apt/sources.list", contents=request.param) + yield request.param + + +@pytest.mark.skipif( + HAS_APTSOURCES is True, reason="Only run test with python3-apt library is missing." 
) def test_sourceslist_architectures(repo_line): """ Test SourcesList when architectures is in repo """ - with patch("salt.utils.files.fopen", mock_open(read_data=repo_line)): - with patch("pathlib.Path.is_file", side_effect=[True, False]): - sources = aptpkg.SourcesList() - for source in sources: - assert source.type == "deb" - assert source.uri == "http://archive.ubuntu.com/ubuntu/" - assert source.comps == ["main", "restricted"] - assert source.dist == "focal-updates" - if "," in repo_line: - assert source.architectures == ["amd64", "armel"] - else: - assert source.architectures == ["amd64"] + sources = aptpkg.SourcesList() + for source in sources: + assert source.type == "deb" + assert source.uri == "http://archive.ubuntu.com/ubuntu/" + assert source.comps == ["main", "restricted"] + assert source.dist == "focal-updates" + if "," in repo_line: + assert source.architectures == ["amd64", "armel"] + else: + assert source.architectures == ["amd64"] + + +@pytest.mark.parametrize( + "pkg,arch", + [ + ("zsh", "amd64"), + ("php", "x86_64"), + ], +) +def test_parse_arch(pkg, arch): + """ + Test parse_arch when we pass in + valid package and arch names + """ + ret = aptpkg.parse_arch(f"{pkg}:{arch}") + assert ret == {"name": pkg, "arch": arch} + + +@pytest.mark.parametrize( + "pkg", + [ + "php", + ], +) +def test_parse_arch_invalid(pkg): + """ + Test parse_arch when we pass in + invalid package and arch names + """ + ret = aptpkg.parse_arch(f"{pkg}") + assert ret == {"name": pkg, "arch": None} + + +def test_latest_version_repo_kwarg(): + """ + Test latest_version when `repo` is passed in as a kwarg + """ + with pytest.raises(SaltInvocationError) as exc: + aptpkg.latest_version("php", repo="https://repo.com") + assert exc.value.message == "The 'repo' argument is invalid, use 'fromrepo' instead" + + +def test_latest_version_names_empty(): + """ + Test latest_version when names is empty + """ + ret = aptpkg.latest_version() + assert ret == "" + + +def 
test_latest_version_fromrepo(): + """ + test latest_version when `fromrepo` is passed in as a kwarg + """ + version = "5.15.0.86.83" + fromrepo = "jammy-updates" + list_ret = {"linux-cloud-tools-virtual": [version]} + apt_ret = { + "pid": 4361, + "retcode": 0, + "stdout": "linux-cloud-tools-virtual:\n" + f"Installed: 5.15.0.69.67\n Candidate: {version}\n Version" + f"table:\n {version} 990\n 990" + f"https://mirrors.edge.kernel.org/ubuntu {fromrepo}/main amd64" + "Packages\n 500 https://mirrors.edge.kernel.org/ubuntu" + "jammy-security/main amd64 Packages\n ***5.15.0.69.67 100\n" + "100 /var/lib/dpkg/status\n 5.15.0.25.27 500\n 500" + "https://mirrors.edge.kernel.org/ubuntu jammy/main amd64 Packages", + "stderr": "", + } + mock_apt = MagicMock(return_value=apt_ret) + patch_apt = patch("salt.modules.aptpkg._call_apt", mock_apt) + mock_list_pkgs = MagicMock(return_value=list_ret) + patch_list_pkgs = patch("salt.modules.aptpkg.list_pkgs", mock_list_pkgs) + with patch_apt, patch_list_pkgs: + ret = aptpkg.latest_version( + "linux-cloud-tools-virtual", + fromrepo=fromrepo, + refresh=False, + show_installed=True, + ) + assert ret == version + assert mock_apt.call_args == call( + [ + "apt-cache", + "-q", + "policy", + "linux-cloud-tools-virtual", + "-o", + f"APT::Default-Release={fromrepo}", + ], + scope=False, + ) + + +def test_latest_version_fromrepo_multiple_names(): + """ + test latest_version when multiple names of pkgs are passed + """ + version = "5.15.0.86.83" + fromrepo = "jammy-updates" + list_ret = { + "linux-cloud-tools-virtual": ["5.15.0.69.67"], + "linux-generic": ["5.15.0.69.67"], + } + apt_ret_cloud = { + "pid": 4361, + "retcode": 0, + "stdout": "linux-cloud-tools-virtual:\n" + f"Installed: 5.15.0.69.67\n Candidate: {version}\n Version" + f"table:\n {version} 990\n 990" + f"https://mirrors.edge.kernel.org/ubuntu {fromrepo}/main amd64" + "Packages\n 500 https://mirrors.edge.kernel.org/ubuntu" + "jammy-security/main amd64 Packages\n ***5.15.0.69.67 100\n" +
"100 /var/lib/dpkg/status\n 5.15.0.25.27 500\n 500" + "https://mirrors.edge.kernel.org/ubuntu jammy/main amd64 Packages", + "stderr": "", + } + apt_ret_generic = { + "pid": 4821, + "retcode": 0, + "stdout": "linux-generic:\n" + f"Installed: 5.15.0.69.67\n Candidate: {version}\n" + f"Version table:\n {version} 990\n 990" + "https://mirrors.edge.kernel.org/ubuntu" + "jammy-updates/main amd64 Packages\n 500" + "https://mirrors.edge.kernel.org/ubuntu" + "jammy-security/main amd64 Packages\n *** 5.15.0.69.67" + "100\n 100 /var/lib/dpkg/status\n 5.15.0.25.27" + "500\n 500 https://mirrors.edge.kernel.org/ubuntu" + "jammy/main amd64 Packages", + "stderr": "", + } + + mock_apt = MagicMock() + mock_apt.side_effect = [apt_ret_cloud, apt_ret_generic] + patch_apt = patch("salt.modules.aptpkg._call_apt", mock_apt) + mock_list_pkgs = MagicMock(return_value=list_ret) + patch_list_pkgs = patch("salt.modules.aptpkg.list_pkgs", mock_list_pkgs) + with patch_apt, patch_list_pkgs: + ret = aptpkg.latest_version( + "linux-cloud-tools-virtual", + "linux-generic", + fromrepo=fromrepo, + refresh=False, + show_installed=True, + ) + assert ret == {"linux-cloud-tools-virtual": version, "linux-generic": version} + assert mock_apt.call_args_list == [ + call( + [ + "apt-cache", + "-q", + "policy", + "linux-cloud-tools-virtual", + "-o", + "APT::Default-Release=jammy-updates", + ], + scope=False, + ), + call( + [ + "apt-cache", + "-q", + "policy", + "linux-generic", + "-o", + "APT::Default-Release=jammy-updates", + ], + scope=False, + ), + ] + + +def test_hold(): + """ + test aptpkg.hold() when passing in the name of a package + """ + set_sel = {"vim": {"old": "install", "new": "hold"}} + get_sel = {"hold": []} + patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel) + patch_set_sel = patch("salt.modules.aptpkg.set_selections", return_value=set_sel) + with patch_get_sel, patch_set_sel: + ret = aptpkg.hold("vim") + assert ret == { + "vim": { + "name": "vim", + "changes": 
{"old": "install", "new": "hold"}, + "result": True, + "comment": "Package vim is now being held.", + } + } + + +def test_hold_no_name_pkgs(): + """ + test aptpkg.hold when we do not pass in a name or list of pkgs + """ + with pytest.raises(SaltInvocationError) as err: + aptpkg.hold() + assert err.value.message == "One of name, pkgs, or sources must be specified." + + +def test_hold_pkgs_sources(): + """ + test aptpkg.hold when we we set sources and a list of pkgs. + """ + with pytest.raises(SaltInvocationError) as err: + aptpkg.hold( + pkgs=["vim", "apache2"], sources=["http://source1", "http://source2"] + ) + assert err.value.message == "Only one of pkgs or sources can be specified." + + +@pytest.mark.parametrize( + "sources", + [ + [ + OrderedDict( + [ + ( + "vim", + "https://mirrors.edge.kernel.org/ubuntu/pool/main/v/vim/vim_8.2.3995-1ubuntu2.12_amd64.deb", + ) + ] + ) + ], + [ + ( + "vim", + "https://mirrors.edge.kernel.org/ubuntu/pool/main/v/vim/vim_8.2.3995-1ubuntu2.12_amd64.deb", + ) + ], + ], +) +def test_hold_sources(sources): + """ + test aptpkg.hold when using sources + """ + set_sel = {"vim": {"old": "install", "new": "hold"}} + get_sel = {"hold": []} + patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel) + patch_set_sel = patch("salt.modules.aptpkg.set_selections", return_value=set_sel) + with patch_get_sel, patch_set_sel: + ret = aptpkg.hold(sources=sources) + assert ret == { + "vim": { + "name": "vim", + "changes": {"old": "install", "new": "hold"}, + "result": True, + "comment": "Package vim is now being held.", + } + } + + +def test_hold_true(): + """ + test aptpkg.hold() when passing in the name of a package + and test is True + """ + set_sel = {"vim": {"old": "install", "new": "hold"}} + get_sel = {"hold": []} + patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel) + patch_set_sel = patch("salt.modules.aptpkg.set_selections", return_value=set_sel) + with patch_get_sel, patch_set_sel: + 
with patch.dict(aptpkg.__opts__, {"test": True}): + ret = aptpkg.hold("vim") + assert ret == { + "vim": { + "name": "vim", + "changes": {}, + "result": None, + "comment": "Package vim is set to be held.", + } + } + + +def test_hold_already_set(): + """ + test aptpkg.hold() when the pkg is already set + """ + get_sel = {"hold": ["vim"]} + patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel) + with patch_get_sel: + ret = aptpkg.hold("vim") + assert ret == { + "vim": { + "name": "vim", + "changes": {}, + "result": True, + "comment": "Package vim is already set to be held.", + } + } + + +def test_hold_pkgs(): + """ + test aptpkg.hold() when passing in pkgs + """ + get_sel = {"hold": []} + mock_set_sel = MagicMock() + mock_set_sel.side_effect = [ + {"vim": {"old": "install", "new": "hold"}}, + {"vim-nox": {"old": "install", "new": "hold"}}, + ] + patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel) + patch_set_sel = patch("salt.modules.aptpkg.set_selections", mock_set_sel) + with patch_get_sel, patch_set_sel: + ret = aptpkg.hold(pkgs=["vim", "vim-nox"]) + assert ret == { + "vim": { + "name": "vim", + "changes": {"old": "install", "new": "hold"}, + "result": True, + "comment": "Package vim is now being held.", + }, + "vim-nox": { + "name": "vim-nox", + "changes": {"old": "install", "new": "hold"}, + "result": True, + "comment": "Package vim-nox is now being held.", + }, + } + + +def test_unhold(): + """ + test aptpkg.unhold when passing package as name + """ + set_sel = {"vim": {"old": "hold", "new": "install"}} + get_sel = {"hold": ["vim"]} + patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel) + patch_set_sel = patch("salt.modules.aptpkg.set_selections", return_value=set_sel) + with patch_get_sel, patch_set_sel: + ret = aptpkg.unhold("vim") + assert ret == { + "vim": { + "name": "vim", + "changes": {"old": "hold", "new": "install"}, + "result": True, + "comment": "Package vim is no
longer being held.", + } + } + + +def test_unhold_no_name_pkgs(): + """ + test aptpkg.unhold when we do not pass in a name or list of pkgs + """ + with pytest.raises(SaltInvocationError) as err: + aptpkg.unhold() + assert err.value.message == "One of name, pkgs, or sources must be specified." + + +def test_unhold_pkgs_sources(): + """ + test aptpkg.unhold when we we set sources and a list of pkgs. + """ + with pytest.raises(SaltInvocationError) as err: + aptpkg.unhold( + pkgs=["vim", "apache2"], sources=["http://source1", "http://source2"] + ) + assert err.value.message == "Only one of pkgs or sources can be specified." + + +@pytest.mark.parametrize( + "sources", + [ + [ + OrderedDict( + [ + ( + "vim", + "https://mirrors.edge.kernel.org/ubuntu/pool/main/v/vim/vim_8.2.3995-1ubuntu2.12_amd64.deb", + ) + ] + ) + ], + [ + ( + "vim", + "https://mirrors.edge.kernel.org/ubuntu/pool/main/v/vim/vim_8.2.3995-1ubuntu2.12_amd64.deb", + ) + ], + ], +) +def test_unhold_sources(sources): + """ + test aptpkg.unhold when using sources + """ + set_sel = {"vim": {"old": "hold", "new": "install"}} + get_sel = {"hold": ["vim"]} + patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel) + patch_set_sel = patch("salt.modules.aptpkg.set_selections", return_value=set_sel) + with patch_get_sel, patch_set_sel: + ret = aptpkg.unhold(sources=sources) + assert ret == { + "vim": { + "name": "vim", + "changes": {"old": "hold", "new": "install"}, + "result": True, + "comment": "Package vim is no longer being held.", + } + } + + +def test_unhold_true(): + """ + test aptpkg.unhold() when passing in the name of a package + and test is True + """ + set_sel = {"vim": {"old": "install", "new": "hold"}} + get_sel = {"hold": ["vim"]} + patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel) + patch_set_sel = patch("salt.modules.aptpkg.set_selections", return_value=set_sel) + with patch_get_sel, patch_set_sel: + with patch.dict(aptpkg.__opts__, {"test": 
True}): + ret = aptpkg.unhold("vim") + assert ret == { + "vim": { + "name": "vim", + "changes": {}, + "result": None, + "comment": "Package vim is set not to be held.", + } + } + + +def test_unhold_already_set(): + """ + test aptpkg.unhold() when the pkg is already set + """ + get_sel = {"install": ["vim"]} + patch_get_sel = patch("salt.modules.aptpkg.get_selections", return_value=get_sel) + with patch_get_sel: + ret = aptpkg.unhold("vim") + assert ret == { + "vim": { + "name": "vim", + "changes": {}, + "result": True, + "comment": "Package vim is already set not to be held.", + } + } + + +def test_unhold_pkgs(): + """ + test aptpkg.unhold() when passing in pkgs + """ + mock_get_sel = MagicMock() + mock_get_sel.side_effect = [{"hold": ["vim"]}, {"hold": ["vim-nox"]}] + mock_set_sel = MagicMock() + mock_set_sel.side_effect = [ + {"vim": {"old": "hold", "new": "install"}}, + {"vim-nox": {"old": "hold", "new": "install"}}, + ] + patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel) + patch_set_sel = patch("salt.modules.aptpkg.set_selections", mock_set_sel) + with patch_get_sel, patch_set_sel: + ret = aptpkg.unhold(pkgs=["vim", "vim-nox"]) + assert ret == { + "vim": { + "name": "vim", + "changes": {"old": "hold", "new": "install"}, + "result": True, + "comment": "Package vim is no longer being held.", + }, + "vim-nox": { + "name": "vim-nox", + "changes": {"old": "hold", "new": "install"}, + "result": True, + "comment": "Package vim-nox is no longer being held.", + }, + } + + +def test_get_key_from_id_keylength_not_valid(tmp_path, caplog): + """ + test _get_key_from_id when the keyid length is not valid + """ + ret = aptpkg._get_key_from_id(tmp_path, "FBB754512") + assert ret is False + assert "The keyid needs to be either 8 or 16 characters" in caplog.text + + +def test_get_key_from_id_not_added(tmp_path, caplog): + """ + test _get_key_from_id when the keyfile is not added + """ + ret = aptpkg._get_key_from_id(tmp_path, "FBB75451") + assert ret is
False + assert "Could not find the key file for keyid" in caplog.text + + +def test_del_repo_key_keydir_doesnotexist(tmp_path, caplog): + """ + test del_repo_key when keydir does not exist and aptkey is False + """ + ret = aptpkg.del_repo_key( + keyid="0E08A149DE57BFBE", keydir=str(tmp_path / "keydir"), aptkey=False + ) + assert ret is False + assert "does not exist. Please create this directory" in caplog.text + + +def test_del_repo_key_keyid_doesnotexist(tmp_path, caplog): + """ + test del_repo_key when keyid is not passed in + """ + with patch("salt.utils.path.which", return_value=False): + with pytest.raises(SaltInvocationError) as err: + ret = aptpkg.del_repo_key(keydir=tmp_path, aptkey=False) + + assert err.value.message == "keyid or keyid_ppa and PPA name must be passed" + + +def test_del_repo_key_keyfile_doesnotexist(tmp_path, caplog): + """ + test del_repo_key when keyfile does not exist + """ + with patch("salt.utils.path.which", return_value=False): + ret = aptpkg.del_repo_key( + keyid="0E08A149DE57BFBE", keydir=tmp_path, aptkey=False + ) + assert ret is False + + +def test_set_selections(): + """ + test set_selections() with valid state + """ + pkg = "salt-minion" + mock_get_sel = MagicMock( + return_value={ + "install": ["adduser", pkg, "apparmor"], + "deinstall": ["python3-json-pointer"], + } + ) + patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel) + mock_call_apt = MagicMock( + return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""} + ) + patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt) + patch_opts = patch.dict(aptpkg.__opts__, {"test": False}) + with patch_get_sel, patch_call_apt, patch_opts: + ret = aptpkg.set_selections(selection=f'{{"hold": [{pkg}]}}') + assert ret == {pkg: {"old": "install", "new": "hold"}} + + +def test_set_selections_no_path_selection(): + """ + test set_selections() when path or selection are not passed + """ + pkg = "salt-minion" + mock_get_sel = MagicMock( + 
return_value={ + "install": ["adduser", pkg, "apparmor"], + "deinstall": ["python3-json-pointer"], + } + ) + patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel) + mock_call_apt = MagicMock( + return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""} + ) + patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt) + patch_opts = patch.dict(aptpkg.__opts__, {"test": False}) + with patch_get_sel, patch_call_apt, patch_opts: + ret = aptpkg.set_selections() + assert ret == {} + + +def test_set_selections_path_and_selection(tmp_path): + """ + test set_selections() when path and selection are passed + """ + pkg = "salt-minion" + mock_get_sel = MagicMock( + return_value={ + "install": ["adduser", pkg, "apparmor"], + "deinstall": ["python3-json-pointer"], + } + ) + patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel) + mock_call_apt = MagicMock( + return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""} + ) + patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt) + patch_opts = patch.dict(aptpkg.__opts__, {"test": False}) + with patch_get_sel, patch_call_apt, patch_opts: + with pytest.raises(SaltInvocationError) as err: + ret = aptpkg.set_selections(selection=f'{{"hold": [{pkg}]}}', path=tmp_path) + assert "The 'selection' and 'path' arguments" in err.value.message + + +def test_set_selections_invalid_yaml(): + """ + test set_selections() with invalid yaml with selections + """ + pkg = "salt-minion" + mock_get_sel = MagicMock( + return_value={ + "install": ["adduser", pkg, "apparmor"], + "deinstall": ["python3-json-pointer"], + } + ) + patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel) + mock_call_apt = MagicMock( + return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""} + ) + patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt) + patch_opts = patch.dict(aptpkg.__opts__, {"test": False}) + with patch_get_sel, patch_call_apt, 
patch_opts: + with pytest.raises(SaltInvocationError) as err: + aptpkg.set_selections(selection='{{"hold": [{pkg}]}') + assert "Improperly-formatted selection" in err.value.message + + +def test_set_selections_path(tmp_path): + """ + test set_selections() with path + """ + pkg = "salt-minion" + select_file = tmp_path / "select" + mock_get_sel = MagicMock( + return_value={ + "install": ["adduser", pkg, "apparmor"], + "deinstall": ["python3-json-pointer"], + } + ) + patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel) + mock_call_apt = MagicMock( + return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""} + ) + patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt) + patch_opts = patch.dict(aptpkg.__opts__, {"test": False}) + patch_salt = patch.dict( + aptpkg.__salt__, {"cp.cache_file": MagicMock(return_value=select_file)} + ) + + with salt.utils.files.fopen(select_file, "w") as fp: + fp.write("salt-minion hold\n adduser hold") + with patch_get_sel, patch_call_apt, patch_opts, patch_salt: + ret = aptpkg.set_selections(path=str(select_file)) + assert ret == { + pkg: {"old": "install", "new": "hold"}, + "adduser": {"old": "install", "new": "hold"}, + } + + +def test_set_selections_invalid_state(): + """ + test set_selections() with invalid state + """ + pkg = "salt-minion" + mock_get_sel = MagicMock( + return_value={ + "install": ["adduser", pkg, "apparmor"], + "deinstall": ["python3-json-pointer"], + } + ) + patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel) + mock_call_apt = MagicMock( + return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""} + ) + patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt) + patch_opts = patch.dict(aptpkg.__opts__, {"test": False}) + with patch_get_sel, patch_call_apt, patch_opts: + with pytest.raises(SaltInvocationError) as err: + aptpkg.set_selections(selection=f'{{"doesnotexist": [{pkg}]}}') + + assert err.value.message == "Invalid 
state(s): doesnotexist" + + +def test_set_selections_test(): + """ + test set_selections() with valid state and test is True in opts + """ + pkg = "salt-minion" + mock_get_sel = MagicMock( + return_value={ + "install": ["adduser", pkg, "apparmor"], + "deinstall": ["python3-json-pointer"], + } + ) + patch_get_sel = patch("salt.modules.aptpkg.get_selections", mock_get_sel) + mock_call_apt = MagicMock( + return_value={"pid": 8748, "retcode": 0, "stdout": "", "stderr": ""} + ) + patch_call_apt = patch("salt.modules.aptpkg._call_apt", mock_call_apt) + patch_opts = patch.dict(aptpkg.__opts__, {"test": True}) + with patch_get_sel, patch_call_apt, patch_opts: + ret = aptpkg.set_selections(selection=f'{{"hold": [{pkg}]}}') + assert ret == {} diff --git a/tests/pytests/unit/modules/test_boto_dynamodb.py b/tests/pytests/unit/modules/test_boto_dynamodb.py new file mode 100644 index 000000000000..f5b983e13f0d --- /dev/null +++ b/tests/pytests/unit/modules/test_boto_dynamodb.py @@ -0,0 +1,92 @@ +""" + Test cases for salt.modules.boto_dynamodb +""" + + +import pytest + +import salt.modules.boto_dynamodb as boto_dynamodb +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {boto_dynamodb: {"__opts__": {}, "__utils__": {}}} + + +@pytest.fixture +def arn(): + return "arn:aws:dynamodb:us-east-1:012345678901:table/my-table" + + +@pytest.fixture +def tags(): + return {"foo": "bar", "hello": "world"} + + +@pytest.fixture +def tags_as_list(): + return [{"Key": "foo", "Value": "bar"}, {"Key": "hello", "Value": "world"}] + + +class DummyConn: + def __init__(self, tags_as_list): + self.list_tags_of_resource = MagicMock( + return_value={"Tags": tags_as_list, "NextToken": None} + ) + self.tag_resource = MagicMock(return_value=True) + self.untag_resource = MagicMock(return_value=True) + + +def test_list_tags_of_resource(arn, tags, tags_as_list): + """ + Test that the correct API call is made and correct return format is + returned. 
+ """ + conn = DummyConn(tags_as_list) + utils = {"boto3.get_connection": MagicMock(return_value=conn)} + with patch.dict(boto_dynamodb.__utils__, utils): + ret = boto_dynamodb.list_tags_of_resource(resource_arn=arn) + + assert ret == tags, ret + conn.list_tags_of_resource.assert_called_once_with(ResourceArn=arn, NextToken="") + + +def test_tag_resource(arn, tags, tags_as_list): + """ + Test that the correct API call is made and correct return format is + returned. + """ + conn = DummyConn(tags_as_list) + utils = {"boto3.get_connection": MagicMock(return_value=conn)} + with patch.dict(boto_dynamodb.__utils__, utils): + ret = boto_dynamodb.tag_resource(resource_arn=arn, tags=tags) + + assert ret is True, ret + # Account for differing dict iteration order among Python versions by + # being more explicit in asserts. + assert len(conn.tag_resource.mock_calls) == 1 + call = conn.tag_resource.mock_calls[0] + # No positional args + assert not call.args + # Make sure there aren't any additional kwargs beyond what we expect + assert len(call.kwargs) == 2 + assert call.kwargs["ResourceArn"] == arn + # Make sure there aren't any additional tags beyond what we expect + assert len(call.kwargs["Tags"]) == 2 + for tag_dict in tags_as_list: + assert tag_dict in call.kwargs["Tags"] + + +def test_untag_resource(arn, tags, tags_as_list): + """ + Test that the correct API call is made and correct return format is + returned. 
+ """ + conn = DummyConn(tags_as_list) + utils = {"boto3.get_connection": MagicMock(return_value=conn)} + with patch.dict(boto_dynamodb.__utils__, utils): + ret = boto_dynamodb.untag_resource(resource_arn=arn, tag_keys=sorted(tags)) + + assert ret is True, ret + conn.untag_resource.assert_called_once_with(ResourceArn=arn, TagKeys=sorted(tags)) diff --git a/tests/pytests/unit/modules/test_cassandra_cql.py b/tests/pytests/unit/modules/test_cassandra_cql.py index e9a1a271d1dc..6b39d2f0cc4d 100644 --- a/tests/pytests/unit/modules/test_cassandra_cql.py +++ b/tests/pytests/unit/modules/test_cassandra_cql.py @@ -15,6 +15,11 @@ log = logging.getLogger(__name__) +pytestmark = pytest.mark.skipif( + not cassandra_cql.HAS_DRIVER, reason="Cassandra CQL driver not loaded" +) + + @pytest.fixture def configure_loader_modules(): return {cassandra_cql: {}} diff --git a/tests/pytests/unit/modules/test_chocolatey.py b/tests/pytests/unit/modules/test_chocolatey.py index e9adc167b70a..ac2c73410456 100644 --- a/tests/pytests/unit/modules/test_chocolatey.py +++ b/tests/pytests/unit/modules/test_chocolatey.py @@ -1,8 +1,6 @@ """ Test for the chocolatey module """ - - import os import pytest @@ -25,12 +23,17 @@ def choco_path(): @pytest.fixture(scope="module") -def choco_path_pd(): +def chocolatey_path_pd(): return os.path.join( os.environ.get("ProgramData"), "Chocolatey", "bin", "chocolatey.exe" ) +@pytest.fixture(scope="module") +def choco_path_pd(): + return os.path.join(os.environ.get("ProgramData"), "Chocolatey", "bin", "choco.exe") + + @pytest.fixture(scope="module") def choco_path_sd(): return os.path.join( @@ -133,12 +136,28 @@ def test__find_chocolatey_which(choco_path): assert chocolatey.__context__["chocolatey._path"] == expected -def test__find_chocolatey_programdata(mock_false, mock_true, choco_path_pd): +def test__find_chocolatey_programdata(mock_false, mock_true, chocolatey_path_pd): """ - Test _find_chocolatey when found in ProgramData + Test _find_chocolatey when found in 
ProgramData and named chocolatey.exe """ with patch.dict(chocolatey.__salt__, {"cmd.which": mock_false}), patch( "os.path.isfile", mock_true + ): + result = chocolatey._find_chocolatey() + expected = chocolatey_path_pd + # Does it return the correct path + assert result == expected + # Does it populate __context__ + assert chocolatey.__context__["chocolatey._path"] == expected + + +def test__find_choco_programdata(mock_false, choco_path_pd): + """ + Test _find_chocolatey when found in ProgramData and named choco.exe + """ + mock_is_file = MagicMock(side_effect=[False, True]) + with patch.dict(chocolatey.__salt__, {"cmd.which": mock_false}), patch( + "os.path.isfile", mock_is_file ): result = chocolatey._find_chocolatey() expected = choco_path_pd @@ -153,7 +172,7 @@ def test__find_chocolatey_systemdrive(mock_false, choco_path_sd): Test _find_chocolatey when found on SystemDrive (older versions) """ with patch.dict(chocolatey.__salt__, {"cmd.which": mock_false}), patch( - "os.path.isfile", MagicMock(side_effect=[False, True]) + "os.path.isfile", MagicMock(side_effect=[False, False, True]) ): result = chocolatey._find_chocolatey() expected = choco_path_sd @@ -169,7 +188,7 @@ def test_version_check_remote_false(): """ list_return_value = {"ack": ["3.1.1"]} with patch.object(chocolatey, "list_", return_value=list_return_value): - expected = {"ack": ["3.1.1"]} + expected = {"ack": {"installed": ["3.1.1"]}} result = chocolatey.version("ack", check_remote=False) assert result == expected @@ -241,3 +260,75 @@ def test_add_source(choco_path): "source_name", "source_location", priority="priority" ) cmd_run_all_mock.assert_called_with(expected_call, python_shell=False) + + +def test_list_pre_2_0_0(): + mock_version = MagicMock(return_value="1.2.1") + mock_find = MagicMock(return_value=choco_path) + mock_run = MagicMock(return_value={"stdout": "No packages", "retcode": 0}) + with patch.object(chocolatey, "chocolatey_version", mock_version), patch.object( + chocolatey, 
"_find_chocolatey", mock_find + ), patch.dict(chocolatey.__salt__, {"cmd.run_all": mock_run}): + chocolatey.list_() + expected_call = [choco_path, "list", "--limit-output"] + mock_run.assert_called_with(expected_call, python_shell=False) + + +def test_list_post_2_0_0(): + mock_version = MagicMock(return_value="2.0.1") + mock_find = MagicMock(return_value=choco_path) + mock_run = MagicMock(return_value={"stdout": "No packages", "retcode": 0}) + with patch.object(chocolatey, "chocolatey_version", mock_version), patch.object( + chocolatey, "_find_chocolatey", mock_find + ), patch.dict(chocolatey.__salt__, {"cmd.run_all": mock_run}): + chocolatey.list_() + expected_call = [choco_path, "search", "--limit-output"] + mock_run.assert_called_with(expected_call, python_shell=False) + + +def test_list_webpi_pre_2_0_0(): + mock_version = MagicMock(return_value="1.2.1") + mock_find = MagicMock(return_value=choco_path) + mock_run = MagicMock(return_value={"stdout": "No packages", "retcode": 0}) + with patch.object(chocolatey, "chocolatey_version", mock_version), patch.object( + chocolatey, "_find_chocolatey", mock_find + ), patch.dict(chocolatey.__salt__, {"cmd.run_all": mock_run}): + chocolatey.list_webpi() + expected_call = [choco_path, "list", "--source", "webpi"] + mock_run.assert_called_with(expected_call, python_shell=False) + + +def test_list_webpi_post_2_0_0(): + mock_version = MagicMock(return_value="2.0.1") + mock_find = MagicMock(return_value=choco_path) + mock_run = MagicMock(return_value={"stdout": "No packages", "retcode": 0}) + with patch.object(chocolatey, "chocolatey_version", mock_version), patch.object( + chocolatey, "_find_chocolatey", mock_find + ), patch.dict(chocolatey.__salt__, {"cmd.run_all": mock_run}): + chocolatey.list_webpi() + expected_call = [choco_path, "search", "--source", "webpi"] + mock_run.assert_called_with(expected_call, python_shell=False) + + +def test_list_windowsfeatures_pre_2_0_0(): + mock_version = MagicMock(return_value="1.2.1") + 
mock_find = MagicMock(return_value=choco_path) + mock_run = MagicMock(return_value={"stdout": "No packages", "retcode": 0}) + with patch.object(chocolatey, "chocolatey_version", mock_version), patch.object( + chocolatey, "_find_chocolatey", mock_find + ), patch.dict(chocolatey.__salt__, {"cmd.run_all": mock_run}): + chocolatey.list_windowsfeatures() + expected_call = [choco_path, "list", "--source", "windowsfeatures"] + mock_run.assert_called_with(expected_call, python_shell=False) + + +def test_list_windowsfeatures_post_2_0_0(): + mock_version = MagicMock(return_value="2.0.1") + mock_find = MagicMock(return_value=choco_path) + mock_run = MagicMock(return_value={"stdout": "No packages", "retcode": 0}) + with patch.object(chocolatey, "chocolatey_version", mock_version), patch.object( + chocolatey, "_find_chocolatey", mock_find + ), patch.dict(chocolatey.__salt__, {"cmd.run_all": mock_run}): + chocolatey.list_windowsfeatures() + expected_call = [choco_path, "search", "--source", "windowsfeatures"] + mock_run.assert_called_with(expected_call, python_shell=False) diff --git a/tests/pytests/unit/modules/test_config.py b/tests/pytests/unit/modules/test_config.py new file mode 100644 index 000000000000..c522dc7897cc --- /dev/null +++ b/tests/pytests/unit/modules/test_config.py @@ -0,0 +1,186 @@ +""" + Test cases for salt.modules.config +""" + + +import fnmatch + +import pytest + +import salt.modules.config as config +from tests.support.mock import patch + + +@pytest.fixture +def defaults(): + return { + "test.option.foo": "value of test.option.foo in defaults", + "test.option.bar": "value of test.option.bar in defaults", + "test.option.baz": "value of test.option.baz in defaults", + "test.option": "value of test.option in defaults", + } + + +@pytest.fixture +def no_match(): + return "test.option.nope" + + +@pytest.fixture +def opt_name(): + return "test.option.foo" + + +@pytest.fixture +def wildcard_opt_name(): + return "test.option.b*" + + +@pytest.fixture +def 
configure_loader_modules(): + return { + config: { + "__opts__": { + "test.option.foo": "value of test.option.foo in __opts__", + "test.option.bar": "value of test.option.bar in __opts__", + "test.option.baz": "value of test.option.baz in __opts__", + }, + "__pillar__": { + "test.option.foo": "value of test.option.foo in __pillar__", + "test.option.bar": "value of test.option.bar in __pillar__", + "test.option.baz": "value of test.option.baz in __pillar__", + "master": { + "test.option.foo": "value of test.option.foo in master", + "test.option.bar": "value of test.option.bar in master", + "test.option.baz": "value of test.option.baz in master", + }, + }, + "__grains__": { + "test.option.foo": "value of test.option.foo in __grains__", + "test.option.bar": "value of test.option.bar in __grains__", + "test.option.baz": "value of test.option.baz in __grains__", + }, + } + } + + +def _wildcard_match(data, wildcard_opt_name): + return {x: data[x] for x in fnmatch.filter(data, wildcard_opt_name)} + + +def test_defaults_only_name(defaults): + with patch.dict(config.DEFAULTS, defaults): + opt_name = "test.option" + opt = config.option(opt_name) + assert opt == config.DEFAULTS[opt_name] + + +def test_no_match(defaults, no_match, wildcard_opt_name): + """ + Make sure that the default value is returned when the option does not match + """ + with patch.dict(config.DEFAULTS, defaults): + ret = config.option(no_match) + assert ret == "", ret + + default = "wat" + ret = config.option(no_match, default=default) + assert ret == default, ret + + ret = config.option(no_match, wildcard=True) + assert ret == {}, ret + + default = {"foo": "bar"} + ret = config.option(no_match, default=default, wildcard=True) + assert ret == default, ret + + # Should be no match since wildcard=False + ret = config.option(wildcard_opt_name) + assert ret == "", ret + + +def test_omits(defaults, opt_name, wildcard_opt_name): + with patch.dict(config.DEFAULTS, defaults): + + # ********** OMIT NOTHING ********** + + # Match should be in __opts__ dict + ret = 
config.option(opt_name) + assert ret == config.__opts__[opt_name], ret + + # Wildcard match + ret = config.option(wildcard_opt_name, wildcard=True) + assert ret == _wildcard_match(config.__opts__, wildcard_opt_name), ret + + # ********** OMIT __opts__ ********** + + # Match should be in __grains__ dict + ret = config.option(opt_name, omit_opts=True) + assert ret == config.__grains__[opt_name], ret + + # Wildcard match + ret = config.option(wildcard_opt_name, omit_opts=True, wildcard=True) + assert ret == _wildcard_match(config.__grains__, wildcard_opt_name), ret + + # ********** OMIT __opts__, __grains__ ********** + + # Match should be in __pillar__ dict + ret = config.option(opt_name, omit_opts=True, omit_grains=True) + assert ret == config.__pillar__[opt_name], ret + + # Wildcard match + ret = config.option( + wildcard_opt_name, omit_opts=True, omit_grains=True, wildcard=True + ) + assert ret == _wildcard_match(config.__pillar__, wildcard_opt_name), ret + + # ********** OMIT __opts__, __grains__, __pillar__ ********** + + # Match should be in master opts + ret = config.option( + opt_name, omit_opts=True, omit_grains=True, omit_pillar=True + ) + assert ret == config.__pillar__["master"][opt_name], ret + + # Wildcard match + ret = config.option( + wildcard_opt_name, + omit_opts=True, + omit_grains=True, + omit_pillar=True, + wildcard=True, + ) + assert ret == _wildcard_match( + config.__pillar__["master"], wildcard_opt_name + ), ret + + # ********** OMIT ALL THE THINGS ********** + + # Match should be in master opts + ret = config.option( + opt_name, + omit_opts=True, + omit_grains=True, + omit_pillar=True, + omit_master=True, + ) + assert ret == config.DEFAULTS[opt_name], ret + + # Wildcard match + ret = config.option( + wildcard_opt_name, + omit_opts=True, + omit_grains=True, + omit_pillar=True, + omit_master=True, + wildcard=True, + ) + assert ret == _wildcard_match(config.DEFAULTS, wildcard_opt_name), ret + + # Match should be in master opts + ret = 
config.option(opt_name, omit_all=True) + assert ret == config.DEFAULTS[opt_name], ret + + # Wildcard match + ret = config.option(wildcard_opt_name, omit_all=True, wildcard=True) + assert ret == _wildcard_match(config.DEFAULTS, wildcard_opt_name), ret diff --git a/tests/pytests/unit/modules/test_cryptdev.py b/tests/pytests/unit/modules/test_cryptdev.py new file mode 100644 index 000000000000..c5932b5f369b --- /dev/null +++ b/tests/pytests/unit/modules/test_cryptdev.py @@ -0,0 +1,38 @@ +import pytest + +import salt.modules.cryptdev as cryptdev +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(minion_opts): + return {cryptdev: {"__opts__": minion_opts}} + + +def test_active(caplog): + with patch.dict( + cryptdev.__salt__, + {"cmd.run_stdout": MagicMock(return_value="my-device (253, 1)\n")}, + ): + assert cryptdev.active() == { + "my-device": { + "devname": "my-device", + "major": "253", + "minor": "1", + } + } + + # Debian output when no devices are set up. + with patch.dict(cryptdev.__salt__, {"cmd.run_stdout": MagicMock(return_value="")}): + caplog.clear() + assert cryptdev.active() == {} + assert "dmsetup output does not match expected format" in caplog.text + + # CentOS output of dmsetup when no devices are set up. 
+ with patch.dict( + cryptdev.__salt__, + {"cmd.run_stdout": MagicMock(return_value="No devices found")}, + ): + caplog.clear() + assert cryptdev.active() == {} + assert "dmsetup output does not match expected format" in caplog.text diff --git a/tests/pytests/unit/modules/test_deb_postgres.py b/tests/pytests/unit/modules/test_deb_postgres.py new file mode 100644 index 000000000000..f125f96707a9 --- /dev/null +++ b/tests/pytests/unit/modules/test_deb_postgres.py @@ -0,0 +1,165 @@ +import logging + +import pytest + +import salt.modules.deb_postgres as deb_postgres +from tests.support.mock import Mock, patch + +log = logging.getLogger(__name__) + +pytestmark = [ + pytest.mark.skip_unless_on_linux(reason="Only supported on Linux family"), +] + + +@pytest.fixture +def get_lscuster(): + return """\ +8.4 main 5432 online postgres /srv/8.4/main \ + /var/log/postgresql/postgresql-8.4-main.log +9.1 main 5433 online postgres /srv/9.1/main \ + /var/log/postgresql/postgresql-9.1-main.log +""" + + +@pytest.fixture +def configure_loader_modules(get_lscuster): + return { + deb_postgres: { + "__salt__": { + "config.option": Mock(), + "cmd.run_all": Mock(return_value={"stdout": get_lscuster}), + "file.chown": Mock(), + "file.remove": Mock(), + } + } + } + + +def test_cluster_create(): + with patch("salt.utils.path.which", Mock(return_value="/usr/bin/pg_createcluster")): + expected_cmdstr = ( + "/usr/bin/pg_createcluster " + "--port 5432 --locale fr_FR --encoding UTF-8 " + "--datadir /opt/postgresql " + "9.3 main" + ) + + deb_postgres.cluster_create( + "9.3", + "main", + port="5432", + locale="fr_FR", + encoding="UTF-8", + datadir="/opt/postgresql", + ) + assert deb_postgres.__salt__["cmd.run_all"].call_args[0][0] == expected_cmdstr + + +def test_cluster_create_with_initdb_options(): + with patch("salt.utils.path.which", Mock(return_value="/usr/bin/pg_createcluster")): + expected_cmdstr = ( + "/usr/bin/pg_createcluster " + "--port 5432 --locale fr_FR --encoding UTF-8 " + "--datadir 
/opt/postgresql " + "11 main " + "-- " + "--allow-group-access " + "--data-checksums " + "--wal-segsize 32" + ) + + deb_postgres.cluster_create( + "11", + "main", + port="5432", + locale="fr_FR", + encoding="UTF-8", + datadir="/opt/postgresql", + allow_group_access=True, + data_checksums=True, + wal_segsize="32", + ) + assert deb_postgres.__salt__["cmd.run_all"].call_args[0][0] == expected_cmdstr + + +def test_cluster_create_with_float(): + with patch("salt.utils.path.which", Mock(return_value="/usr/bin/pg_createcluster")): + expected_cmdstr = ( + "/usr/bin/pg_createcluster " + "--port 5432 --locale fr_FR --encoding UTF-8 " + "--datadir /opt/postgresql " + "9.3 main" + ) + + deb_postgres.cluster_create( + 9.3, + "main", + port="5432", + locale="fr_FR", + encoding="UTF-8", + datadir="/opt/postgresql", + ) + assert deb_postgres.__salt__["cmd.run_all"].call_args[0][0] == expected_cmdstr + + +def test_parse_pg_lsclusters(get_lscuster): + with patch("salt.utils.path.which", Mock(return_value="/usr/bin/pg_lsclusters")): + stdout = get_lscuster + maxDiff = None + expected = { + "8.4/main": { + "port": 5432, + "status": "online", + "user": "postgres", + "datadir": "/srv/8.4/main", + "log": "/var/log/postgresql/postgresql-8.4-main.log", + }, + "9.1/main": { + "port": 5433, + "status": "online", + "user": "postgres", + "datadir": "/srv/9.1/main", + "log": "/var/log/postgresql/postgresql-9.1-main.log", + }, + } + assert deb_postgres._parse_pg_lscluster(stdout) == expected + + +def test_cluster_list(): + with patch("salt.utils.path.which", Mock(return_value="/usr/bin/pg_lsclusters")): + return_list = deb_postgres.cluster_list() + assert ( + deb_postgres.__salt__["cmd.run_all"].call_args[0][0] + == "/usr/bin/pg_lsclusters --no-header" + ) + + return_dict = deb_postgres.cluster_list(verbose=True) + assert isinstance(return_dict, dict) + + +def test_cluster_exists(): + assert deb_postgres.cluster_exists("8.4") + assert deb_postgres.cluster_exists("8.4", "main") + assert not 
deb_postgres.cluster_exists("3.4", "main") + + +def test_cluster_delete(): + with patch("salt.utils.path.which", Mock(return_value="/usr/bin/pg_dropcluster")): + deb_postgres.cluster_remove("9.3", "main") + assert ( + deb_postgres.__salt__["cmd.run_all"].call_args[0][0] + == "/usr/bin/pg_dropcluster 9.3 main" + ) + + deb_postgres.cluster_remove("9.3", "main", stop=True) + assert ( + deb_postgres.__salt__["cmd.run_all"].call_args[0][0] + == "/usr/bin/pg_dropcluster --stop 9.3 main" + ) + + deb_postgres.cluster_remove(9.3, "main", stop=True) + assert ( + deb_postgres.__salt__["cmd.run_all"].call_args[0][0] + == "/usr/bin/pg_dropcluster --stop 9.3 main" + ) diff --git a/tests/pytests/unit/modules/test_debian_ip.py b/tests/pytests/unit/modules/test_debian_ip.py index d569e8ace4ad..abbc0c61d8ec 100644 --- a/tests/pytests/unit/modules/test_debian_ip.py +++ b/tests/pytests/unit/modules/test_debian_ip.py @@ -1,4 +1,1213 @@ +import tempfile + +import jinja2.exceptions +import pytest + import salt.modules.debian_ip as debian_ip +import salt.utils.files +from tests.support.mock import MagicMock, patch + +try: + from salt.utils.odict import OrderedDict as odict +except ImportError: + from collections import OrderedDict as odict + +# Big pile of interface data for unit tests +# To skip, search for 'DebianIpTestCase' +# fmt: off + + +pytestmark = [ + pytest.mark.skip_on_windows(reason="Do not run these tests on Windows"), + pytest.mark.skip_on_darwin(reason="Do not run these tests on Mac"), +] + + +@pytest.fixture +def test_interfaces(): + return [ + # Structure + #{'iface_name': 'ethX', 'iface_type': 'eth', 'enabled': True, + # 'skip_test': bool(), # True to disable this test + # 'build_interface': dict(), # data read from sls + # 'get_interface(): OrderedDict(), # data read from interfaces file + # 'return': list()}, # jinja-rendered data + + # IPv4-only interface; single address + {'iface_name': 'eth1', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'proto': 
'static', + 'ipaddr': '192.168.4.9', + 'netmask': '255.255.255.0', + 'gateway': '192.168.4.1', + 'enable_ipv6': False, + 'noifupdown': True, + }, + 'get_interface': odict([('eth1', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'static'), + ('filename', None), + ('address', '192.168.4.9'), + ('netmask', '255.255.255.0'), + ('gateway', '192.168.4.1'), + ])), + ]))]))]), + 'return': [ + 'auto eth1\n', + 'iface eth1 inet static\n', + ' address 192.168.4.9\n', + ' netmask 255.255.255.0\n', + ' gateway 192.168.4.1\n', + '\n']}, + + # IPv6-only; single address + {'iface_name': 'eth2', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'ipv6proto': 'static', + 'ipv6ipaddr': '2001:db8:dead:beef::3', + 'ipv6netmask': '64', + 'ipv6gateway': '2001:db8:dead:beef::1', + 'enable_ipv6': True, + 'noifupdown': True, + }, + 'get_interface': odict([('eth2', odict([('enabled', True), ('data', odict([ + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'static'), + ('filename', None), + ('address', '2001:db8:dead:beef::3'), + ('netmask', 64), + ('gateway', '2001:db8:dead:beef::1'), + ])), + ]))]))]), + 'return': [ + 'auto eth2\n', + 'iface eth2 inet6 static\n', + ' address 2001:db8:dead:beef::3\n', + ' netmask 64\n', + ' gateway 2001:db8:dead:beef::1\n', + '\n']}, + + # IPv6-only; multiple addrs; no gw; first addr from ipv6addr + {'iface_name': 'eth3', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'ipv6proto': 'static', + 'ipv6ipaddr': '2001:db8:dead:beef::5/64', + 'ipv6ipaddrs': [ + '2001:db8:dead:beef::7/64', + '2001:db8:dead:beef::8/64', + '2001:db8:dead:beef::9/64'], + 'enable_ipv6': True, + 'noifupdown': True, + }, + 'get_interface': odict([('eth3', odict([('enabled', True), ('data', odict([ + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'static'), + ('filename', None), + ('address', '2001:db8:dead:beef::5/64'), + ('addresses', [ + '2001:db8:dead:beef::7/64', + '2001:db8:dead:beef::8/64', + 
'2001:db8:dead:beef::9/64', + ]), + ])), + ]))]))]), + 'return': [ + 'auto eth3\n', + 'iface eth3 inet6 static\n', + ' address 2001:db8:dead:beef::5/64\n', + ' address 2001:db8:dead:beef::7/64\n', + ' address 2001:db8:dead:beef::8/64\n', + ' address 2001:db8:dead:beef::9/64\n', + '\n']}, + + # IPv6-only; multiple addresses + {'iface_name': 'eth4', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'ipv6proto': 'static', + 'ipv6ipaddrs': [ + '2001:db8:dead:beef::5/64', + '2001:db8:dead:beef::7/64', + '2001:db8:dead:beef::8/64', + '2001:db8:dead:beef::9/64'], + 'ipv6gateway': '2001:db8:dead:beef::1', + 'enable_ipv6': True, + 'noifupdown': True, + }, + 'get_interface': odict([('eth4', odict([('enabled', True), ('data', odict([ + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'static'), + ('filename', None), + ('address', '2001:db8:dead:beef::5/64'), + ('addresses', [ + '2001:db8:dead:beef::7/64', + '2001:db8:dead:beef::8/64', + '2001:db8:dead:beef::9/64', + ]), + ('gateway', '2001:db8:dead:beef::1'), + ])), + ]))]))]), + 'return': [ + 'auto eth4\n', + 'iface eth4 inet6 static\n', + ' address 2001:db8:dead:beef::5/64\n', + ' address 2001:db8:dead:beef::7/64\n', + ' address 2001:db8:dead:beef::8/64\n', + ' address 2001:db8:dead:beef::9/64\n', + ' gateway 2001:db8:dead:beef::1\n', + '\n']}, + + # IPv4 and IPv6 settings with v4 disabled + {'iface_name': 'eth5', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'proto': 'static', + 'ipaddr': '192.168.4.9', + 'netmask': '255.255.255.0', + 'gateway': '192.168.4.1', + 'ipv6proto': 'static', + 'ipv6ipaddr': '2001:db8:dead:beef::3', + 'ipv6netmask': '64', + 'ipv6gateway': '2001:db8:dead:beef::1', + 'enable_ipv4': False, + 'noifupdown': True, + }, + 'get_interface': odict([('eth5', odict([('enabled', True), ('data', odict([ + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'static'), + ('filename', None), + ('address', '2001:db8:dead:beef::3'), + ('netmask', 64), + ('gateway', 
'2001:db8:dead:beef::1'), + ])), + ]))]))]), + 'return': [ + 'auto eth5\n', + 'iface eth5 inet6 static\n', + ' address 2001:db8:dead:beef::3\n', + ' netmask 64\n', + ' gateway 2001:db8:dead:beef::1\n', + '\n']}, + + # IPv4 and IPv6 settings with v6 disabled + {'iface_name': 'eth6', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'proto': 'static', + 'ipaddr': '192.168.4.9', + 'netmask': '255.255.255.0', + 'gateway': '192.168.4.1', + 'ipv6proto': 'static', + 'ipv6ipaddr': '2001:db8:dead:beef::3', + 'ipv6netmask': '64', + 'ipv6gateway': '2001:db8:dead:beef::1', + 'enable_ipv6': False, + 'noifupdown': True, + }, + 'get_interface': odict([('eth6', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'static'), + ('filename', None), + ('address', '192.168.4.9'), + ('netmask', '255.255.255.0'), + ('gateway', '192.168.4.1'), + ])), + ]))]))]), + 'return': [ + 'auto eth6\n', + 'iface eth6 inet static\n', + ' address 192.168.4.9\n', + ' netmask 255.255.255.0\n', + ' gateway 192.168.4.1\n', + '\n']}, + + # IPv4 and IPv6; shared/overridden settings + {'iface_name': 'eth7', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'proto': 'static', + 'ipaddr': '192.168.4.9', + 'netmask': '255.255.255.0', + 'gateway': '192.168.4.1', + 'ipv6proto': 'static', + 'ipv6ipaddr': '2001:db8:dead:beef::3', + 'ipv6netmask': '64', + 'ipv6gateway': '2001:db8:dead:beef::1', + 'ttl': '18', # shared + 'ipv6ttl': '15', # overridden for v6 + 'mtu': '1480', # shared + 'enable_ipv6': True, + 'noifupdown': True, + }, + 'get_interface': odict([('eth7', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'static'), + ('filename', None), + ('address', '192.168.4.9'), + ('netmask', '255.255.255.0'), + ('gateway', '192.168.4.1'), + ('ttl', 18), + ('mtu', 1480), + ])), + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'static'), + ('filename', None), + ('address', '2001:db8:dead:beef::3'), 
+ ('netmask', 64), + ('gateway', '2001:db8:dead:beef::1'), + ('ttl', 15), + ('mtu', 1480), + ])), + ]))]))]), + 'return': [ + 'auto eth7\n', + 'iface eth7 inet static\n', + ' address 192.168.4.9\n', + ' netmask 255.255.255.0\n', + ' gateway 192.168.4.1\n', + ' ttl 18\n', + ' mtu 1480\n', + 'iface eth7 inet6 static\n', + ' address 2001:db8:dead:beef::3\n', + ' netmask 64\n', + ' gateway 2001:db8:dead:beef::1\n', + ' ttl 15\n', + ' mtu 1480\n', + '\n']}, + + # Slave iface + {'iface_name': 'eth8', 'iface_type': 'slave', 'enabled': True, + 'build_interface': { + 'master': 'bond0', + 'noifupdown': True, + }, + 'get_interface': odict([('eth8', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'manual'), + ('filename', None), + ('bonding', odict([ + ('master', 'bond0'), + ])), + ('bonding_keys', ['master']), + ])), + ]))]))]), + 'return': [ + 'auto eth8\n', + 'iface eth8 inet manual\n', + ' bond-master bond0\n', + '\n']}, + + # Bond; with address IPv4 and IPv6 address; slaves as string + {'iface_name': 'bond9', 'iface_type': 'bond', 'enabled': True, + 'build_interface': { + 'proto': 'static', + 'ipaddr': '10.1.0.14', + 'netmask': '255.255.255.0', + 'gateway': '10.1.0.1', + 'ipv6proto': 'static', + 'ipv6ipaddr': '2001:db8:dead:c0::3', + 'ipv6netmask': '64', + 'ipv6gateway': '2001:db8:dead:c0::1', + 'mode': '802.3ad', + 'slaves': 'eth4 eth5', + 'enable_ipv6': True, + 'noifupdown': True, + }, + 'get_interface': odict([('bond9', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'static'), + ('filename', None), + ('address', '10.1.0.14'), + ('netmask', '255.255.255.0'), + ('gateway', '10.1.0.1'), + ('bonding', odict([ + ('ad_select', '0'), + ('downdelay', '200'), + ('lacp_rate', '0'), + ('miimon', '100'), + ('mode', '4'), + ('slaves', 'eth4 eth5'), + ('updelay', '0'), + ('use_carrier', 'on'), + ])), + ('bonding_keys', [ + 'ad_select', + 'downdelay', + 'lacp_rate', + 'miimon', + 
'mode', + 'slaves', + 'updelay', + 'use_carrier', + ]), + ])), + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'static'), + ('filename', None), + ('address', '2001:db8:dead:c0::3'), + ('netmask', 64), + ('gateway', '2001:db8:dead:c0::1'), + ('bonding', odict([ + ('ad_select', '0'), + ('downdelay', '200'), + ('lacp_rate', '0'), + ('miimon', '100'), + ('mode', '4'), + ('slaves', 'eth4 eth5'), + ('updelay', '0'), + ('use_carrier', 'on'), + ])), + ('bonding_keys', [ + 'ad_select', + 'downdelay', + 'lacp_rate', + 'miimon', + 'mode', + 'slaves', + 'updelay', + 'use_carrier', + ]), + ])), + ]))]))]), + 'return': [ + 'auto bond9\n', + 'iface bond9 inet static\n', + ' address 10.1.0.14\n', + ' netmask 255.255.255.0\n', + ' gateway 10.1.0.1\n', + ' bond-ad_select 0\n', + ' bond-downdelay 200\n', + ' bond-lacp_rate 0\n', + ' bond-miimon 100\n', + ' bond-mode 4\n', + ' bond-slaves eth4 eth5\n', + ' bond-updelay 0\n', + ' bond-use_carrier on\n', + 'iface bond9 inet6 static\n', + ' address 2001:db8:dead:c0::3\n', + ' netmask 64\n', + ' gateway 2001:db8:dead:c0::1\n', + ' bond-ad_select 0\n', + ' bond-downdelay 200\n', + ' bond-lacp_rate 0\n', + ' bond-miimon 100\n', + ' bond-mode 4\n', + ' bond-slaves eth4 eth5\n', + ' bond-updelay 0\n', + ' bond-use_carrier on\n', + '\n']}, + + # Bond; with address IPv4 and IPv6 address; slaves as list + {'iface_name': 'bond10', 'iface_type': 'bond', 'enabled': True, + 'build_interface': { + 'proto': 'static', + 'ipaddr': '10.1.0.14', + 'netmask': '255.255.255.0', + 'gateway': '10.1.0.1', + 'ipv6proto': 'static', + 'ipv6ipaddr': '2001:db8:dead:c0::3', + 'ipv6netmask': '64', + 'ipv6gateway': '2001:db8:dead:c0::1', + 'mode': '802.3ad', + 'slaves': ['eth4', 'eth5'], + 'enable_ipv6': True, + 'noifupdown': True, + }, + 'get_interface': odict([('bond10', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'static'), + ('filename', None), + ('address', '10.1.0.14'), + ('netmask', '255.255.255.0'), + 
('gateway', '10.1.0.1'), + ('bonding', odict([ + ('ad_select', '0'), + ('downdelay', '200'), + ('lacp_rate', '0'), + ('miimon', '100'), + ('mode', '4'), + ('slaves', 'eth4 eth5'), + ('updelay', '0'), + ('use_carrier', 'on'), + ])), + ('bonding_keys', [ + 'ad_select', + 'downdelay', + 'lacp_rate', + 'miimon', + 'mode', + 'slaves', + 'updelay', + 'use_carrier', + ]), + ])), + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'static'), + ('filename', None), + ('address', '2001:db8:dead:c0::3'), + ('netmask', 64), + ('gateway', '2001:db8:dead:c0::1'), + ('bonding', odict([ + ('ad_select', '0'), + ('downdelay', '200'), + ('lacp_rate', '0'), + ('miimon', '100'), + ('mode', '4'), + ('slaves', 'eth4 eth5'), + ('updelay', '0'), + ('use_carrier', 'on'), + ])), + ('bonding_keys', [ + 'ad_select', + 'downdelay', + 'lacp_rate', + 'miimon', + 'mode', + 'slaves', + 'updelay', + 'use_carrier', + ]), + ])), + ]))]))]), + 'return': [ + 'auto bond10\n', + 'iface bond10 inet static\n', + ' address 10.1.0.14\n', + ' netmask 255.255.255.0\n', + ' gateway 10.1.0.1\n', + ' bond-ad_select 0\n', + ' bond-downdelay 200\n', + ' bond-lacp_rate 0\n', + ' bond-miimon 100\n', + ' bond-mode 4\n', + ' bond-slaves eth4 eth5\n', + ' bond-updelay 0\n', + ' bond-use_carrier on\n', + 'iface bond10 inet6 static\n', + ' address 2001:db8:dead:c0::3\n', + ' netmask 64\n', + ' gateway 2001:db8:dead:c0::1\n', + ' bond-ad_select 0\n', + ' bond-downdelay 200\n', + ' bond-lacp_rate 0\n', + ' bond-miimon 100\n', + ' bond-mode 4\n', + ' bond-slaves eth4 eth5\n', + ' bond-updelay 0\n', + ' bond-use_carrier on\n', + '\n']}, + + # Bond VLAN; with IPv4 address + {'iface_name': 'bond0.11', 'iface_type': 'vlan', 'enabled': True, + 'build_interface': { + 'proto': 'static', + 'ipaddr': '10.7.0.8', + 'netmask': '255.255.255.0', + 'gateway': '10.7.0.1', + 'slaves': 'eth6 eth7', + 'mode': '802.3ad', + 'enable_ipv6': False, + 'noifupdown': True, + }, + 'get_interface': odict([('bond0.11', odict([('enabled', True), 
('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'static'), + ('filename', None), + ('vlan_raw_device', 'bond1'), + ('address', '10.7.0.8'), + ('netmask', '255.255.255.0'), + ('gateway', '10.7.0.1'), + ('mode', '802.3ad'), + ])), + ]))]))]), + 'return': [ + 'auto bond0.11\n', + 'iface bond0.11 inet static\n', + ' vlan-raw-device bond1\n', + ' address 10.7.0.8\n', + ' netmask 255.255.255.0\n', + ' gateway 10.7.0.1\n', + ' mode 802.3ad\n', + '\n']}, + + # Bond; without address + {'iface_name': 'bond0.12', 'iface_type': 'vlan', 'enabled': True, + 'build_interface': { + 'proto': 'static', + 'slaves': 'eth6 eth7', + 'mode': '802.3ad', + 'enable_ipv6': False, + 'noifupdown': True, + }, + 'get_interface': odict([('bond0.12', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'static'), + ('filename', None), + ('vlan_raw_device', 'bond1'), + ('mode', '802.3ad'), + ])), + ]))]))]), + 'return': [ + 'auto bond0.12\n', + 'iface bond0.12 inet static\n', + ' vlan-raw-device bond1\n', + ' mode 802.3ad\n', + '\n']}, + + # Bridged interface + {'iface_name': 'br0', 'iface_type': 'bridge', 'enabled': True, + 'build_interface': { + 'proto': 'static', + 'ipaddr': '192.168.4.10', + 'netmask': '255.255.255.0', + 'gateway': '192.168.4.1', + 'bridge_ports': 'eth1', + 'enable_ipv6': False, + 'noifupdown': True, + }, + 'get_interface': odict([('br0', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'static'), + ('filename', None), + ('address', '192.168.4.10'), + ('netmask', '255.255.255.0'), + ('gateway', '192.168.4.1'), + ('bridging', odict([ + ('ports', 'eth1'), + ])), + ('bridging_keys', ['ports']), + ])), + ]))]))]), + 'return': [ + 'auto br0\n', + 'iface br0 inet static\n', + ' address 192.168.4.10\n', + ' netmask 255.255.255.0\n', + ' gateway 192.168.4.1\n', + ' bridge_ports eth1\n', + '\n']}, + + + # DNS NS as list + {'iface_name': 'eth13', 'iface_type': 'eth', 
'enabled': True, + 'build_interface': { + 'proto': 'static', + 'ipaddr': '192.168.4.9', + 'netmask': '255.255.255.0', + 'gateway': '192.168.4.1', + 'enable_ipv6': False, + 'noifupdown': True, + 'dns': ['8.8.8.8', '8.8.4.4'], + }, + 'get_interface': odict([('eth13', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'static'), + ('filename', None), + ('address', '192.168.4.9'), + ('netmask', '255.255.255.0'), + ('gateway', '192.168.4.1'), + ('dns_nameservers', ['8.8.8.8', '8.8.4.4']), + ])), + ]))]))]), + 'return': [ + 'auto eth13\n', + 'iface eth13 inet static\n', + ' address 192.168.4.9\n', + ' netmask 255.255.255.0\n', + ' gateway 192.168.4.1\n', + ' dns-nameservers 8.8.8.8 8.8.4.4\n', + '\n']}, + + # DNS NS as string + {'iface_name': 'eth14', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'proto': 'static', + 'ipaddr': '192.168.4.9', + 'netmask': '255.255.255.0', + 'gateway': '192.168.4.1', + 'enable_ipv6': False, + 'noifupdown': True, + 'dns': '8.8.8.8 8.8.4.4', + }, + 'get_interface': odict([('eth14', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'static'), + ('filename', None), + ('address', '192.168.4.9'), + ('netmask', '255.255.255.0'), + ('gateway', '192.168.4.1'), + ('dns_nameservers', ['8.8.8.8', '8.8.4.4']), + ])), + ]))]))]), + 'return': [ + 'auto eth14\n', + 'iface eth14 inet static\n', + ' address 192.168.4.9\n', + ' netmask 255.255.255.0\n', + ' gateway 192.168.4.1\n', + ' dns-nameservers 8.8.8.8 8.8.4.4\n', + '\n']}, + + # Loopback; with IPv4 and IPv6 address + {'iface_name': 'lo15', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'proto': 'loopback', + 'ipaddr': '192.168.4.9', + 'netmask': '255.255.255.0', + 'gateway': '192.168.4.1', + 'enable_ipv6': True, + 'ipv6proto': 'loopback', + 'ipv6ipaddr': 'fc00::1', + 'ipv6netmask': '128', + 'ipv6_autoconf': False, + 'noifupdown': True, + }, + 'get_interface': odict([('lo15', 
odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'loopback'), + ('filename', None), + ('address', '192.168.4.9'), + ('netmask', '255.255.255.0'), + ('gateway', '192.168.4.1'), + ])), + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'loopback'), + ('filename', None), + ('address', 'fc00::1'), + ('netmask', 128), + ])), + ]))]))]), + 'return': [ + 'auto lo15\n', + 'iface lo15 inet loopback\n', + ' address 192.168.4.9\n', + ' netmask 255.255.255.0\n', + ' gateway 192.168.4.1\n', + 'iface lo15 inet6 loopback\n', + ' address fc00::1\n', + ' netmask 128\n', + '\n']}, + + # Loopback; with only IPv6 address; enabled=False + {'iface_name': 'lo16', 'iface_type': 'eth', 'enabled': False, + 'build_interface': { + 'enable_ipv6': True, + 'ipv6proto': 'loopback', + 'ipv6ipaddr': 'fc00::1', + 'ipv6netmask': '128', + 'ipv6_autoconf': False, + 'noifupdown': True, + }, + 'get_interface': odict([('lo16', odict([('data', odict([ + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'loopback'), + ('filename', None), + ('address', 'fc00::1'), + ('netmask', 128), + ])), + ]))]))]), + 'return': [ + 'iface lo16 inet6 loopback\n', + ' address fc00::1\n', + ' netmask 128\n', + '\n']}, + + # Loopback; without address + {'iface_name': 'lo17', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'proto': 'loopback', + 'enable_ipv6': False, + 'noifupdown': True, + }, + 'get_interface': odict([('lo17', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'loopback'), + ('filename', None), + ])), + ]))]))]), + 'return': [ + 'auto lo17\n', + 'iface lo17 inet loopback\n', + '\n']}, + + # IPv4=DHCP; IPv6=Static; with IPv6 netmask + {'iface_name': 'eth18', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'proto': 'dhcp', + 'enable_ipv6': True, + 'ipv6proto': 'static', + 'ipv6ipaddr': '2001:db8:dead:c0::3', + 'ipv6netmask': '64', + 'ipv6gateway': '2001:db8:dead:c0::1', + 
'noifupdown': True, + }, + 'get_interface': odict([('eth18', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'dhcp'), + ('filename', None), + ])), + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'static'), + ('filename', None), + ('address', '2001:db8:dead:c0::3'), + ('netmask', 64), + ('gateway', '2001:db8:dead:c0::1'), + ])), + ]))]))]), + 'return': [ + 'auto eth18\n', + 'iface eth18 inet dhcp\n', + 'iface eth18 inet6 static\n', + ' address 2001:db8:dead:c0::3\n', + ' netmask 64\n', + ' gateway 2001:db8:dead:c0::1\n', + '\n']}, + + # IPv4=DHCP; IPv6=Static; without IPv6 netmask + {'iface_name': 'eth19', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'proto': 'dhcp', + 'enable_ipv6': True, + 'ipv6proto': 'static', + 'ipv6ipaddr': '2001:db8:dead:c0::3/64', + 'ipv6gateway': '2001:db8:dead:c0::1', + 'noifupdown': True, + }, + 'get_interface': odict([('eth19', odict([('enabled', True), ('data', odict([ + ('inet', odict([ + ('addrfam', 'inet'), + ('proto', 'dhcp'), + ('filename', None), + ])), + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'static'), + ('filename', None), + ('address', '2001:db8:dead:c0::3/64'), + ('gateway', '2001:db8:dead:c0::1'), + ])), + ]))]))]), + 'return': [ + 'auto eth19\n', + 'iface eth19 inet dhcp\n', + 'iface eth19 inet6 static\n', + ' address 2001:db8:dead:c0::3/64\n', + ' gateway 2001:db8:dead:c0::1\n', + '\n']}, + + # IPv6-only; static with autoconf and accept_ra forced + {'iface_name': 'eth20', 'iface_type': 'eth', 'enabled': True, + 'build_interface': { + 'ipv6proto': 'static', + 'ipv6ipaddr': '2001:db8:dead:beef::3/64', + 'ipv6gateway': '2001:db8:dead:beef::1', + 'enable_ipv6': True, + 'autoconf': 1, + 'accept_ra': 2, + 'noifupdown': True, + }, + 'get_interface': odict([('eth20', odict([('enabled', True), ('data', odict([ + ('inet6', odict([ + ('addrfam', 'inet6'), + ('proto', 'static'), + ('filename', None), + ('autoconf', 1), + ('address', 
'2001:db8:dead:beef::3/64'), + ('gateway', '2001:db8:dead:beef::1'), + ('accept_ra', 2), + ])), + ]))]))]), + 'return': [ + 'auto eth20\n', + 'iface eth20 inet6 static\n', + ' autoconf 1\n', + ' address 2001:db8:dead:beef::3/64\n', + ' gateway 2001:db8:dead:beef::1\n', + ' accept_ra 2\n', + '\n']}, + ] +# fmt: on + + +@pytest.fixture +def configure_loader_modules(): + return {debian_ip: {}} + + +# 'build_bond' function tests: 3 + + +def test_build_bond(): + """ + Test if it create a bond script in /etc/modprobe.d with the passed + settings and load the bonding kernel module. + """ + with patch( + "salt.modules.debian_ip._parse_settings_bond", MagicMock(return_value={}) + ), patch("salt.modules.debian_ip._write_file", MagicMock(return_value=True)): + mock = MagicMock(return_value=1) + with patch.dict(debian_ip.__grains__, {"osrelease": mock}): + mock = MagicMock(return_value=True) + with patch.dict( + debian_ip.__salt__, {"kmod.load": mock, "pkg.install": mock} + ): + assert debian_ip.build_bond("bond0") == "" + + +def test_error_message_iface_should_process_non_str_expected(): + values = [1, True, False, "no-kaboom"] + iface = "ethtest" + option = "test" + msg = debian_ip._error_msg_iface(iface, option, values) + assert msg.endswith("[1|True|False|no-kaboom]"), msg + + +def test_error_message_network_should_process_non_str_expected(): + values = [1, True, False, "no-kaboom"] + msg = debian_ip._error_msg_network("fnord", values) + assert msg.endswith("[1|True|False|no-kaboom]"), msg + + +def test_build_bond_exception(): + """ + Test if it create a bond script in /etc/modprobe.d with the passed + settings and load the bonding kernel module. 
+ """ + with patch( + "salt.modules.debian_ip._parse_settings_bond", MagicMock(return_value={}) + ): + mock = MagicMock(return_value=1) + with patch.dict(debian_ip.__grains__, {"osrelease": mock}): + mock = MagicMock(side_effect=jinja2.exceptions.TemplateNotFound("error")) + with patch.object(jinja2.Environment, "get_template", mock): + assert debian_ip.build_bond("bond0") == "" + + +def test_build_bond_data(): + """ + Test if it create a bond script in /etc/modprobe.d with the passed + settings and load the bonding kernel module. + """ + with patch( + "salt.modules.debian_ip._parse_settings_bond", MagicMock(return_value={}) + ), patch("salt.modules.debian_ip._read_temp", MagicMock(return_value=True)): + mock = MagicMock(return_value=1) + with patch.dict(debian_ip.__grains__, {"osrelease": mock}): + assert debian_ip.build_bond("bond0", test="True") + + +# 'build_routes' function tests: 2 + + +def test_build_routes(): + """ + Test if it add route scripts for a network interface using up commands. + """ + with patch( + "salt.modules.debian_ip._parse_routes", + MagicMock(return_value={"routes": []}), + ), patch( + "salt.modules.debian_ip._write_file_routes", MagicMock(return_value=True) + ), patch( + "salt.modules.debian_ip._read_file", MagicMock(return_value="salt") + ): + assert debian_ip.build_routes("eth0") == "saltsalt" + + +def test_build_routes_exception(): + """ + Test if it add route scripts for a network interface using up commands. 
+ """ + with patch( + "salt.modules.debian_ip._parse_routes", + MagicMock(return_value={"routes": []}), + ): + assert debian_ip.build_routes("eth0", test="True") + + mock = MagicMock(side_effect=jinja2.exceptions.TemplateNotFound("err")) + with patch.object(jinja2.Environment, "get_template", mock): + assert debian_ip.build_routes("eth0") == "" + + +# 'down' function tests: 1 + + +def test_down(): + """ + Test if it shutdown a network interface + """ + assert debian_ip.down("eth0", "slave") is None + + mock = MagicMock(return_value="Salt") + with patch.dict(debian_ip.__salt__, {"cmd.run": mock}): + assert debian_ip.down("eth0", "eth") == "Salt" + + +# 'get_bond' function tests: 1 + + +def test_get_bond(): + """ + Test if it return the content of a bond script + """ + assert debian_ip.get_bond("bond0") == "" + + +# '_parse_interfaces' function tests: 1 + + +def test_parse_interfaces(test_interfaces): + """ + Test if it returns the correct data for parsed configuration file + """ + with tempfile.NamedTemporaryFile(mode="r", delete=True) as tfile: + for iface in test_interfaces: + iname = iface["iface_name"] + if iface.get("skip_test", False): + continue + with salt.utils.files.fopen(str(tfile.name), "w") as fh: + fh.writelines(iface["return"]) + for inet in ["inet", "inet6"]: + if inet in iface["get_interface"][iname]["data"]: + iface["get_interface"][iname]["data"][inet]["filename"] = str( + tfile.name + ) + assert ( + debian_ip._parse_interfaces([str(tfile.name)]) == iface["get_interface"] + ) + + +# 'get_interface' function tests: 1 + + +def test_get_interface(test_interfaces): + """ + Test if it return the contents of an interface script + """ + for iface in test_interfaces: + if iface.get("skip_test", False): + continue + with patch.object( + debian_ip, + "_parse_interfaces", + MagicMock(return_value=iface["get_interface"]), + ): + assert debian_ip.get_interface(iface["iface_name"]) == iface["return"] + + +# 'build_interface' function tests: 1 + + +def 
test_build_interface(test_interfaces): + """ + Test if it builds an interface script for a network interface. + """ + with patch( + "salt.modules.debian_ip._write_file_ifaces", MagicMock(return_value="salt") + ): + assert debian_ip.build_interface("eth0", "eth", "enabled") == [ + "s\n", + "a\n", + "l\n", + "t\n", + ] + + assert debian_ip.build_interface("eth0", "eth", "enabled", test="True") + + with patch.object( + debian_ip, "_parse_settings_eth", MagicMock(return_value={"routes": []}) + ): + for eth_t in ["bridge", "slave", "bond"]: + pytest.raises( + AttributeError, + debian_ip.build_interface, + "eth0", + eth_t, + "enabled", + ) + + assert debian_ip.build_interface("eth0", "eth", "enabled", test="True") + + with tempfile.NamedTemporaryFile(mode="r", delete=True) as tfile: + with patch("salt.modules.debian_ip._DEB_NETWORK_FILE", str(tfile.name)): + for iface in test_interfaces: + if iface.get("skip_test", False): + continue + # Skip tests that require __salt__['pkg.install']() + if iface["iface_type"] in ["bridge", "pppoe", "vlan"]: + continue + assert ( + debian_ip.build_interface( + iface=iface["iface_name"], + iface_type=iface["iface_type"], + enabled=iface["enabled"], + interface_file=tfile.name, + **iface["build_interface"] + ) + == iface["return"] + ) + + +# 'up' function tests: 1 + + +def test_up(): + """ + Test if it start up a network interface + """ + assert debian_ip.down("eth0", "slave") is None + + mock = MagicMock(return_value="Salt") + with patch.dict(debian_ip.__salt__, {"cmd.run": mock}): + assert debian_ip.up("eth0", "eth") == "Salt" + + +# 'get_network_settings' function tests: 1 + + +def test_get_network_settings(): + """ + Test if it return the contents of the global network script. 
+ """ + with patch.dict( + debian_ip.__grains__, {"osfullname": "Ubuntu", "osrelease": "14"} + ), patch( + "salt.modules.debian_ip._parse_hostname", + MagicMock(return_value="SaltStack"), + ), patch( + "salt.modules.debian_ip._parse_domainname", + MagicMock(return_value="saltstack.com"), + ): + mock_avai = MagicMock(return_value=True) + with patch.dict( + debian_ip.__salt__, + {"service.available": mock_avai, "service.status": mock_avai}, + ): + assert debian_ip.get_network_settings() == [ + "NETWORKING=yes\n", + "HOSTNAME=SaltStack\n", + "DOMAIN=saltstack.com\n", + ] + + mock = MagicMock(side_effect=jinja2.exceptions.TemplateNotFound("error")) + with patch.object(jinja2.Environment, "get_template", mock): + assert debian_ip.get_network_settings() == "" + + +# 'get_routes' function tests: 1 + + +def test_get_routes(): + """ + Test if it return the routes for the interface + """ + with patch("salt.modules.debian_ip._read_file", MagicMock(return_value="salt")): + assert debian_ip.get_routes("eth0") == "saltsalt" + + +# 'apply_network_settings' function tests: 1 + + +@pytest.mark.slow_test +def test_apply_network_settings(): + """ + Test if it apply global network configuration. + """ + mock = MagicMock(return_value=True) + with patch.dict( + debian_ip.__salt__, + {"network.mod_hostname": mock, "service.stop": mock, "service.start": mock}, + ): + assert debian_ip.apply_network_settings() is True + + +# 'build_network_settings' function tests: 1 + + +def test_build_network_settings(): + """ + Test if it build the global network script. 
+ """ + with patch( + "salt.modules.debian_ip._parse_network_settings", + MagicMock( + return_value={ + "networking": "yes", + "hostname": "Salt.saltstack.com", + "domainname": "saltstack.com", + "search": "test.saltstack.com", + } + ), + ), patch( + "salt.modules.debian_ip._write_file_network", MagicMock(return_value=True) + ): + with patch.dict( + debian_ip.__grains__, {"osfullname": "Ubuntu", "osrelease": "14"} + ): + mock = MagicMock(return_value=True) + with patch.dict( + debian_ip.__salt__, + { + "service.available": mock, + "service.disable": mock, + "service.enable": mock, + }, + ): + assert debian_ip.build_network_settings() == [ + "NETWORKING=yes\n", + "HOSTNAME=Salt\n", + "DOMAIN=saltstack.com\n", + "SEARCH=test.saltstack.com\n", + ] + + mock = MagicMock( + side_effect=jinja2.exceptions.TemplateNotFound("error") + ) + with patch.object(jinja2.Environment, "get_template", mock): + assert debian_ip.build_network_settings() == "" + + with patch.dict( + debian_ip.__grains__, {"osfullname": "Ubuntu", "osrelease": "10"} + ): + mock = MagicMock(return_value=True) + with patch.dict( + debian_ip.__salt__, + { + "service.available": mock, + "service.disable": mock, + "service.enable": mock, + }, + ): + mock = MagicMock( + side_effect=jinja2.exceptions.TemplateNotFound("error") + ) + with patch.object(jinja2.Environment, "get_template", mock): + assert debian_ip.build_network_settings() == "" + + with patch.object( + debian_ip, "_read_temp", MagicMock(return_value=True) + ): + assert debian_ip.build_network_settings(test="True") def test_when_no_adapters_are_passed_to_filter_none_should_be_returned(): diff --git a/tests/pytests/unit/modules/test_dig.py b/tests/pytests/unit/modules/test_dig.py index 29cdb816d2af..dcc0acc82953 100644 --- a/tests/pytests/unit/modules/test_dig.py +++ b/tests/pytests/unit/modules/test_dig.py @@ -1,19 +1,50 @@ +""" + Test cases for salt.modules.dig +""" + + import pytest -import salt.modules.cmdmod as cmdmod import salt.modules.dig as 
dig from tests.support.mock import MagicMock, patch @pytest.fixture def configure_loader_modules(): - return { - dig: { - "__salt__": { - "cmd.run_all": cmdmod.run_all, + return {dig: {}} + + +class SpfValues: + def __call__(self, key, python_shell=False): + _spf_values = { + "dig +short xmission.com TXT": { + "pid": 27282, + "retcode": 0, + "stderr": "", + "stdout": '"v=spf1 a mx include:_spf.xmission.com ?all"', + }, + "dig +short _spf.xmission.com TXT": { + "pid": 27282, + "retcode": 0, + "stderr": "", + "stdout": '"v=spf1 a mx ip4:198.60.22.0/24 ip4:166.70.13.0/24 ~all"', + }, + "dig +short xmission-redirect.com TXT": { + "pid": 27282, + "retcode": 0, + "stderr": "", + "stdout": "v=spf1 redirect=_spf.xmission.com", + }, + "dig +short foo.com TXT": { + "pid": 27282, + "retcode": 0, + "stderr": "", + "stdout": "v=spf1 ip4:216.73.93.70/31 ip4:216.73.93.72/31 ~all", }, } - } + return _spf_values.get( + " ".join(key), {"pid": 27310, "retcode": 0, "stderr": "", "stdout": ""} + ) def test_dig_cname_found(): @@ -40,3 +71,143 @@ def test_dig_cname_none_found(): ) with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): assert dig.CNAME("www.google.com") == "" + + +def test_check_ip(): + assert dig.check_ip("127.0.0.1") + + +def test_check_ip_ipv6(): + assert dig.check_ip("1111:2222:3333:4444:5555:6666:7777:8888") + + +def test_check_ip_ipv6_valid(): + assert dig.check_ip("2607:fa18:0:3::4") + + +def test_check_ip_neg(): + assert not dig.check_ip("-127.0.0.1") + + +def test_check_ip_empty(): + assert not dig.check_ip("") + + +def test_a(): + dig_mock = MagicMock( + return_value={ + "pid": 3656, + "retcode": 0, + "stderr": "", + "stdout": ( + "74.125.193.104\n" + "74.125.193.105\n" + "74.125.193.99\n" + "74.125.193.106\n" + "74.125.193.103\n" + "74.125.193.147" + ), + } + ) + with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): + assert dig.A("www.google.com") == [ + "74.125.193.104", + "74.125.193.105", + "74.125.193.99", + "74.125.193.106", + "74.125.193.103", + 
"74.125.193.147", + ] + + +def test_ptr(): + dig_mock = MagicMock( + return_value={ + "pid": 3657, + "retcode": 0, + "stderr": "", + "stdout": ("dns.google."), + } + ) + with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): + assert dig.ptr("8.8.8.8") == [ + "dns.google.", + ] + + +def test_aaaa(): + dig_mock = MagicMock( + return_value={ + "pid": 25451, + "retcode": 0, + "stderr": "", + "stdout": "2607:f8b0:400f:801::1014", + } + ) + with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): + assert dig.AAAA("www.google.com") == ["2607:f8b0:400f:801::1014"] + + +def test_ns(): + with patch("salt.modules.dig.A", MagicMock(return_value=["ns4.google.com."])): + dig_mock = MagicMock( + return_value={ + "pid": 26136, + "retcode": 0, + "stderr": "", + "stdout": "ns4.google.com.", + } + ) + with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): + assert dig.NS("google.com") == ["ns4.google.com."] + + +def test_spf(): + dig_mock = MagicMock(side_effect=SpfValues()) + with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): + assert dig.SPF("foo.com") == ["216.73.93.70/31", "216.73.93.72/31"] + + +def test_spf_redir(): + """ + Test for SPF records which use the 'redirect' SPF mechanism + https://en.wikipedia.org/wiki/Sender_Policy_Framework#Mechanisms + """ + dig_mock = MagicMock(side_effect=SpfValues()) + with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): + assert dig.SPF("xmission-redirect.com") == ["198.60.22.0/24", "166.70.13.0/24"] + + +def test_spf_include(): + """ + Test for SPF records which use the 'include' SPF mechanism + https://en.wikipedia.org/wiki/Sender_Policy_Framework#Mechanisms + """ + dig_mock = MagicMock(side_effect=SpfValues()) + with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): + assert dig.SPF("xmission.com") == ["198.60.22.0/24", "166.70.13.0/24"] + + +def test_mx(): + dig_mock = MagicMock( + return_value={ + "pid": 27780, + "retcode": 0, + "stderr": "", + "stdout": ( + "10 aspmx.l.google.com.\n" + "20 
alt1.aspmx.l.google.com.\n" + "40 alt3.aspmx.l.google.com.\n" + "50 alt4.aspmx.l.google.com.\n" + "30 alt2.aspmx.l.google.com." + ), + } + ) + with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): + assert dig.MX("google.com") == [ + ["10", "aspmx.l.google.com."], + ["20", "alt1.aspmx.l.google.com."], + ["40", "alt3.aspmx.l.google.com."], + ["50", "alt4.aspmx.l.google.com."], + ["30", "alt2.aspmx.l.google.com."], + ] diff --git a/tests/pytests/unit/modules/test_dnsutil.py b/tests/pytests/unit/modules/test_dnsutil.py new file mode 100644 index 000000000000..35e04519ad6d --- /dev/null +++ b/tests/pytests/unit/modules/test_dnsutil.py @@ -0,0 +1,137 @@ +""" + :codeauthor: Nicole Thomas + + TestCase for salt.modules.dnsutil +""" + +import pytest + +import salt.modules.dnsutil as dnsutil +import salt.utils.stringutils +from tests.support.mock import MagicMock, mock_open, patch + + +@pytest.fixture +def mock_hosts_file(): + return ( + "##\n" + "# Host Database\n" + "#\n" + "# localhost is used to configure the loopback interface\n" + "# when the system is booting. Do not change this entry.\n" + "##\n" + "127.0.0.1 localhost\n" + "255.255.255.255 broadcasthost\n" + "::1 localhost\n" + "fe80::1%lo0 localhost" + ) + + +@pytest.fixture +def mock_hosts_file_rtn(): + return { + "::1": ["localhost"], + "255.255.255.255": ["broadcasthost"], + "127.0.0.1": ["localhost"], + "fe80::1%lo0": ["localhost"], + } + + +@pytest.fixture +def mock_soa_zone(): + return ( + "$TTL 3D\n" + "@ IN SOA land-5.com. root.land-5.com. (\n" + "199609203 ; Serial\n" + "28800 ; Refresh\n" + "7200 ; Retry\n" + "604800 ; Expire\n" + "86400) ; Minimum TTL\n" + "NS land-5.com.\n\n" + "1 PTR localhost." + ) + + +@pytest.fixture +def mock_writes_list(): + return [ + "##\n", + "# Host Database\n", + "#\n", + "# localhost is used to configure the loopback interface\n", + "# when the system is booting. 
Do not change this entry.\n", + "##\n", + "127.0.0.1 localhost", + "\n", + "255.255.255.255 broadcasthost", + "\n", + "::1 localhost", + "\n", + "fe80::1%lo0 localhost", + "\n", + ] + + +@pytest.fixture +def configure_loader_modules(): + return {dnsutil: {}} + + +def test_parse_hosts(mock_hosts_file): + with patch("salt.utils.files.fopen", mock_open(read_data=mock_hosts_file)): + assert dnsutil.parse_hosts() == { + "::1": ["localhost"], + "255.255.255.255": ["broadcasthost"], + "127.0.0.1": ["localhost"], + "fe80::1%lo0": ["localhost"], + } + + +def test_hosts_append(mock_hosts_file, mock_hosts_file_rtn): + with patch( + "salt.utils.files.fopen", mock_open(read_data=mock_hosts_file) + ) as m_open, patch( + "salt.modules.dnsutil.parse_hosts", + MagicMock(return_value=mock_hosts_file_rtn), + ): + dnsutil.hosts_append("/etc/hosts", "127.0.0.1", "ad1.yuk.co,ad2.yuk.co") + writes = m_open.write_calls() + # We should have called .write() only once, with the expected + # content + num_writes = len(writes) + assert num_writes == 1, num_writes + expected = salt.utils.stringutils.to_str("\n127.0.0.1 ad1.yuk.co ad2.yuk.co") + assert writes[0] == expected, writes[0] + + +def test_hosts_remove(mock_hosts_file, mock_writes_list): + to_remove = "ad1.yuk.co" + new_mock_file = mock_hosts_file + "\n127.0.0.1 " + to_remove + "\n" + with patch("salt.utils.files.fopen", mock_open(read_data=new_mock_file)) as m_open: + dnsutil.hosts_remove("/etc/hosts", to_remove) + writes = m_open.write_calls() + assert writes == mock_writes_list, writes + + +def test_to_seconds_hour(): + assert dnsutil._to_seconds("4H") == 14400, "Did not detect valid hours as invalid" + + +def test_to_seconds_day(): + assert dnsutil._to_seconds("1D") == 86400, "Did not detect valid day as invalid" + + +def test_to_seconds_week(): + assert ( + dnsutil._to_seconds("2W") == 604800 + ), "Did not set time greater than one week to one week" + + +def test_to_seconds_empty(): + assert dnsutil._to_seconds("") == 604800, "Did 
not set empty time to one week" + + +def test_to_seconds_large(): + assert ( + dnsutil._to_seconds("604801") == 604800 + ), "Did not set time greater than one week to one week" diff --git a/tests/pytests/unit/modules/test_dpkg_lowpkg.py b/tests/pytests/unit/modules/test_dpkg_lowpkg.py index 1a89660c02bb..41bd615ff296 100644 --- a/tests/pytests/unit/modules/test_dpkg_lowpkg.py +++ b/tests/pytests/unit/modules/test_dpkg_lowpkg.py @@ -1,9 +1,369 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.modules.dpkg +""" + + +import logging import os +import pytest + import salt.modules.dpkg_lowpkg as dpkg from tests.support.mock import MagicMock, mock_open, patch +@pytest.fixture +def configure_loader_modules(): + return {dpkg: {}} + + +def setUp(self): + dpkg_lowpkg_logger = logging.getLogger("salt.modules.dpkg_lowpkg") + self.level = dpkg_lowpkg_logger.level + dpkg_lowpkg_logger.setLevel(logging.FATAL) + + +def tearDown(self): + logging.getLogger("salt.modules.dpkg_lowpkg").setLevel(self.level) + + +def dpkg_L_side_effect(cmd, **kwargs): + assert cmd[:2] == ["dpkg", "-L"] + package = cmd[2] + return dpkg_l_output[package] + + +dpkg_error_msg = """dpkg-query: package 'httpd' is not installed +Use dpkg --contents (= dpkg-deb --contents) to list archive files contents. +""" + + +dpkg_l_output = { + "hostname": """\ +/. 
+/bin +/bin/hostname +/usr +/usr/share +/usr/share/doc +/usr/share/doc/hostname +/usr/share/doc/hostname/changelog.gz +/usr/share/doc/hostname/copyright +/usr/share/man +/usr/share/man/man1 +/usr/share/man/man1/hostname.1.gz +/bin/dnsdomainname +/bin/domainname +/bin/nisdomainname +/bin/ypdomainname +/usr/share/man/man1/dnsdomainname.1.gz +/usr/share/man/man1/domainname.1.gz +/usr/share/man/man1/nisdomainname.1.gz +/usr/share/man/man1/ypdomainname.1.gz +""" +} + + +# 'unpurge' function tests: 2 + + +def test_unpurge(): + """ + Test if it change package selection for each package + specified to 'install' + """ + mock = MagicMock(return_value=[]) + with patch.dict(dpkg.__salt__, {"pkg.list_pkgs": mock, "cmd.run": mock}): + assert dpkg.unpurge("curl") == {} + + +def test_unpurge_empty_package(): + """ + Test if it change package selection for each package + specified to 'install' + """ + assert dpkg.unpurge() == {} + + +# 'list_pkgs' function tests: 1 + + +def test_list_pkgs(): + """ + Test if it lists the packages currently installed + """ + mock = MagicMock( + return_value={ + "retcode": 0, + "stderr": "", + "stdout": "installed\thostname\t3.21", + } + ) + with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}): + assert dpkg.list_pkgs("hostname") == {"hostname": "3.21"} + + mock = MagicMock( + return_value={ + "retcode": 1, + "stderr": "dpkg-query: no packages found matching httpd", + "stdout": "", + } + ) + with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}): + assert ( + dpkg.list_pkgs("httpd") + == "Error: dpkg-query: no packages found matching httpd" + ) + + +# 'file_list' function tests: 1 + + +def test_file_list(): + """ + Test if it lists the files that belong to a package. 
+ """ + dpkg_query_mock = MagicMock( + return_value={"retcode": 0, "stderr": "", "stdout": "installed\thostname"} + ) + dpkg_L_mock = MagicMock(side_effect=dpkg_L_side_effect) + with patch.dict( + dpkg.__salt__, {"cmd.run_all": dpkg_query_mock, "cmd.run": dpkg_L_mock} + ): + assert dpkg.file_list("hostname") == { + "errors": [], + "files": [ + "/.", + "/bin", + "/bin/dnsdomainname", + "/bin/domainname", + "/bin/hostname", + "/bin/nisdomainname", + "/bin/ypdomainname", + "/usr", + "/usr/share", + "/usr/share/doc", + "/usr/share/doc/hostname", + "/usr/share/doc/hostname/changelog.gz", + "/usr/share/doc/hostname/copyright", + "/usr/share/man", + "/usr/share/man/man1", + "/usr/share/man/man1/dnsdomainname.1.gz", + "/usr/share/man/man1/domainname.1.gz", + "/usr/share/man/man1/hostname.1.gz", + "/usr/share/man/man1/nisdomainname.1.gz", + "/usr/share/man/man1/ypdomainname.1.gz", + ], + } + + mock = MagicMock( + return_value={"retcode": 1, "stderr": dpkg_error_msg, "stdout": ""} + ) + with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}): + assert dpkg.file_list("httpd") == "Error: " + dpkg_error_msg + + +# 'file_dict' function tests: 1 + + +def test_file_dict(): + """ + Test if it lists the files that belong to a package, grouped by package + """ + dpkg_query_mock = MagicMock( + return_value={"retcode": 0, "stderr": "", "stdout": "installed\thostname"} + ) + dpkg_L_mock = MagicMock(side_effect=dpkg_L_side_effect) + with patch.dict( + dpkg.__salt__, {"cmd.run_all": dpkg_query_mock, "cmd.run": dpkg_L_mock} + ): + expected = { + "errors": [], + "packages": { + "hostname": [ + "/.", + "/bin", + "/bin/hostname", + "/usr", + "/usr/share", + "/usr/share/doc", + "/usr/share/doc/hostname", + "/usr/share/doc/hostname/changelog.gz", + "/usr/share/doc/hostname/copyright", + "/usr/share/man", + "/usr/share/man/man1", + "/usr/share/man/man1/hostname.1.gz", + "/bin/dnsdomainname", + "/bin/domainname", + "/bin/nisdomainname", + "/bin/ypdomainname", + 
"/usr/share/man/man1/dnsdomainname.1.gz", + "/usr/share/man/man1/domainname.1.gz", + "/usr/share/man/man1/nisdomainname.1.gz", + "/usr/share/man/man1/ypdomainname.1.gz", + ] + }, + } + assert dpkg.file_dict("hostname") == expected + + mock = MagicMock( + return_value={"retcode": 1, "stderr": dpkg_error_msg, "stdout": ""} + ) + with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}): + assert dpkg.file_dict("httpd") == "Error: " + dpkg_error_msg + + +def test_bin_pkg_info_spaces(): + """ + Test the bin_pkg_info function + """ + file_proto_mock = MagicMock(return_value=True) + with patch.dict(dpkg.__salt__, {"config.valid_fileproto": file_proto_mock}): + cache_mock = MagicMock(return_value="/path/to/some/package.deb") + with patch.dict(dpkg.__salt__, {"cp.cache_file": cache_mock}): + dpkg_info_mock = MagicMock( + return_value={ + "retcode": 0, + "stderr": "", + "stdout": ( + " new Debian package, version 2.0\n" + " size 123456 bytes: control archive: 4029 bytes.\n" + " Package : package_name\n" + " Version : 1.0\n" + " Section : section_name\n" + " Priority : priority\n" + " Architecture : all\n" + " Description : some package\n" + ), + } + ) + with patch.dict(dpkg.__salt__, {"cmd.run_all": dpkg_info_mock}): + assert dpkg.bin_pkg_info("package.deb")["name"] == "package_name" + + +def test_bin_pkg_info_no_spaces(): + """ + Test the bin_pkg_info function + """ + file_proto_mock = MagicMock(return_value=True) + with patch.dict(dpkg.__salt__, {"config.valid_fileproto": file_proto_mock}): + cache_mock = MagicMock(return_value="/path/to/some/package.deb") + with patch.dict(dpkg.__salt__, {"cp.cache_file": cache_mock}): + dpkg_info_mock = MagicMock( + return_value={ + "retcode": 0, + "stderr": "", + "stdout": ( + " new Debian package, version 2.0\n" + " size 123456 bytes: control archive: 4029 bytes.\n" + " Package: package_name\n" + " Version: 1.0\n" + " Section: section_name\n" + " Priority: priority\n" + " Architecture: all\n" + " Description: some package\n" + ), + } + ) 
+ with patch.dict(dpkg.__salt__, {"cmd.run_all": dpkg_info_mock}): + assert dpkg.bin_pkg_info("package.deb")["name"] == "package_name" + + +def test_info(): + """ + Test package info + """ + mock = MagicMock( + return_value={ + "retcode": 0, + "stderr": "", + "stdout": os.linesep.join( + [ + "package:bash", + "revision:", + "architecture:amd64", + "maintainer:Ubuntu Developers" + " ", + "summary:", + "source:bash", + "version:4.4.18-2ubuntu1", + "section:shells", + "installed_size:1588", + "size:", + "MD5:", + "SHA1:", + "SHA256:", + "origin:", + "homepage:http://tiswww.case.edu/php/chet/bash/bashtop.html", + "status:ii ", + "description:GNU Bourne Again SHell", + " Bash is an sh-compatible command language interpreter that" + " executes", + " commands read from the standard input or from a file. Bash" + " also", + " incorporates useful features from the Korn and C shells (ksh" + " and csh).", + " .", + " Bash is ultimately intended to be a conformant implementation" + " of the", + " IEEE POSIX Shell and Tools specification (IEEE Working Group" + " 1003.2).", + " .", + " The Programmable Completion Code, by Ian Macdonald, is now" + " found in", + " the bash-completion package.", + "", + "*/~^\\*", # pylint: disable=W1401 + ] + ), + } + ) + + with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}), patch.dict( + dpkg.__grains__, {"os": "Ubuntu", "osrelease_info": (18, 4)} + ), patch("salt.utils.path.which", MagicMock(return_value=False)), patch( + "os.path.exists", MagicMock(return_value=False) + ), patch( + "os.path.getmtime", MagicMock(return_value=1560199259.0) + ): + assert dpkg.info("bash") == { + "bash": { + "architecture": "amd64", + "description": os.linesep.join( + [ + "GNU Bourne Again SHell", + " Bash is an sh-compatible command language interpreter" + " that executes", + " commands read from the standard input or from a file." 
+ " Bash also", + " incorporates useful features from the Korn and C" + " shells (ksh and csh).", + " .", + " Bash is ultimately intended to be a conformant" + " implementation of the", + " IEEE POSIX Shell and Tools specification (IEEE" + " Working Group 1003.2).", + " .", + " The Programmable Completion Code, by Ian Macdonald," + " is now found in", + " the bash-completion package." + os.linesep, + ] + ), + "homepage": "http://tiswww.case.edu/php/chet/bash/bashtop.html", + "maintainer": ( + "Ubuntu Developers " + ), + "package": "bash", + "section": "shells", + "source": "bash", + "status": "ii", + "version": "4.4.18-2ubuntu1", + } + } + + def test_get_pkg_license(): """ Test _get_pkg_license for ignore errors on reading license from copyright files diff --git a/tests/pytests/unit/modules/test_drbd.py b/tests/pytests/unit/modules/test_drbd.py new file mode 100644 index 000000000000..ce9625c2d0c4 --- /dev/null +++ b/tests/pytests/unit/modules/test_drbd.py @@ -0,0 +1,186 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.modules.drbd +""" + + +import pytest + +import salt.modules.drbd as drbd +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {drbd: {}} + + +# 'overview' function tests: 1 + + +def test_overview(): + """ + Test if it shows status of the DRBD devices + """ + ret = { + "connection state": "True", + "device": "Stack", + "fs": "None", + "local disk state": "UpToDate", + "local role": "master", + "minor number": "Salt", + "mountpoint": "True", + "partner disk state": "UpToDate", + "partner role": "minion", + "percent": "888", + "remains": "666", + "total size": "50", + "used": "50", + } + mock = MagicMock( + return_value=( + "Salt:Stack True master/minion UpToDate/UpToDate True None 50 50 666 888" + ) + ) + with patch.dict(drbd.__salt__, {"cmd.run": mock}): + assert drbd.overview() == ret + + ret = { + "connection state": "True", + "device": "Stack", + "local disk state": "UpToDate", 
+ "local role": "master", + "minor number": "Salt", + "partner disk state": "partner", + "partner role": "minion", + "synched": "5050", + "synchronisation: ": "syncbar", + } + mock = MagicMock( + return_value=( + "Salt:Stack True master/minion UpToDate/partner syncbar None 50 50" + ) + ) + with patch.dict(drbd.__salt__, {"cmd.run": mock}): + assert drbd.overview() == ret + + +def test_status(): + """ + Test if it shows status of the DRBD resources via drbdadm + """ + ret = [ + { + "local role": "Primary", + "local volumes": [{"disk": "UpToDate"}], + "peer nodes": [ + { + "peer volumes": [ + { + "done": "96.47", + "peer-disk": "Inconsistent", + "replication": "SyncSource", + } + ], + "peernode name": "opensuse-node2", + "role": "Secondary", + } + ], + "resource name": "single", + } + ] + + mock = MagicMock( + return_value=""" +single role:Primary + disk:UpToDate + opensuse-node2 role:Secondary + replication:SyncSource peer-disk:Inconsistent done:96.47 +""" + ) + + with patch.dict(drbd.__salt__, {"cmd.run": mock}): + assert drbd.status() == ret + + ret = [ + { + "local role": "Primary", + "local volumes": [ + {"disk": "UpToDate", "volume": "0"}, + {"disk": "UpToDate", "volume": "1"}, + ], + "peer nodes": [ + { + "peer volumes": [ + {"peer-disk": "UpToDate", "volume": "0"}, + {"peer-disk": "UpToDate", "volume": "1"}, + ], + "peernode name": "node2", + "role": "Secondary", + }, + { + "peer volumes": [ + {"peer-disk": "UpToDate", "volume": "0"}, + {"peer-disk": "UpToDate", "volume": "1"}, + ], + "peernode name": "node3", + "role": "Secondary", + }, + ], + "resource name": "res", + }, + { + "local role": "Primary", + "local volumes": [ + {"disk": "UpToDate", "volume": "0"}, + {"disk": "UpToDate", "volume": "1"}, + ], + "peer nodes": [ + { + "peer volumes": [ + {"peer-disk": "UpToDate", "volume": "0"}, + {"peer-disk": "UpToDate", "volume": "1"}, + ], + "peernode name": "node2", + "role": "Secondary", + }, + { + "peer volumes": [ + {"peer-disk": "UpToDate", "volume": "0"}, 
+ {"peer-disk": "UpToDate", "volume": "1"}, + ], + "peernode name": "node3", + "role": "Secondary", + }, + ], + "resource name": "test", + }, + ] + + mock = MagicMock( + return_value=""" +res role:Primary + volume:0 disk:UpToDate + volume:1 disk:UpToDate + node2 role:Secondary + volume:0 peer-disk:UpToDate + volume:1 peer-disk:UpToDate + node3 role:Secondary + volume:0 peer-disk:UpToDate + volume:1 peer-disk:UpToDate + +test role:Primary + volume:0 disk:UpToDate + volume:1 disk:UpToDate + node2 role:Secondary + volume:0 peer-disk:UpToDate + volume:1 peer-disk:UpToDate + node3 role:Secondary + volume:0 peer-disk:UpToDate + volume:1 peer-disk:UpToDate + +""" + ) + with patch.dict(drbd.__salt__, {"cmd.run": mock}): + assert drbd.status() == ret diff --git a/tests/unit/modules/test_glusterfs.py b/tests/pytests/unit/modules/test_glusterfs.py similarity index 52% rename from tests/unit/modules/test_glusterfs.py rename to tests/pytests/unit/modules/test_glusterfs.py index a107ce6acd9d..2816a18ffd33 100644 --- a/tests/unit/modules/test_glusterfs.py +++ b/tests/pytests/unit/modules/test_glusterfs.py @@ -1,14 +1,16 @@ """ :codeauthor: Jayesh Kariya :codeauthor: Joe Julian + + Test cases for salt.modules.glusterfs """ +import pytest + import salt.modules.glusterfs as glusterfs from salt.exceptions import SaltInvocationError -from tests.support.mixins import LoaderModuleMockMixin from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase class GlusterResults: @@ -498,395 +500,399 @@ class peer_probe: """ -class GlusterfsTestCase(TestCase, LoaderModuleMockMixin): +@pytest.fixture +def configure_loader_modules(): + return {glusterfs: {}} + +maxDiff = None + +# 'peer_status' function tests: 1 + + +def test__get_version(): """ - Test cases for salt.modules.glusterfs + Test parsing of gluster --version. 
""" + mock_version = MagicMock(return_value="foo") + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}): + assert glusterfs._get_version() == (3, 6), "default behaviour" + + mock_version = MagicMock(return_value=version_output_362) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}): + assert glusterfs._get_version() == (3, 6, 2) + + mock_version = MagicMock(return_value=version_output_61) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}): + assert glusterfs._get_version() == (6, 1) + + more_versions = { + "6.0": (6, 0), + "4.1.10": (4, 1, 10), + "5.13": (5, 13), + "10.0": (10, 0), + } + for v in more_versions: + mock_version = MagicMock(return_value="glusterfs {}".format(v)) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}): + assert glusterfs._get_version() == more_versions[v] - def setup_loader_modules(self): - return {glusterfs: {}} - maxDiff = None +def test_peer_status(): + """ + Test gluster peer status + """ + mock_run = MagicMock(return_value=xml_peer_present) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.peer_status() == { + "uuid1": {"hostnames": ["node02", "node02.domain.dom", "10.0.0.2"]} + } - # 'peer_status' function tests: 1 + mock_run = MagicMock(return_value=xml_command_success) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.peer_status() == {} - def test__get_version(self): - """ - Test parsing of gluster --version. 
- """ - mock_version = MagicMock(return_value="foo") - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}): - self.assertEqual(glusterfs._get_version(), (3, 6), msg="default behaviour") - mock_version = MagicMock(return_value=version_output_362) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}): - self.assertEqual(glusterfs._get_version(), (3, 6, 2)) +# 'peer' function tests: 1 - mock_version = MagicMock(return_value=version_output_61) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}): - self.assertEqual(glusterfs._get_version(), (6, 1)) - more_versions = { - "6.0": (6, 0), - "4.1.10": (4, 1, 10), - "5.13": (5, 13), - "10.0": (10, 0), - } - for v in more_versions: - mock_version = MagicMock(return_value="glusterfs {}".format(v)) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}): - self.assertEqual(glusterfs._get_version(), more_versions[v]) - - def test_peer_status(self): - """ - Test gluster peer status - """ - mock_run = MagicMock(return_value=xml_peer_present) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertDictEqual( - glusterfs.peer_status(), - {"uuid1": {"hostnames": ["node02", "node02.domain.dom", "10.0.0.2"]}}, - ) +def test_peer(): + """ + Test if gluster peer call is successful. + """ + mock_run = MagicMock() + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + mock_run.return_value = xml_peer_probe_already_member + assert glusterfs.peer("salt") - mock_run = MagicMock(return_value=xml_command_success) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertDictEqual(glusterfs.peer_status(), {}) + mock_run.return_value = xml_peer_probe_localhost + assert glusterfs.peer("salt") - # 'peer' function tests: 1 + mock_run.return_value = xml_peer_probe_fail_cant_connect + assert not glusterfs.peer("salt") - def test_peer(self): - """ - Test if gluster peer call is successful. 
- """ - mock_run = MagicMock() - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - mock_run.return_value = xml_peer_probe_already_member - self.assertTrue(glusterfs.peer("salt")) - mock_run.return_value = xml_peer_probe_localhost - self.assertTrue(glusterfs.peer("salt")) +# 'create_volume' function tests: 1 - mock_run.return_value = xml_peer_probe_fail_cant_connect - self.assertFalse(glusterfs.peer("salt")) - # 'create_volume' function tests: 1 +def test_create_volume(): + """ + Test if it creates a glusterfs volume. + """ + mock_run = MagicMock(return_value=xml_command_success) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + pytest.raises( + SaltInvocationError, glusterfs.create_volume, "newvolume", "host1:brick" + ) - def test_create_volume(self): - """ - Test if it creates a glusterfs volume. - """ - mock_run = MagicMock(return_value=xml_command_success) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertRaises( - SaltInvocationError, glusterfs.create_volume, "newvolume", "host1:brick" - ) + pytest.raises( + SaltInvocationError, glusterfs.create_volume, "newvolume", "host1/brick" + ) - self.assertRaises( - SaltInvocationError, glusterfs.create_volume, "newvolume", "host1/brick" - ) + assert not mock_run.called - self.assertFalse(mock_run.called) + mock_start_volume = MagicMock(return_value=True) + with patch.object(glusterfs, "start_volume", mock_start_volume): + # Create, do not start + assert glusterfs.create_volume("newvolume", "host1:/brick") + assert not mock_start_volume.called - mock_start_volume = MagicMock(return_value=True) - with patch.object(glusterfs, "start_volume", mock_start_volume): - # Create, do not start - self.assertTrue(glusterfs.create_volume("newvolume", "host1:/brick")) - self.assertFalse(mock_start_volume.called) + # Create and start + assert glusterfs.create_volume("newvolume", "host1:/brick", start=True) + assert mock_start_volume.called - # Create and start - self.assertTrue( - 
glusterfs.create_volume("newvolume", "host1:/brick", start=True) - ) - self.assertTrue(mock_start_volume.called) + mock_start_volume.return_value = False + # Create and fail start + assert not glusterfs.create_volume("newvolume", "host1:/brick", start=True) - mock_start_volume.return_value = False - # Create and fail start - self.assertFalse( - glusterfs.create_volume("newvolume", "host1:/brick", start=True) - ) + mock_run.return_value = xml_command_fail + assert not glusterfs.create_volume( + "newvolume", "host1:/brick", True, True, True, "tcp", True + ) - mock_run.return_value = xml_command_fail - self.assertFalse( - glusterfs.create_volume( - "newvolume", "host1:/brick", True, True, True, "tcp", True - ) - ) - # 'list_volumes' function tests: 1 +# 'list_volumes' function tests: 1 + - def test_list_volumes(self): - """ - Test if it list configured volumes - """ - mock = MagicMock(return_value=xml_volume_absent) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock}): - self.assertListEqual(glusterfs.list_volumes(), []) +def test_list_volumes(): + """ + Test if it list configured volumes + """ + mock = MagicMock(return_value=xml_volume_absent) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock}): + assert glusterfs.list_volumes() == [] - mock = MagicMock(return_value=xml_volume_present) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock}): - self.assertListEqual(glusterfs.list_volumes(), ["Newvolume1", "Newvolume2"]) + mock = MagicMock(return_value=xml_volume_present) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock}): + assert glusterfs.list_volumes() == ["Newvolume1", "Newvolume2"] - # 'status' function tests: 1 - def test_status(self): - """ - Test if it check the status of a gluster volume. 
- """ - mock_run = MagicMock(return_value=xml_command_fail) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertIsNone(glusterfs.status("myvol1")) +# 'status' function tests: 1 + + +def test_status(): + """ + Test if it check the status of a gluster volume. + """ + mock_run = MagicMock(return_value=xml_command_fail) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.status("myvol1") is None + + res = { + "bricks": { + "node01:/tmp/foo": { + "host": "node01", + "hostname": "node01", + "online": True, + "path": "/tmp/foo", + "peerid": "830700d7-0684-497c-a12c-c02e365fb90b", + "pid": "2470", + "port": "49155", + "ports": {"rdma": "N/A", "tcp": "49155"}, + "status": "1", + } + }, + "healers": {}, + "nfs": { + "node01": { + "host": "NFS Server", + "hostname": "NFS Server", + "online": False, + "path": "localhost", + "peerid": "830700d7-0684-497c-a12c-c02e365fb90b", + "pid": "-1", + "port": "N/A", + "ports": {"rdma": "N/A", "tcp": "N/A"}, + "status": "0", + } + }, + } + mock = MagicMock(return_value=xml_volume_status) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock}): + assert glusterfs.status("myvol1") == res + - res = { +# 'start_volume' function tests: 1 + + +def test_volume_info(): + """ + Test if it returns the volume info. 
+ """ + res = { + "myvol1": { + "brickCount": "1", "bricks": { - "node01:/tmp/foo": { - "host": "node01", - "hostname": "node01", - "online": True, - "path": "/tmp/foo", - "peerid": "830700d7-0684-497c-a12c-c02e365fb90b", - "pid": "2470", - "port": "49155", - "ports": {"rdma": "N/A", "tcp": "49155"}, - "status": "1", - } - }, - "healers": {}, - "nfs": { - "node01": { - "host": "NFS Server", - "hostname": "NFS Server", - "online": False, - "path": "localhost", - "peerid": "830700d7-0684-497c-a12c-c02e365fb90b", - "pid": "-1", - "port": "N/A", - "ports": {"rdma": "N/A", "tcp": "N/A"}, - "status": "0", + "brick1": { + "hostUuid": "830700d7-0684-497c-a12c-c02e365fb90b", + "path": "node01:/tmp/foo", + "uuid": "830700d7-0684-497c-a12c-c02e365fb90b", } }, + "disperseCount": "0", + "distCount": "1", + "id": "f03c2180-cf55-4f77-ae0b-3650f57c82a1", + "name": "myvol1", + "optCount": "1", + "options": {"performance.readdir-ahead": "on"}, + "redundancyCount": "0", + "replicaCount": "1", + "status": "1", + "statusStr": "Started", + "stripeCount": "1", + "transport": "0", + "type": "0", + "typeStr": "Distribute", } - mock = MagicMock(return_value=xml_volume_status) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock}): - self.assertDictEqual(glusterfs.status("myvol1"), res) - - # 'start_volume' function tests: 1 - - def test_volume_info(self): - """ - Test if it returns the volume info. - """ - res = { - "myvol1": { - "brickCount": "1", + } + mock = MagicMock(return_value=xml_volume_info_running) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock}): + assert glusterfs.info("myvol1") == res + + +def test_start_volume(): + """ + Test if it start a gluster volume. 
+ """ + # Stopped volume + mock_info = MagicMock(return_value={"Newvolume1": {"status": "0"}}) + with patch.object(glusterfs, "info", mock_info): + mock_run = MagicMock(return_value=xml_command_success) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.start_volume("Newvolume1") is True + assert glusterfs.start_volume("nonExisting") is False + mock_run = MagicMock(return_value=xml_command_fail) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.start_volume("Newvolume1") is False + + # Started volume + mock_info = MagicMock(return_value={"Newvolume1": {"status": "1"}}) + with patch.object(glusterfs, "info", mock_info): + mock_run = MagicMock(return_value=xml_command_success) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.start_volume("Newvolume1", force=True) is True + mock_run = MagicMock(return_value=xml_command_fail) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + # cmd.run should not be called for already running volume: + assert glusterfs.start_volume("Newvolume1") is True + # except when forcing: + assert glusterfs.start_volume("Newvolume1", force=True) is False + + +# 'stop_volume' function tests: 1 + + +def test_stop_volume(): + """ + Test if it stop a gluster volume. 
+ """ + # Stopped volume + mock_info = MagicMock(return_value={"Newvolume1": {"status": "0"}}) + with patch.object(glusterfs, "info", mock_info): + mock_run = MagicMock(return_value=xml_command_success) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.stop_volume("Newvolume1") is True + assert glusterfs.stop_volume("nonExisting") is False + mock_run = MagicMock(return_value=xml_command_fail) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + # cmd.run should not be called for already stopped volume: + assert glusterfs.stop_volume("Newvolume1") is True + + # Started volume + mock_info = MagicMock(return_value={"Newvolume1": {"status": "1"}}) + with patch.object(glusterfs, "info", mock_info): + mock_run = MagicMock(return_value=xml_command_success) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.stop_volume("Newvolume1") is True + assert glusterfs.stop_volume("nonExisting") is False + mock_run = MagicMock(return_value=xml_command_fail) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.stop_volume("Newvolume1") is False + + +# 'delete_volume' function tests: 1 + + +def test_delete_volume(): + """ + Test if it deletes a gluster volume. 
+ """ + mock_info = MagicMock(return_value={"Newvolume1": {"status": "1"}}) + with patch.object(glusterfs, "info", mock_info): + # volume doesn't exist + assert not glusterfs.delete_volume("Newvolume3") + + mock_stop_volume = MagicMock(return_value=True) + mock_run = MagicMock(return_value=xml_command_success) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + with patch.object(glusterfs, "stop_volume", mock_stop_volume): + # volume exists, should not be stopped, and is started + assert not glusterfs.delete_volume("Newvolume1", False) + assert not mock_run.called + assert not mock_stop_volume.called + + # volume exists, should be stopped, and is started + assert glusterfs.delete_volume("Newvolume1") + assert mock_run.called + assert mock_stop_volume.called + + # volume exists and isn't started + mock_info = MagicMock(return_value={"Newvolume1": {"status": "2"}}) + with patch.object(glusterfs, "info", mock_info): + mock_run = MagicMock(return_value=xml_command_success) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.delete_volume("Newvolume1") + mock_run.return_value = xml_command_fail + assert not glusterfs.delete_volume("Newvolume1") + + +# 'add_volume_bricks' function tests: 1 + + +def test_add_volume_bricks(): + """ + Test if it add brick(s) to an existing volume + """ + mock_info = MagicMock( + return_value={ + "Newvolume1": { + "status": "1", "bricks": { - "brick1": { - "hostUuid": "830700d7-0684-497c-a12c-c02e365fb90b", - "path": "node01:/tmp/foo", - "uuid": "830700d7-0684-497c-a12c-c02e365fb90b", - } + "brick1": {"path": "host:/path1"}, + "brick2": {"path": "host:/path2"}, }, - "disperseCount": "0", - "distCount": "1", - "id": "f03c2180-cf55-4f77-ae0b-3650f57c82a1", - "name": "myvol1", - "optCount": "1", - "options": {"performance.readdir-ahead": "on"}, - "redundancyCount": "0", - "replicaCount": "1", - "status": "1", - "statusStr": "Started", - "stripeCount": "1", - "transport": "0", - "type": "0", - "typeStr": 
"Distribute", } } - mock = MagicMock(return_value=xml_volume_info_running) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock}): - self.assertDictEqual(glusterfs.info("myvol1"), res) - - def test_start_volume(self): - """ - Test if it start a gluster volume. - """ - # Stopped volume - mock_info = MagicMock(return_value={"Newvolume1": {"status": "0"}}) - with patch.object(glusterfs, "info", mock_info): - mock_run = MagicMock(return_value=xml_command_success) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertEqual(glusterfs.start_volume("Newvolume1"), True) - self.assertEqual(glusterfs.start_volume("nonExisting"), False) - mock_run = MagicMock(return_value=xml_command_fail) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertEqual(glusterfs.start_volume("Newvolume1"), False) - - # Started volume - mock_info = MagicMock(return_value={"Newvolume1": {"status": "1"}}) - with patch.object(glusterfs, "info", mock_info): - mock_run = MagicMock(return_value=xml_command_success) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertEqual(glusterfs.start_volume("Newvolume1", force=True), True) - mock_run = MagicMock(return_value=xml_command_fail) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - # cmd.run should not be called for already running volume: - self.assertEqual(glusterfs.start_volume("Newvolume1"), True) - # except when forcing: - self.assertEqual( - glusterfs.start_volume("Newvolume1", force=True), False - ) - - # 'stop_volume' function tests: 1 - - def test_stop_volume(self): - """ - Test if it stop a gluster volume. 
- """ - # Stopped volume - mock_info = MagicMock(return_value={"Newvolume1": {"status": "0"}}) - with patch.object(glusterfs, "info", mock_info): - mock_run = MagicMock(return_value=xml_command_success) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertEqual(glusterfs.stop_volume("Newvolume1"), True) - self.assertEqual(glusterfs.stop_volume("nonExisting"), False) - mock_run = MagicMock(return_value=xml_command_fail) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - # cmd.run should not be called for already stopped volume: - self.assertEqual(glusterfs.stop_volume("Newvolume1"), True) - - # Started volume - mock_info = MagicMock(return_value={"Newvolume1": {"status": "1"}}) - with patch.object(glusterfs, "info", mock_info): - mock_run = MagicMock(return_value=xml_command_success) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertEqual(glusterfs.stop_volume("Newvolume1"), True) - self.assertEqual(glusterfs.stop_volume("nonExisting"), False) - mock_run = MagicMock(return_value=xml_command_fail) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertEqual(glusterfs.stop_volume("Newvolume1"), False) - - # 'delete_volume' function tests: 1 - - def test_delete_volume(self): - """ - Test if it deletes a gluster volume. 
- """ - mock_info = MagicMock(return_value={"Newvolume1": {"status": "1"}}) - with patch.object(glusterfs, "info", mock_info): - # volume doesn't exist - self.assertFalse(glusterfs.delete_volume("Newvolume3")) - - mock_stop_volume = MagicMock(return_value=True) - mock_run = MagicMock(return_value=xml_command_success) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - with patch.object(glusterfs, "stop_volume", mock_stop_volume): - # volume exists, should not be stopped, and is started - self.assertFalse(glusterfs.delete_volume("Newvolume1", False)) - self.assertFalse(mock_run.called) - self.assertFalse(mock_stop_volume.called) - - # volume exists, should be stopped, and is started - self.assertTrue(glusterfs.delete_volume("Newvolume1")) - self.assertTrue(mock_run.called) - self.assertTrue(mock_stop_volume.called) - - # volume exists and isn't started - mock_info = MagicMock(return_value={"Newvolume1": {"status": "2"}}) - with patch.object(glusterfs, "info", mock_info): - mock_run = MagicMock(return_value=xml_command_success) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertTrue(glusterfs.delete_volume("Newvolume1")) - mock_run.return_value = xml_command_fail - self.assertFalse(glusterfs.delete_volume("Newvolume1")) - - # 'add_volume_bricks' function tests: 1 - - def test_add_volume_bricks(self): - """ - Test if it add brick(s) to an existing volume - """ - mock_info = MagicMock( - return_value={ - "Newvolume1": { - "status": "1", - "bricks": { - "brick1": {"path": "host:/path1"}, - "brick2": {"path": "host:/path2"}, - }, - } - } - ) - with patch.object(glusterfs, "info", mock_info): - mock_run = MagicMock(return_value=xml_command_success) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - # Volume does not exist - self.assertFalse(glusterfs.add_volume_bricks("nonExisting", ["bricks"])) - # Brick already exists - self.assertTrue( - glusterfs.add_volume_bricks("Newvolume1", ["host:/path2"]) - ) - # Already existing 
brick as a string - self.assertTrue( - glusterfs.add_volume_bricks("Newvolume1", "host:/path2") - ) - self.assertFalse(mock_run.called) - # A new brick: - self.assertTrue( - glusterfs.add_volume_bricks("Newvolume1", ["host:/new1"]) - ) - self.assertTrue(mock_run.called) - - # Gluster call fails - mock_run.return_value = xml_command_fail - self.assertFalse( - glusterfs.add_volume_bricks("Newvolume1", ["new:/path"]) - ) - - # 'get_op_version' function tests: 1 - - def test_get_op_version(self): - """ - Test retrieving the glusterfs op-version - """ - - # Test with xml output structure from v3.7 - mock_run = MagicMock(return_value=xml_op_version_37) + ) + with patch.object(glusterfs, "info", mock_info): + mock_run = MagicMock(return_value=xml_command_success) with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertEqual(glusterfs.get_op_version("test"), "30707") + # Volume does not exist + assert not glusterfs.add_volume_bricks("nonExisting", ["bricks"]) + # Brick already exists + assert glusterfs.add_volume_bricks("Newvolume1", ["host:/path2"]) + # Already existing brick as a string + assert glusterfs.add_volume_bricks("Newvolume1", "host:/path2") + assert not mock_run.called + # A new brick: + assert glusterfs.add_volume_bricks("Newvolume1", ["host:/new1"]) + assert mock_run.called + + # Gluster call fails + mock_run.return_value = xml_command_fail + assert not glusterfs.add_volume_bricks("Newvolume1", ["new:/path"]) - # Test with xml output structure from v3.12 - mock_run = MagicMock(return_value=xml_op_version_312) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): - self.assertEqual(glusterfs.get_op_version("test"), "30707") - # 'get_max_op_version' function tests: 1 +# 'get_op_version' function tests: 1 + - def test_get_max_op_version(self): - """ - Test retrieving the glusterfs max-op-version. 
- """ +def test_get_op_version(): + """ + Test retrieving the glusterfs op-version + """ - mock_xml = MagicMock(return_value=xml_max_op_version) - mock_version = MagicMock(return_value="glusterfs 3.9.1") + # Test with xml output structure from v3.7 + mock_run = MagicMock(return_value=xml_op_version_37) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.get_op_version("test") == "30707" + + # Test with xml output structure from v3.12 + mock_run = MagicMock(return_value=xml_op_version_312) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_run}): + assert glusterfs.get_op_version("test") == "30707" - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}): - self.assertFalse(glusterfs.get_max_op_version()[0]) - with patch.object(glusterfs, "_get_version", return_value=(3, 12, 0)): - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_xml}): - self.assertEqual(glusterfs.get_max_op_version(), "31200") +# 'get_max_op_version' function tests: 1 + + +def test_get_max_op_version(): + """ + Test retrieving the glusterfs max-op-version. 
+ """ - # 'set_op_version' function tests: 1 + mock_xml = MagicMock(return_value=xml_max_op_version) + mock_version = MagicMock(return_value="glusterfs 3.9.1") - def test_set_op_version(self): - """ - Test setting the glusterfs op-version - """ - mock_failure = MagicMock(return_value=xml_set_op_version_failure) - mock_success = MagicMock(return_value=xml_set_op_version_success) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_version}): + assert not glusterfs.get_max_op_version()[0] + + with patch.object(glusterfs, "_get_version", return_value=(3, 12, 0)): + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_xml}): + assert glusterfs.get_max_op_version() == "31200" + + +# 'set_op_version' function tests: 1 + + +def test_set_op_version(): + """ + Test setting the glusterfs op-version + """ + mock_failure = MagicMock(return_value=xml_set_op_version_failure) + mock_success = MagicMock(return_value=xml_set_op_version_success) - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_failure}): - self.assertFalse(glusterfs.set_op_version(30707)[0]) + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_failure}): + assert not glusterfs.set_op_version(30707)[0] - with patch.dict(glusterfs.__salt__, {"cmd.run": mock_success}): - self.assertEqual(glusterfs.set_op_version(31200), "Set volume successful") + with patch.dict(glusterfs.__salt__, {"cmd.run": mock_success}): + assert glusterfs.set_op_version(31200) == "Set volume successful" diff --git a/tests/pytests/unit/modules/test_gpg.py b/tests/pytests/unit/modules/test_gpg.py index 7ac084e612bc..697f53c65418 100644 --- a/tests/pytests/unit/modules/test_gpg.py +++ b/tests/pytests/unit/modules/test_gpg.py @@ -15,13 +15,14 @@ import pytest import salt.modules.gpg as gpg -from tests.support.mock import MagicMock, patch +from tests.support.mock import MagicMock, call, patch pytest.importorskip("gnupg") pytestmark = [ pytest.mark.skip_unless_on_linux, pytest.mark.requires_random_entropy, + pytest.mark.slow_test, ] log = 
logging.getLogger(__name__) @@ -879,13 +880,27 @@ def test_search_keys(gpghome): } ] + mock_search_keys = MagicMock(return_value=_search_result) mock_opt = MagicMock(return_value="root") with patch.dict(gpg.__salt__, {"user.info": MagicMock(return_value=_user_mock)}): with patch.dict(gpg.__salt__, {"config.option": mock_opt}): - with patch.object(gpg, "_search_keys", return_value=_search_result): + with patch.object(gpg, "_search_keys", mock_search_keys): ret = gpg.search_keys("person@example.com") assert ret == _expected_result + assert ( + call("person@example.com", "keys.openpgp.org", None) + in mock_search_keys.mock_calls + ) + + ret = gpg.search_keys("person@example.com", "keyserver.ubuntu.com") + assert ret == _expected_result + + assert ( + call("person@example.com", "keyserver.ubuntu.com", None) + in mock_search_keys.mock_calls + ) + def test_gpg_import_pub_key(gpghome): config_user = MagicMock(return_value="salt") diff --git a/tests/pytests/unit/modules/test_haproxyconn.py b/tests/pytests/unit/modules/test_haproxyconn.py new file mode 100644 index 000000000000..7834818fbeaf --- /dev/null +++ b/tests/pytests/unit/modules/test_haproxyconn.py @@ -0,0 +1,214 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.modules.haproxyconn +""" + + +import pytest + +import salt.modules.haproxyconn as haproxyconn + + +class Mockcmds: + """ + Mock of cmds + """ + + def __init__(self): + self.backend = None + self.server = None + self.weight = None + + def listServers(self, backend): + """ + Mock of listServers method + """ + self.backend = backend + return ( + "Name: server01 Status: UP Weight: 1 bIn: 22 bOut: 12\n" + "Name: server02 Status: MAINT Weight: 2 bIn: 0 bOut: 0" + ) + + def enableServer(self, server, backend): + """ + Mock of enableServer method + """ + self.backend = backend + self.server = server + return "server enabled" + + def disableServer(self, server, backend): + """ + Mock of disableServer method + """ + self.backend = backend + self.server = 
server + return "server disabled" + + def getWeight(self, server, backend, weight=0): + """ + Mock of getWeight method + """ + self.backend = backend + self.server = server + self.weight = weight + return "server weight" + + @staticmethod + def showFrontends(): + """ + Mock of showFrontends method + """ + return "frontend-alpha\nfrontend-beta\nfrontend-gamma" + + @staticmethod + def showBackends(): + """ + Mock of showBackends method + """ + return "backend-alpha\nbackend-beta\nbackend-gamma" + + +class Mockhaproxy: + """ + Mock of haproxy + """ + + def __init__(self): + self.cmds = Mockcmds() + + +class MockHaConn: + """ + Mock of HaConn + """ + + def __init__(self, socket=None): + self.ha_cmd = None + + def sendCmd(self, ha_cmd, objectify=False): + """ + Mock of sendCmd method + """ + self.ha_cmd = ha_cmd + self.objectify = objectify + return ha_cmd + + +@pytest.fixture +def configure_loader_modules(): + return {haproxyconn: {"haproxy": Mockhaproxy(), "_get_conn": MockHaConn}} + + +# 'list_servers' function tests: 1 + + +def test_list_servers(): + """ + Test list_servers + """ + assert haproxyconn.list_servers("mysql") + + +# 'enable_server' function tests: 1 + + +def test_enable_server(): + """ + Test enable_server + """ + assert haproxyconn.enable_server("web1.salt.com", "www") + + +# 'disable_server' function tests: 1 + + +def test_disable_server(): + """ + Test disable_server + """ + assert haproxyconn.disable_server("db1.salt.com", "mysql") + + +# 'get_weight' function tests: 1 + + +def test_get_weight(): + """ + Test get the weight of a server + """ + assert haproxyconn.get_weight("db1.salt.com", "mysql") + + +# 'set_weight' function tests: 1 + + +def test_set_weight(): + """ + Test setting the weight of a given server + """ + assert haproxyconn.set_weight("db1.salt.com", "mysql", weight=11) + + +# 'show_frontends' function tests: 1 + + +def test_show_frontends(): + """ + Test print all frontends received from the HAProxy socket + """ + assert 
haproxyconn.show_frontends() + + +def test_list_frontends(): + """ + Test listing all frontends + """ + assert sorted(haproxyconn.list_frontends()) == sorted( + ["frontend-alpha", "frontend-beta", "frontend-gamma"] + ) + + +# 'show_backends' function tests: 1 + + +def test_show_backends(): + """ + Test print all backends received from the HAProxy socket + """ + assert haproxyconn.show_backends() + + +def test_list_backends(): + """ + Test listing of all backends + """ + assert sorted(haproxyconn.list_backends()) == sorted( + ["backend-alpha", "backend-beta", "backend-gamma"] + ) + + +def test_get_backend(): + """ + Test get_backend and compare returned value + """ + expected_data = { + "server01": {"status": "UP", "weight": 1, "bin": 22, "bout": 12}, + "server02": {"status": "MAINT", "weight": 2, "bin": 0, "bout": 0}, + } + assert haproxyconn.get_backend("test") == expected_data + + +def test_wait_state_true(): + """ + Test a successful wait for state + """ + assert haproxyconn.wait_state("test", "server01") + + +def test_wait_state_false(): + """ + Test a failed wait for state, with a timeout of 0 + """ + assert not haproxyconn.wait_state("test", "server02", "up", 0) diff --git a/tests/pytests/unit/modules/test_hashutil.py b/tests/pytests/unit/modules/test_hashutil.py new file mode 100644 index 000000000000..c91e99ce6b70 --- /dev/null +++ b/tests/pytests/unit/modules/test_hashutil.py @@ -0,0 +1,86 @@ +""" + Test cases for salt.modules.hashutil +""" + + +import pytest + +import salt.modules.hashutil as hashutil + + +@pytest.fixture +def the_string(): + return "get salted" + + +@pytest.fixture +def the_string_base64(): + return "Z2V0IHNhbHRlZA==\n" + + +@pytest.fixture +def the_string_md5(): + return "2aacf29e92feaf528fb738bcf9d647ac" + + +@pytest.fixture +def the_string_sha256(): + return "d49859ccbc854fa68d800b5734efc70d72383e6479d545468bc300263164ff33" + + +@pytest.fixture +def the_string_sha512(): + return 
"a8c174a7941c64a068e686812a2fafd7624c840fde800f5965fbeca675f2f6e37061ffe41e17728c919bdea290eab7a21e13c04ae71661955a87f2e0e04bb045" + + +@pytest.fixture +def the_string_hmac(): + return "eBWf9bstXg+NiP5AOwppB5HMvZiYMPzEM9W5YMm/AmQ=" + + +@pytest.fixture +def the_string_hmac_compute(): + return "78159ff5bb2d5e0f8d88fe403b0a690791ccbd989830fcc433d5b960c9bf0264" + + +@pytest.fixture +def the_string_github(): + return "sha1=b06aa56bdf4935eec82c4e53e83ed03f03fdb32d" + + +@pytest.fixture +def configure_loader_modules(): + return {hashutil: {}} + + +def test_base64_encodestring(the_string, the_string_base64): + assert hashutil.base64_encodestring(the_string) == the_string_base64 + + +def test_base64_decodestring(the_string, the_string_base64): + assert hashutil.base64_decodestring(the_string_base64) == the_string + + +@pytest.mark.skip_on_fips_enabled_platform +def test_md5_digest(the_string, the_string_md5): + assert hashutil.md5_digest(the_string) == the_string_md5 + + +def test_sha256_digest(the_string, the_string_sha256): + assert hashutil.sha256_digest(the_string) == the_string_sha256 + + +def test_sha512_digest(the_string, the_string_sha512): + assert hashutil.sha512_digest(the_string) == the_string_sha512 + + +def test_hmac_signature(the_string, the_string_hmac): + assert hashutil.hmac_signature(the_string, "shared secret", the_string_hmac) + + +def test_hmac_compute(the_string, the_string_hmac_compute): + assert hashutil.hmac_compute(the_string, "shared secret") + + +def test_github_signature(the_string, the_string_github): + assert hashutil.github_signature(the_string, "shared secret", the_string_github) diff --git a/tests/pytests/unit/modules/test_junos.py b/tests/pytests/unit/modules/test_junos.py new file mode 100644 index 000000000000..616f15f1d8ac --- /dev/null +++ b/tests/pytests/unit/modules/test_junos.py @@ -0,0 +1,2898 @@ +""" + :codeauthor: Rajvi Dhimar +""" +import os + +import pytest +import yaml + +import salt.modules.junos as junos +from 
tests.support.mock import ANY, MagicMock, PropertyMock, call, mock_open, patch + +try: + from lxml import etree +except ImportError: + import xml.etree.ElementTree as etree + +try: + import jnpr.junos.op as tables_dir + import jxmlease # pylint: disable=unused-import + from jnpr.junos.device import Device + from jnpr.junos.exception import ConnectClosedError, LockError, UnlockError + from jnpr.junos.utils.config import Config + from jnpr.junos.utils.sw import SW + + HAS_JUNOS = True +except ImportError: + HAS_JUNOS = False + +pytestmark = [ + pytest.mark.skip_on_windows(reason="Not supported on Windows"), + pytest.mark.skipif( + not HAS_JUNOS, reason="The junos-eznc and jxmlease modules are required" + ), +] + + +@pytest.fixture +def get_facts(): + facts = { + "2RE": True, + "HOME": "/var/home/regress", + "RE0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", + }, + "RE1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", + }, + "RE_hw_mi": False, + "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], + "domain": "englab.juniper.net", + "fqdn": "R1_re0.englab.juniper.net", + "hostname": "R1_re0", + "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, + "ifd_style": "CLASSIC", + "junos_info": { + "re0": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + "re1": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + }, + "master": "RE0", + "model": "MX240", + "model_info": {"re0": "MX240", "re1": "MX240"}, + "personality": "MX", + "re_info": { + "default": { + "0": { + "last_reboot_reason": 
"0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + "1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + }, + "default": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + } + }, + "re_master": {"default": "0"}, + "serialnumber": "VMX4eaf", + "srx_cluster": None, + "switch_style": "BRIDGE_DOMAIN", + "vc_capable": False, + "vc_fabric": None, + "vc_master": None, + "vc_mode": None, + "version": "16.1I20160413_0837_aamish", + "version_RE0": "16.1I20160413_0837_aamish", + "version_RE1": "16.1I20160413_0837_aamish", + "version_info": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "virtual": True, + } + return facts + + +@pytest.fixture +def make_connect(): + with patch("ncclient.manager.connect") as mock_connect: + dev = Device( + host="1.1.1.1", + user="test", + password="test123", + fact_style="old", + gather_facts=False, + ) + dev.open() + dev.timeout = 30 + dev.bind(cu=Config) + dev.bind(sw=SW) + yield dev + + +@pytest.fixture +def configure_loader_modules(get_facts, make_connect): + return { + junos: { + "__proxy__": { + "junos.conn": MagicMock(return_value=make_connect), + "junos.get_serialized_facts": MagicMock(return_value=get_facts), + "junos.reboot_active": MagicMock(return_value=True), + "junos.reboot_clear": MagicMock(return_value=True), + }, + "__salt__": { + "cp.get_template": MagicMock(return_value=True), + "cp.get_file": MagicMock(return_value=True), + "file.file_exists": MagicMock(return_value=True), + "slsutil.renderer": MagicMock( + return_value="set system host-name dummy" + ), + "event.fire_master": MagicMock(return_value=None), + }, + "_restart_connection": MagicMock(return_value=None), + }, + } + + +def raise_exception(*args, **kwargs): + raise Exception("Test exception") + + +def 
test__timeout_decorator(): + with patch("jnpr.junos.Device.timeout", new_callable=PropertyMock) as mock_timeout: + mock_timeout.return_value = 30 + + def function(x): + return x + + decorator = junos._timeout_decorator(function) + decorator("Test Mock", dev_timeout=10) + calls = [call(), call(10), call(30)] + mock_timeout.assert_has_calls(calls) + + +def test__timeout_cleankwargs_decorator(): + with patch("jnpr.junos.Device.timeout", new_callable=PropertyMock) as mock_timeout: + mock_timeout.return_value = 30 + + def function(x): + return x + + decorator = junos._timeout_decorator_cleankwargs(function) + decorator("Test Mock", dev_timeout=10, __pub_args="abc") + calls = [call(), call(10), call(30)] + mock_timeout.assert_has_calls(calls) + + +def test_facts_refresh(): + with patch("salt.modules.saltutil.sync_grains") as mock_sync_grains: + ret = { + "out": True, + "facts": { + "2RE": True, + "HOME": "/var/home/regress", + "RE0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", + }, + "RE1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", + }, + "RE_hw_mi": False, + "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], + "domain": "englab.juniper.net", + "fqdn": "R1_re0.englab.juniper.net", + "hostname": "R1_re0", + "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, + "ifd_style": "CLASSIC", + "junos_info": { + "re0": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + "re1": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + }, + "master": "RE0", + "model": "MX240", + "model_info": {"re0": 
"MX240", "re1": "MX240"}, + "personality": "MX", + "re_info": { + "default": { + "0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + "1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + }, + "default": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + } + }, + "re_master": {"default": "0"}, + "serialnumber": "VMX4eaf", + "srx_cluster": None, + "switch_style": "BRIDGE_DOMAIN", + "vc_capable": False, + "vc_fabric": None, + "vc_master": None, + "vc_mode": None, + "version": "16.1I20160413_0837_aamish", + "version_RE0": "16.1I20160413_0837_aamish", + "version_RE1": "16.1I20160413_0837_aamish", + "version_info": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "virtual": True, + }, + } + assert junos.facts_refresh() == ret + + +def test_facts_refresh_exception(): + with patch("jnpr.junos.device.Device.facts_refresh") as mock_facts_refresh: + mock_facts_refresh.side_effect = raise_exception + ret = { + "message": 'Execution failed due to "Test exception"', + "out": False, + } + assert junos.facts_refresh() == ret + + +def test_facts(): + ret = { + "out": True, + "facts": { + "2RE": True, + "HOME": "/var/home/regress", + "RE0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", + }, + "RE1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", + }, + "RE_hw_mi": False, + "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], + "domain": "englab.juniper.net", + "fqdn": "R1_re0.englab.juniper.net", + "hostname": "R1_re0", + 
"hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, + "ifd_style": "CLASSIC", + "junos_info": { + "re0": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + "re1": { + "object": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "text": "16.1I20160413_0837_aamish", + }, + }, + "master": "RE0", + "model": "MX240", + "model_info": {"re0": "MX240", "re1": "MX240"}, + "personality": "MX", + "re_info": { + "default": { + "0": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + "1": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "backup", + "model": "RE-VMX", + "status": "OK", + }, + "default": { + "last_reboot_reason": "0x200:normal shutdown", + "mastership_state": "master", + "model": "RE-VMX", + "status": "OK", + }, + } + }, + "re_master": {"default": "0"}, + "serialnumber": "VMX4eaf", + "srx_cluster": None, + "switch_style": "BRIDGE_DOMAIN", + "vc_capable": False, + "vc_fabric": None, + "vc_master": None, + "vc_mode": None, + "version": "16.1I20160413_0837_aamish", + "version_RE0": "16.1I20160413_0837_aamish", + "version_RE1": "16.1I20160413_0837_aamish", + "version_info": { + "build": None, + "major": (16, 1), + "minor": "20160413_0837_aamish", + "type": "I", + }, + "virtual": True, + }, + } + assert junos.facts() == ret + + +def test_facts_exception(): + with patch.dict(junos.__proxy__, {"junos.get_serialized_facts": raise_exception}): + ret = { + "message": 'Could not display facts due to "Test exception"', + "out": False, + } + assert junos.facts() == ret + + +def test_set_hostname_without_args(): + ret = { + "message": "Please provide the hostname.", + "out": False, + } + assert junos.set_hostname() == ret + + +def test_set_hostname_load_called_with_valid_name(): + with patch("jnpr.junos.utils.config.Config.load") 
as mock_load: + junos.set_hostname("test-name") + mock_load.assert_called_with("set system host-name test-name", format="set") + + +def test_set_hostname_raise_exception_for_load(): + with patch("jnpr.junos.utils.config.Config.load") as mock_load: + mock_load.side_effect = raise_exception + ret = { + "message": 'Could not load configuration due to error "Test exception"', + "out": False, + } + assert junos.set_hostname("Test-name") == ret + + +def test_set_hostname_raise_exception_for_commit_check(): + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: + mock_commit_check.side_effect = raise_exception + ret = { + "message": 'Could not commit check due to error "Test exception"', + "out": False, + } + assert junos.set_hostname("test-name") == ret + + +def test_set_hostname_one_arg_parsed_correctly(): + with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "comment": "Committed via salt", + "__pub_user": "root", + "__pub_arg": ["test-name", {"comment": "Committed via salt"}], + "__pub_fun": "junos.set_hostname", + "__pub_jid": "20170220210915624885", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + junos.set_hostname("test-name", **args) + mock_commit.assert_called_with(comment="Committed via salt") + + +def test_set_hostname_more_than_one_args_parsed_correctly(): + with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "comment": "Committed via salt", + "__pub_user": "root", + "__pub_arg": [ + "test-name", + {"comment": "Committed via salt", "confirm": 5}, + ], + "__pub_fun": 
"junos.set_hostname", + "__pub_jid": "20170220210915624885", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + junos.set_hostname("test-name", **args) + mock_commit.assert_called_with(comment="Committed via salt", confirm=5) + + +def test_set_hostname_successful_return_message(): + with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "comment": "Committed via salt", + "__pub_user": "root", + "__pub_arg": ["test-name", {"comment": "Committed via salt"}], + "__pub_fun": "junos.set_hostname", + "__pub_jid": "20170220210915624885", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": "Successfully changed hostname.", + "out": True, + } + assert junos.set_hostname("test-name", **args) == ret + + +def test_set_hostname_raise_exception_for_commit(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit: + mock_commit.side_effect = raise_exception + ret = { + "message": 'Successfully loaded host-name but commit failed with "Test exception"', + "out": False, + } + assert junos.set_hostname("test-name") == ret + + +def test_set_hostname_fail_commit_check(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch("salt.modules.junos.rollback") as mock_rollback: + mock_commit_check.return_value = False + ret = { + "message": "Successfully loaded host-name but pre-commit check failed.", + "out": False, + } + assert junos.set_hostname("test") == ret + + +def test_commit_without_args(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit.return_value = True + mock_commit_check.return_value = True + ret = { + "message": 
"Commit Successful.", + "out": True, + } + assert junos.commit() == ret + + +def test_commit_raise_commit_check_exception(): + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: + mock_commit_check.side_effect = raise_exception + ret = { + "message": 'Could not perform commit check due to "Test exception"', + "out": False, + } + assert junos.commit() == ret + + +def test_commit_raise_commit_exception(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + mock_commit.side_effect = raise_exception + ret = { + "message": 'Commit check succeeded but actual commit failed with "Test exception"', + "out": False, + } + assert junos.commit() == ret + + +def test_commit_with_single_argument(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "__pub_user": "root", + "__pub_arg": [{"sync": True}], + "sync": True, + "__pub_fun": "junos.commit", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.commit(**args) + mock_commit.assert_called_with(detail=False, sync=True) + + +def test_commit_with_multiple_arguments(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit_check.return_value = True + args = { + "comment": "comitted via salt", + "__pub_user": "root", + "__pub_arg": [ + {"comment": "comitted via salt", "confirm": 3, "detail": True} + ], + "confirm": 3, + "detail": True, + "__pub_fun": "junos.commit", + "__pub_jid": "20170221182856987820", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.commit(**args) + 
mock_commit.assert_called_with( + comment="comitted via salt", detail=True, confirm=3 + ) + + +def test_commit_pyez_commit_returning_false(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit: + mock_commit.return_value = False + mock_commit_check.return_value = True + ret = { + "message": "Commit failed.", + "out": False, + } + assert junos.commit() == ret + + +def test_commit_pyez_commit_check_returns_false(): + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: + mock_commit_check.return_value = False + ret = { + "message": "Pre-commit check failed.", + "out": False, + } + assert junos.commit() == ret + + +def test_rollback_exception(): + with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: + mock_rollback.side_effect = raise_exception + ret = { + "message": 'Rollback failed due to "Test exception"', + "out": False, + } + assert junos.rollback() == ret + + +def test_rollback_without_args_success(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + mock_rollback.return_value = True + ret = { + "message": "Rollback successful", + "out": True, + } + assert junos.rollback() == ret + + +def test_rollback_without_args_fail(): + with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: + mock_rollback.return_value = False + ret = { + "message": "Rollback failed", + "out": False, + } + assert junos.rollback() == ret + + +def test_rollback_with_id(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + 
mock_commit_check.return_value = True + junos.rollback(id=5) + mock_rollback.assert_called_with(5) + + +def test_rollback_with_id_and_single_arg(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "__pub_user": "root", + "__pub_arg": [2, {"confirm": 2}], + "confirm": 2, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221184518526067", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(id=2, **args) + mock_rollback.assert_called_with(2) + mock_commit.assert_called_with(confirm=2) + + +def test_rollback_with_id_and_multiple_args(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "comment": "Comitted via salt", + "__pub_user": "root", + "__pub_arg": [ + 2, + {"comment": "Comitted via salt", "dev_timeout": 40, "confirm": 1}, + ], + "confirm": 1, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221192708251721", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(id=2, **args) + mock_rollback.assert_called_with(2) + mock_commit.assert_called_with( + comment="Comitted via salt", confirm=1, dev_timeout=40 + ) + + +def test_rollback_with_only_single_arg(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "__pub_user": "root", + "__pub_arg": [{"sync": True}], + "sync": True, + "__pub_fun": 
"junos.rollback", + "__pub_jid": "20170221193615696475", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(**args) + mock_rollback.assert_called_once_with(0) + mock_commit.assert_called_once_with(sync=True) + + +def test_rollback_with_only_multiple_args_no_id(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + args = { + "comment": "Comitted via salt", + "__pub_user": "root", + "__pub_arg": [{"comment": "Comitted via salt", "confirm": 3, "sync": True}], + "confirm": 3, + "sync": True, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221193945996362", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rollback(**args) + mock_rollback.assert_called_with(0) + mock_commit.assert_called_once_with( + sync=True, confirm=3, comment="Comitted via salt" + ) + + +def test_rollback_with_diffs_file_option_when_diff_is_None(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff: + mock_commit_check.return_value = True + mock_diff.return_value = "diff" + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], + "confirm": 2, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221205153884009", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + "diffs_file": "/home/regress/diff", + } + junos.rollback(**args) + mock_fopen.assert_called_with("/home/regress/diff", "w") + + +def test_rollback_with_diffs_file_option(): + with 
patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff: + mock_commit_check.return_value = True + mock_diff.return_value = None + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], + "confirm": 2, + "__pub_fun": "junos.rollback", + "__pub_jid": "20170221205153884009", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + "diffs_file": "/home/regress/diff", + } + junos.rollback(**args) + assert not mock_fopen.called + + +def test_rollback_commit_check_exception(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.side_effect = raise_exception + ret = { + "message": 'Could not commit check due to "Test exception"', + "out": False, + } + assert junos.rollback() == ret + + +def test_rollback_commit_exception(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.commit" + ) as mock_commit, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = True + mock_commit.side_effect = raise_exception + ret = { + "message": 'Rollback successful but commit failed with error "Test exception"', + "out": False, + } + assert junos.rollback() == ret + + +def test_rollback_commit_check_fails(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.rollback" + ) as mock_rollback: + mock_commit_check.return_value = False + ret = { + "message": "Rollback successful but pre-commit check failed.", + "out": 
False, + } + assert junos.rollback() == ret + + +def test_diff_without_args(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: + junos.diff() + mock_diff.assert_called_with(rb_id=0) + + +def test_diff_with_arg(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: + junos.diff(id=2) + mock_diff.assert_called_with(rb_id=2) + + +def test_diff_exception(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: + mock_diff.side_effect = raise_exception + ret = { + "message": 'Could not get diff with error "Test exception"', + "out": False, + } + assert junos.diff() == ret + + +def test_ping_without_args(): + ret = { + "message": "Please specify the destination ip to ping.", + "out": False, + } + assert junos.ping() == ret + + +def test_ping(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + junos.ping("1.1.1.1") + args = mock_execute.call_args + rpc = b"<ping><host>1.1.1.1</host><count>5</count></ping>" + mydgm = etree.tostring(args[0][0]) + assert etree.tostring(args[0][0]) == rpc + + +def test_ping_ttl(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + args = { + "__pub_user": "sudo_drajvi", + "__pub_arg": ["1.1.1.1", {"ttl": 3}], + "__pub_fun": "junos.ping", + "__pub_jid": "20170306165237683279", + "__pub_tgt": "mac_min", + "ttl": 3, + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.ping("1.1.1.1", **args) + exec_args = mock_execute.call_args + rpc = b"<ping><host>1.1.1.1</host><ttl>3</ttl><count>5</count></ping>" + assert etree.tostring(exec_args[0][0]) == rpc + + +def test_ping_exception(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.side_effect = raise_exception + ret = { + "message": 'Execution failed due to "Test exception"', + "out": False, + } + assert junos.ping("1.1.1.1") == ret + + +def test_cli_without_args(): + ret = { + "message": "Please provide the CLI command to be executed.", + "out": False, + } + assert junos.cli() == ret + + +def test_cli_with_format_as_empty_string(): + with 
patch("jnpr.junos.device.Device.cli") as mock_cli: + junos.cli("show version", format="") + mock_cli.assert_called_with("show version", "text", warning=False) + + +def test_cli(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + mock_cli.return_value = "CLI result" + ret = { + "message": "CLI result", + "out": True, + } + assert junos.cli("show version") == ret + mock_cli.assert_called_with("show version", "text", warning=False) + + +def test_cli_format_xml(): + with patch("salt.modules.junos.jxmlease.parse") as mock_jxml, patch( + "salt.modules.junos.etree.tostring" + ) as mock_to_string, patch("jnpr.junos.device.Device.cli") as mock_cli: + mock_cli.return_value = "test" + mock_jxml.return_value = "test" + args = { + "__pub_user": "root", + "__pub_arg": [{"format": "xml"}], + "format": "xml", + "__pub_fun": "junos.cli", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": "test", + "out": True, + } + assert junos.cli("show version", **args) == ret + mock_cli.assert_called_with("show version", "xml", warning=False) + mock_to_string.assert_called_once_with("test") + assert mock_jxml.called + + +def test_cli_exception_in_cli(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + mock_cli.side_effect = raise_exception + ret = { + "message": 'Execution failed due to "Test exception"', + "out": False, + } + assert junos.cli("show version") == ret + + +def test_cli_output_save(): + with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( + "salt.utils.files.fopen" + ) as mock_fopen: + mock_cli.return_value = "Test return" + args = { + "__pub_user": "root", + "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], + "format": "text", + "dest": "/path/to/file", + "__pub_fun": "junos.cli", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": "Test return", + "out": True, + } + assert 
junos.cli("show version", **args) == ret + mock_fopen.assert_called_with("/path/to/file", "w") + mock_cli.assert_called_with("show version", "text", warning=False) + + +def test_cli_output_save_ioexception(): + with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( + "salt.utils.files.fopen" + ) as mock_fopen: + mock_cli.return_value = "Test return" + mock_fopen.side_effect = IOError() + args = { + "__pub_user": "root", + "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], + "format": "text", + "dest": "/path/to/file", + "__pub_fun": "junos.cli", + "__pub_jid": "20170221182531323467", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": 'Unable to open "/path/to/file" to write', + "out": False, + } + assert junos.cli("show version", **args) == ret + + +def test_shutdown_without_args(): + ret = { + "message": "Provide either one of the arguments: shutdown or reboot.", + "out": False, + } + assert junos.shutdown() == ret + + +def test_shutdown_with_reboot_args(): + with patch("salt.modules.junos.SW.reboot") as mock_reboot: + ret = { + "message": "Successfully powered off/rebooted.", + "out": True, + } + args = { + "__pub_user": "root", + "__pub_arg": [{"reboot": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + assert junos.shutdown(**args) == ret + assert mock_reboot.called + + +def test_shutdown_with_poweroff_args(): + with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: + ret = { + "message": "Successfully powered off/rebooted.", + "out": True, + } + args = { + "__pub_user": "root", + "__pub_arg": [{"shutdown": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + assert junos.shutdown(**args) == ret + assert mock_poweroff.called + + +def 
test_shutdown_with_shutdown_as_false(): + ret = { + "message": "Nothing to be done.", + "out": False, + } + args = { + "__pub_user": "root", + "__pub_arg": [{"shutdown": False}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + assert junos.shutdown(**args) == ret + + +def test_shutdown_with_in_min_arg(): + with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: + args = { + "__pub_user": "root", + "in_min": 10, + "__pub_arg": [{"in_min": 10, "shutdown": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222231445709212", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.shutdown(**args) + mock_poweroff.assert_called_with(in_min=10) + + +def test_shutdown_with_at_arg(): + with patch("salt.modules.junos.SW.reboot") as mock_reboot: + args = { + "__pub_user": "root", + "__pub_arg": [{"at": "12:00 pm", "reboot": True}], + "reboot": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "201702276857", + "at": "12:00 pm", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.shutdown(**args) + mock_reboot.assert_called_with(at="12:00 pm") + + +def test_shutdown_fail_with_exception(): + with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: + mock_poweroff.side_effect = raise_exception + args = { + "__pub_user": "root", + "__pub_arg": [{"shutdown": True}], + "shutdown": True, + "__pub_fun": "junos.shutdown", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": 'Could not poweroff/reboot because "Test exception"', + "out": False, + } + assert junos.shutdown(**args) == ret + + +def test_install_config_without_args(): + ret = { + "message": "Please provide the salt path where the configuration is present", + "out": False, + } + assert junos.install_config() == ret 
+ + +def test_install_config_cp_fails(): + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + ret = { + "message": "Invalid file path.", + "out": False, + } + assert junos.install_config("path") == ret + + +def test_install_config_file_cp_fails(): + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + ret = { + "message": "Invalid file path.", + "out": False, + } + assert junos.install_config("path") == ret + + +def test_install_config(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config.set") == ret + mock_load.assert_called_with(path="test/path/config", format="set") + + +def test_install_config_xml_file(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": 
"a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config.xml") == ret + mock_load.assert_called_with(path="test/path/config", format="xml") + + +def test_install_config_text_file(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 
+ mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config") == ret + mock_load.assert_called_with(path="test/path/config", format="text") + + +def test_install_config_cache_not_exists(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value=None), + "file.rmdir": MagicMock(return_value="True"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "tempfile.mkdtemp" + ) as mock_mkdtemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + mock_mkdtemp.return_value = "/tmp/argr5351afd" + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert ( + junos.install_config("salt://actual/path/config", template_vars=True) + == ret + ) + mock_mkstemp.assert_called_with() + + +def test_install_config_replace(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + 
"jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"replace": True}], + "replace": True, + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config.set", **args) == ret + mock_load.assert_called_with( + path="test/path/config", format="set", merge=False + ) + + +def test_install_config_overwrite(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, 
patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"overwrite": True}], + "overwrite": True, + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config.xml", **args) == ret + mock_load.assert_called_with( + path="test/path/config", format="xml", overwrite=True + ) + + +def test_install_config_overwrite_false(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"overwrite": False}], + "overwrite": False, + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": 
"mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("salt://actual/path/config", **args) == ret + mock_load.assert_called_with( + path="test/path/config", format="text", merge=True + ) + + +def test_install_config_load_causes_exception(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_load.side_effect = raise_exception + ret = { + "message": 'Could not load configuration due to : "Test exception"', + "out": False, + "format": "set", + } + assert junos.install_config(path="actual/path/config.set") == ret + + +def test_install_config_no_diff(): + with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = None + ret = { + "message": "Configuration already applied!", + "out": True, + } + assert junos.install_config("actual/path/config") == ret + + +def test_install_config_write_diff(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + 
"jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "copy/config/here"}], + "diffs_file": "copy/config/here", + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("actual/path/config", **args) == ret + mock_fopen.assert_called_with("copy/config/here", "w") + + +def test_install_config_write_diff_exception(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as mock_fopen, patch( + "salt.utils.stringutils.to_str" + ) as mock_strgutils, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + 
mock_commit_check.return_value = True + mock_strgutils.side_effect = raise_exception + + args = { + "__pub_user": "root", + "__pub_arg": [{"diffs_file": "copy/config/here"}], + "diffs_file": "copy/config/here", + "__pub_fun": "junos.install_config", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + + ret = { + "message": "Could not write into diffs_file due to: 'Test exception'", + "out": False, + } + assert junos.install_config("actual/path/config", **args) == ret + + +def test_install_config_commit_params(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + args = { + "comment": "comitted via salt", + "__pub_user": "root", + "__pub_arg": [{"comment": "comitted via salt", "confirm": 3}], + "confirm": 3, + "__pub_fun": "junos.commit", + "__pub_jid": "20170221182856987820", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": "Successfully loaded and committed!", + "out": True, + } + assert junos.install_config("actual/path/config", **args) == ret + mock_commit.assert_called_with(comment="comitted via salt", confirm=3) + + +def test_install_config_commit_check_fails(): + with patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, 
patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = False + + ret = { + "message": "Loaded configuration but commit check failed, hence rolling back configuration.", + "out": False, + } + assert junos.install_config("actual/path/config.xml") == ret + + +def test_install_config_commit_exception(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + mock_commit.side_effect = raise_exception + ret = { + "message": 'Commit check successful but commit failed with "Test exception"', + "out": False, + } + assert junos.install_config("actual/path/config") == ret + + +def test_install_config_test_mode(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as 
mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + ret = { + "message": "Commit check passed, but skipping commit for dry-run and rolling back configuration.", + "out": True, + } + assert junos.install_config("actual/path/config", test=True) == ret + mock_commit.assert_not_called() + + +def test_install_config_write_diff_dynamic_mode(): + with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + ret = { + "message": "Write diff is not supported with dynamic/ephemeral configuration mode", + "out": False, + } + assert ( + junos.install_config( + "actual/path/config", mode="dynamic", diffs_file="/path/to/dif" + ) + == ret + ) + mock_commit.assert_not_called() + + +def test_install_config_unknown_mode(): + with 
patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( + "jnpr.junos.utils.config.Config.commit_check" + ) as mock_commit_check, patch( + "jnpr.junos.utils.config.Config.diff" + ) as mock_diff, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.getsize" + ) as mock_getsize: + mock_isfile.return_value = True + mock_getsize.return_value = 10 + mock_mkstemp.return_value = "test/path/config" + mock_diff.return_value = "diff" + mock_commit_check.return_value = True + ret = { + "message": "install_config failed due to: unsupported action: abcdef", + "out": False, + } + assert junos.install_config("actual/path/config", mode="abcdef") == ret + mock_commit.assert_not_called() + + +def test_zeroize(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + result = junos.zeroize() + mock_cli.assert_called_once_with("request system zeroize") + ret = { + "message": "Completed zeroize and rebooted", + "out": True, + } + assert result == ret + + +def test_zeroize_throw_exception(): + with patch("jnpr.junos.device.Device.cli") as mock_cli: + mock_cli.side_effect = raise_exception + ret = { + "message": 'Could not zeroize due to : "Test exception"', + "out": False, + } + assert junos.zeroize() == ret + + +def test_install_os_without_args(): + ret = { + "message": "Please provide the salt path where the junos image is present.", + "out": False, + } + assert junos.install_os() == ret + + +def test_install_os_cp_fails(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="xxxx"), + "file.rmdir": 
MagicMock(return_value="True"), + }, + ): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = False + mock_install.return_value = ( + False, + "Invalid path. Please provide a valid image path", + ) + ret = { + "message": "Installation failed. Reason: Invalid path. Please provide a valid image path", + "out": False, + } + assert junos.install_os("salt://image/path/") == ret + + +def test_install_os_image_cp_fails(): + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + ret = { + "message": "Invalid path. Please provide a valid image path", + "out": False, + } + assert junos.install_os("/image/path/") == ret + + +def test_install_os(): + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="test/path/config"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + ret = { + "message": "Installed the os.", + "out": True, + } + assert junos.install_os("path") == ret + + +def test_install_os_failure(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + 
"salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = False, "because we are testing failure" + ret = { + "message": "Installation failed. Reason: because we are testing failure", + "out": False, + } + assert junos.install_os("path") == ret + + +def test_install_os_with_reboot_arg(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "jnpr.junos.utils.sw.SW.reboot" + ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + args = { + "__pub_user": "root", + "__pub_arg": [{"reboot": True}], + "reboot": True, + "__pub_fun": "junos.install_os", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": "Successfully installed and rebooted!", + "out": True, + } + assert junos.install_os("path", **args) == ret + + +def test_install_os_pyez_install_throws_exception(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.side_effect = 
raise_exception + ret = { + "message": 'Installation failed due to: "Test exception"', + "out": False, + } + assert junos.install_os("path") == ret + + +def test_install_os_with_reboot_raises_exception(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "jnpr.junos.utils.sw.SW.reboot" + ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + mock_reboot.side_effect = raise_exception + args = { + "__pub_user": "root", + "__pub_arg": [{"reboot": True}], + "reboot": True, + "__pub_fun": "junos.install_os", + "__pub_jid": "20170222213858582619", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + ret = { + "message": 'Installation successful but reboot failed due to : "Test exception"', + "out": False, + } + assert junos.install_os("path", **args) == ret + + +def test_install_os_no_copy(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + ret = { + "message": "Installed the os.", + "out": True, + } + assert junos.install_os("path", no_copy=True) == ret + mock_install.assert_called_with( + "path", no_copy=True, progress=True, timeout=1800 + ) + mock_mkstemp.assert_not_called() + mock_safe_rm.assert_not_called() + + +def test_install_os_issu(): + with 
patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + ret = { + "message": "Installed the os.", + "out": True, + } + assert junos.install_os("path", issu=True) == ret + mock_install.assert_called_with(ANY, issu=True, progress=True, timeout=1800) + + +def test_install_os_add_params(): + with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( + "salt.utils.files.safe_rm" + ) as mock_safe_rm, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstemp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "os.path.getsize" + ) as mock_getsize: + mock_getsize.return_value = 10 + mock_isfile.return_value = True + mock_install.return_value = True, "installed" + ret = { + "message": "Installed the os.", + "out": True, + } + remote_path = "/path/to/file" + assert ( + junos.install_os("path", remote_path=remote_path, nssu=True, validate=True) + == ret + ) + mock_install.assert_called_with( + ANY, + nssu=True, + remote_path=remote_path, + progress=True, + validate=True, + timeout=1800, + ) + + +def test_file_copy_without_args(): + pytest.raises(TypeError, junos.file_copy) + + +@patch("paramiko.SSHClient") +@patch("scp.SCPClient.put") +@patch("scp.SCPClient.__init__") +def test_file_copy_invalid_src(mock_scpclient, mock_put, mock_ssh): + mock_scpclient.return_value = None + invalid_path = "invalid/file/path" + mock_put.side_effect = Exception(invalid_path) + with patch("os.path.isfile") as mock_isfile: + mock_isfile.return_value = False + ret = { + "message": 'Could not copy file : 
"invalid/file/path"', + "out": False, + } + assert junos.file_copy(invalid_path, "file") == ret + + +def test_file_copy_without_dest(): + pytest.raises(TypeError, junos.file_copy, src="/home/user/config.set") + + +def test_file_copy(): + with patch("salt.modules.junos.SCP") as mock_scp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_isfile.return_value = True + ret = { + "message": "Successfully copied file from test/src/file to file", + "out": True, + } + assert junos.file_copy(dest="file", src="test/src/file") == ret + + +def test_file_copy_exception(): + with patch("salt.modules.junos.SCP") as mock_scp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_isfile.return_value = True + mock_scp.side_effect = raise_exception + ret = { + "message": 'Could not copy file : "Test exception"', + "out": False, + } + assert junos.file_copy(dest="file", src="test/src/file") == ret + + +# These test cases test the __virtual__ function, used internally by salt +# to check if the given module is loadable. This function is not used by +# an external user. 
+ + +def test_virtual_proxy_unavailable(): + with patch.dict(junos.__opts__, {}): + res = ( + False, + "The junos or dependent module could not be loaded: " + "junos-eznc or jxmlease or yamlordereddictloader or " + "proxy could not be loaded.", + ) + assert junos.__virtual__() == res + + +def test_virtual_all_true(): + with patch.dict(junos.__opts__, {"proxy": "test"}): + assert junos.__virtual__() == "junos" + + +def test_rpc_without_args(): + ret = { + "message": "Please provide the rpc to execute.", + "out": False, + } + assert junos.rpc() == ret + + +def test_rpc_get_config_exception(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.side_effect = raise_exception + ret = { + "message": 'RPC execution failed due to "Test exception"', + "out": False, + } + assert junos.rpc("get_config") == ret + + +def test_rpc_get_config_filter(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.return_value = etree.XML("") + args = { + "__pub_user": "root", + "__pub_arg": [ + "get-config", + {"filter": ""}, + ], + "__pub_fun": "junos.rpc", + "__pub_jid": "20170314162715866528", + "__pub_tgt": "mac_min", + "__pub_tgt_type": "glob", + "filter": "", + "__pub_ret": "", + } + junos.rpc("get-config", **args) + exec_args = mock_execute.call_args + expected_rpc = b'' + assert etree.tostring(exec_args[0][0]) == expected_rpc + + +def test_rpc_get_interface_information(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + junos.rpc("get-interface-information", format="json") + args = mock_execute.call_args + expected_rpc = b'' + assert etree.tostring(args[0][0]) == expected_rpc + + +def test_rpc_get_interface_information_with_kwargs(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + args = { + "__pub_user": "root", + "__pub_arg": [ + "get-interface-information", + "", + "text", + {"terse": True, "interface_name": "lo0", "format": "text"}, + ], + "format": "text", + "terse": True, + 
"__pub_fun": "junos.rpc", + "__pub_jid": "20170314160943363563", + "__pub_tgt": "mac_min", + "interface_name": "lo0", + "__pub_tgt_type": "glob", + "__pub_ret": "", + } + junos.rpc("get-interface-information", **args) + args = mock_execute.call_args + expected_rpc = b'lo0' + assert etree.tostring(args[0][0]) == expected_rpc + + +def test_rpc_get_chassis_inventory_filter_as_arg(): + with patch("salt.modules.junos.jxmlease.parse") as mock_jxmlease, patch( + "salt.modules.junos.etree.tostring" + ) as mock_tostring, patch( + "salt.modules.junos.logging.Logger.warning" + ) as mock_warning, patch( + "jnpr.junos.device.Device.execute" + ) as mock_execute: + junos.rpc( + "get-chassis-inventory", + filter="", + ) + mock_warning.assert_called_with( + 'Filter ignored as it is only used with "get-config" rpc' + ) + + +def test_rpc_get_interface_information_exception(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.side_effect = raise_exception + ret = { + "message": 'RPC execution failed due to "Test exception"', + "out": False, + } + assert junos.rpc("get_interface_information") == ret + + +def test_rpc_write_file_format_text(): + with patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_execute.return_value = etree.XML("text rpc reply") + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: + junos.rpc("get-chassis-inventory", dest="/path/to/file", format="text") + writes = m_open.write_calls() + assert writes == ["text rpc reply"], writes + + +def test_rpc_write_file_format_json(): + with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "salt.utils.json.dumps" + ) as mock_dumps: + mock_dumps.return_value = "json rpc reply" + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: + junos.rpc("get-chassis-inventory", dest="/path/to/file", format="json") + writes = m_open.write_calls() + assert writes == ["json rpc reply"], writes + + +def test_rpc_write_file(): + with 
patch("salt.modules.junos.jxmlease.parse") as mock_parse, patch( + "salt.modules.junos.etree.tostring" + ) as mock_tostring, patch("jnpr.junos.device.Device.execute") as mock_execute: + mock_tostring.return_value = "xml rpc reply" + with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: + junos.rpc("get-chassis-inventory", dest="/path/to/file") + writes = m_open.write_calls() + assert writes == ["xml rpc reply"], writes + + +def test_lock_success(): + ret_exp = {"out": True, "message": "Successfully locked the configuration."} + ret = junos.lock() + assert ret == ret_exp + + +def test_lock_error(): + ret_exp = {"out": False, "message": 'Could not gain lock due to : "LockError"'} + with patch("jnpr.junos.utils.config.Config.lock") as mock_lock: + mock_lock.side_effect = LockError(None) + ret = junos.lock() + assert ret == ret_exp + + +def test_unlock_success(): + ret_exp = {"out": True, "message": "Successfully unlocked the configuration."} + ret = junos.unlock() + assert ret == ret_exp + + +def test_unlock_error(): + ret_exp = { + "out": False, + "message": 'Could not unlock configuration due to : "UnlockError"', + } + with patch("jnpr.junos.utils.config.Config.unlock") as mock_unlock: + mock_unlock.side_effect = UnlockError(None) + ret = junos.unlock() + assert ret == ret_exp + + +def test_load_none_path(): + ret_exp = { + "out": False, + "message": ("Please provide the salt path where the configuration is present"), + } + ret = junos.load() + assert ret == ret_exp + + +def test_load_wrong_tmp_file(): + ret_exp = { + "out": False, + "message": ( + 'Could not load configuration due to : "[Errno 2] No such file or' + " directory: '/pat/to/tmp/file'\"" + ), + "format": "text", + } + with patch.dict( + junos.__salt__, + { + "cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), + "cp.hash_file": MagicMock( + return_value={"hash_type": "sha256", "hsum": "a386e49c17"} + ), + "file.get_hash": MagicMock(return_value="a386e49c17"), + }, + ): + with 
patch("salt.utils.files.fopen", mock_open(), create=True) as m_open, patch( + "os.path.getsize" + ) as mock_getsize, patch("salt.utils.files.mkstemp") as mock_mkstmp: + mock_mkstmp.return_value = "/pat/to/tmp/file" + mock_getsize.return_value = 1000 + ret = junos.load("salt://path/to/file") + assert ret == ret_exp + + +def test_load_invalid_path(): + with patch("salt.utils.files.mkstemp") as mock_mkstmp: + mock_mkstmp.return_value = "/path/to/file" + pytest.raises(FileNotFoundError, junos.load, path="/path/to/file") + + +def test_load_no_extension(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file") + mock_load.assert_called_with(format="text", path="/path/to/file") + assert ret == ret_exp + + +def test_load_xml_extension(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("os.path.isfile") as mock_isfile, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file.xml" + mock_isfile.return_value = True + ret = junos.load("/path/to/file.xml") + mock_load.assert_called_with(format="xml", path="/path/to/file.xml") + assert ret == ret_exp + + +def test_load_xml_extension_with_kwargs(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, 
patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "os.path.isfile" + ) as mock_isfile, patch( + "salt.utils.files.fopen" + ) as fopen, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp: + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file.xml", template_vars=dict(hostname="test")) + mock_load.assert_called_with( + format="xml", path="/path/to/file", template_vars={"hostname": "test"} + ) + assert ret == ret_exp + + +def test_load_set_extension(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file.set" + mock_isfile.return_value = True + ret = junos.load("/path/to/file.set") + mock_load.assert_called_with(format="set", path="/path/to/file.set") + assert ret == ret_exp + + +def test_load_replace_true(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", replace=True) + mock_load.assert_called_with(format="text", merge=False, path="/path/to/file") + assert ret == ret_exp + + +def test_load_replace_false(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with 
patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", replace=False) + mock_load.assert_called_with(format="text", replace=False, path="/path/to/file") + assert ret == ret_exp + + +def test_load_overwrite_true(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", overwrite=True) + mock_load.assert_called_with( + format="text", overwrite=True, path="/path/to/file" + ) + assert ret == ret_exp + + +def test_load_overwrite_false(): + ret_exp = {"out": True, "message": "Successfully loaded the configuration."} + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "salt.utils.files.mkstemp" + ) as mock_mkstmp, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + ret = junos.load("/path/to/file", overwrite=False) + mock_load.assert_called_with(format="text", merge=True, path="/path/to/file") + assert ret == ret_exp + + +def test_load_error(): + ret_exp = { + "out": False, + "format": "text", + 
"message": 'Could not load configuration due to : "Test Error"', + } + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch( + "os.path.isfile" + ) as mock_isfile: + mock_getsize.return_value = 1000 + mock_mkstmp.return_value = "/path/to/file" + mock_isfile.return_value = True + mock_load.side_effect = Exception("Test Error") + ret = junos.load("/path/to/file") + assert ret == ret_exp + + +def test_load_template(): + ret_exp = { + "out": True, + "message": "Successfully loaded the configuration.", + } + with patch("os.path.getsize") as mock_getsize, patch( + "jnpr.junos.utils.config.Config.load" + ) as mock_load: + ret = junos.load("tests/unit/modules/templates/basic2.j2", test=True) + assert ret == ret_exp + + +def test_commit_check_success(): + ret_exp = {"out": True, "message": "Commit check succeeded."} + ret = junos.commit_check() + assert ret == ret_exp + + +def test_commit_check_error(): + ret_exp = {"out": False, "message": "Commit check failed with "} + with patch("jnpr.junos.utils.config.Config.commit_check") as mock_check: + mock_check.side_effect = Exception + ret = junos.commit_check() + assert ret == ret_exp + + +def test_get_table_wrong_path(): + table = "ModuleTable" + file = "sample.yml" + path = "/path/to/file" + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": "Given table file {} cannot be located".format(file), + } + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( + "salt.utils.files.fopen" + ) as mock_fopen, patch("jnpr.junos.factory.FactoryLoader.load") as mock_load: + ret = junos.get_table(table, file, path) + assert ret == ret_exp + mock_load.assert_not_called() + + +def 
test_get_table_no_path_no_file(): + table = "ModuleTable" + file = "inventory.yml" + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": "Given table file {} cannot be located".format(file), + } + with patch.dict( + junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} + ): + with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( + "glob.glob" + ) as mock_fopen: + mock_fopen.return_value = [] + ret = junos.get_table(table, file) + assert ret == ret_exp + mock_load.assert_not_called() + + +def test_get_table_yaml_load_error(): + table = "ModuleTable" + file = "inventory.yml" + path = "/path/to/file" + message = "File not located test" + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": "Uncaught exception during YAML Load - please report: {}".format( + message + ), + } + with patch("salt.utils.files.fopen", mock_open(), create=True) as mock_file, patch( + "glob.glob" + ) as mock_fopen, patch.object(yaml, "load") as mock_yamlload: + mock_fopen.return_value = ["/path/to/file"] + mock_yamlload.side_effect = OSError(message) + ret = junos.get_table(table, file, path) + assert ret == ret_exp + + +def test_get_table_api_error(): + table = "sample" + file = "inventory.yml" + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "sample", + "message": ( + "Uncaught exception during get API call - please report: '{}'".format( + str(table) + ) + ), + } + with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "yaml.load" + ) as mock_yamlload, patch( + 
"salt.utils.files.fopen", mock_open(), create=True + ) as m_open: + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file) + assert ret["out"] == ret_exp["out"] + assert ret["tablename"] == ret_exp["tablename"] + assert ret["message"] == ret_exp["message"] + + +def test_get_table_connect_closed_error(): + table = "ModuleTable" + file = "inventory.yml" + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } + ret_exp = { + "out": False, + "hostname": "1.1.1.1", + "tablename": "ModuleTable", + "message": ( + "Got ConnectClosedError exception. Connection lost with Device(1.1.1.1)" + ), + } + with patch("jnpr.junos.factory.optable.OpTable.get") as mock_load, patch( + "yaml.load" + ) as mock_yamlload, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open: + dev = Device(host="1.1.1.1", user="rick") + mock_load.side_effect = ConnectClosedError(dev) + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file) + assert ret["out"] == ret_exp["out"] + assert ret["tablename"] == ret_exp["tablename"] + assert ret["message"] == ret_exp["message"] + + +def test_get_table_inventory(): + table = "ModuleTable" + file = "inventory.yml" + pyez_tables_path = os.path.dirname(os.path.abspath(tables_dir.__file__)) + path = pyez_tables_path + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } 
+ with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch("yaml.load") as mock_yamlload, patch( + "salt.utils.json.dumps" + ) as mock_dumps: + mock_dumps.return_value = "json rpc reply" + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file, path) + assert ret["out"] + + +def test_get_table_no_path_inventory(): + table = "ModuleTable" + file = "inventory.yml" + table_yamlload = { + "ModuleTable": { + "item": ( + ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" + ), + "key": "name", + "rpc": "get-chassis-inventory", + "view": "ModuleTableView", + }, + "ModuleTableView": { + "fields": { + "jname": "name", + "pn": "part-number", + "sn": "serial-number", + "type": "description", + "ver": "version", + }, + }, + } + with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( + "salt.utils.files.fopen", mock_open(), create=True + ) as m_open, patch("yaml.load") as mock_yamlload, patch( + "salt.utils.json.dumps" + ) as mock_dumps: + mock_dumps.return_value = "json rpc reply" + mock_yamlload.return_value = table_yamlload + ret = junos.get_table(table, file) + assert ret["out"] diff --git a/tests/pytests/unit/modules/test_keystone.py b/tests/pytests/unit/modules/test_keystone.py new file mode 100644 index 000000000000..83b611ff4e13 --- /dev/null +++ b/tests/pytests/unit/modules/test_keystone.py @@ -0,0 +1,958 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.modules.keystone +""" + +import pytest + +import salt.modules.config as config +import salt.modules.keystone as keystone +from tests.support.mock import MagicMock, call, patch + + +class MockEC2: + """ + Mock of EC2 class + """ + + def __init__(self): + self.access = "" + self.secret = "" + self.tenant_id = "" + self.user_id = "" + self.connection_args = "" + self.profile = "" + + @staticmethod + def create(userid, tenantid): + """ + Mock of create method + """ + 
cr_ec2 = MockEC2() + cr_ec2.tenant_id = tenantid + cr_ec2.user_id = userid + return cr_ec2 + + def delete(self, userid, accesskey): + """ + Mock of delete method + """ + self.access = accesskey + self.user_id = userid + return True + + @staticmethod + def get(user_id, access, profile, **connection_args): + """ + Mock of get method + """ + cr_ec2 = MockEC2() + cr_ec2.profile = profile + cr_ec2.access = access + cr_ec2.user_id = user_id + cr_ec2.connection_args = connection_args + return cr_ec2 + + @staticmethod + def list(user_id): + """ + Mock of list method + """ + cr_ec2 = MockEC2() + cr_ec2.user_id = user_id + return [cr_ec2] + + +class MockEndpoints: + """ + Mock of Endpoints class + """ + + def __init__(self): + self.id = "007" + self.region = "RegionOne" + self.adminurl = "adminurl" + self.internalurl = "internalurl" + self.publicurl = "publicurl" + self.service_id = "117" + + @staticmethod + def list(): + """ + Mock of list method + """ + return [MockEndpoints()] + + @staticmethod + def create(region, service_id, publicurl, adminurl, internalurl): + """ + Mock of create method + """ + return (region, service_id, publicurl, adminurl, internalurl) + + @staticmethod + def delete(id): + """ + Mock of delete method + """ + return id + + +class MockServices: + """ + Mock of Services class + """ + + flag = None + + def __init__(self): + self.id = "117" + self.name = "iptables" + self.description = "description" + self.type = "type" + + @staticmethod + def create(name, service_type, description): + """ + Mock of create method + """ + service = MockServices() + service.id = "005" + service.name = name + service.description = description + service.type = service_type + return service + + def get(self, service_id): + """ + Mock of get method + """ + service = MockServices() + if self.flag == 1: + service.id = "asd" + return [service] + elif self.flag == 2: + service.id = service_id + return service + return [service] + + def list(self): + """ + Mock of list method + 
""" + service = MockServices() + if self.flag == 1: + service.id = "asd" + return [service] + return [service] + + @staticmethod + def delete(service_id): + """ + Mock of delete method + """ + return service_id + + +class MockRoles: + """ + Mock of Roles class + """ + + flag = None + + def __init__(self): + self.id = "113" + self.name = "nova" + self.user_id = "446" + self.tenant_id = "a1a1" + + @staticmethod + def create(name): + """ + Mock of create method + """ + return name + + def get(self, role_id): + """ + Mock of get method + """ + role = MockRoles() + if self.flag == 1: + role.id = None + return role + role.id = role_id + return role + + @staticmethod + def list(): + """ + Mock of list method + """ + return [MockRoles()] + + @staticmethod + def delete(role): + """ + Mock of delete method + """ + return role + + @staticmethod + def add_user_role(user_id, role_id, tenant_id): + """ + Mock of add_user_role method + """ + return (user_id, role_id, tenant_id) + + @staticmethod + def remove_user_role(user_id, role_id, tenant_id): + """ + Mock of remove_user_role method + """ + return (user_id, role_id, tenant_id) + + @staticmethod + def roles_for_user(user, tenant): + """ + Mock of roles_for_user method + """ + role = MockRoles() + role.user_id = user + role.tenant_id = tenant + return [role] + + +class MockTenants: + """ + Mock of Tenants class + """ + + flag = None + + def __init__(self): + self.id = "446" + self.name = "nova" + self.description = "description" + self.enabled = "True" + + @staticmethod + def create(name, description, enabled): + """ + Mock of create method + """ + tenant = MockTenants() + tenant.name = name + tenant.description = description + tenant.enabled = enabled + return tenant + + def get(self, tenant_id): + """ + Mock of get method + """ + tenant = MockTenants() + if self.flag == 1: + tenant.id = None + return tenant + tenant.id = tenant_id + return tenant + + @staticmethod + def list(): + """ + Mock of list method + """ + return 
[MockTenants()] + + @staticmethod + def delete(tenant_id): + """ + Mock of delete method + """ + return tenant_id + + +class MockServiceCatalog: + """ + Mock of ServiceCatalog class + """ + + def __init__(self): + self.id = "446" + self.expires = "No" + self.user_id = "admin" + self.tenant_id = "ae04" + + def get_token(self): + """ + Mock of get_token method + """ + return { + "id": self.id, + "expires": self.expires, + "user_id": self.user_id, + "tenant_id": self.tenant_id, + } + + +class MockUsers: + """ + Mock of Users class + """ + + flag = None + + def __init__(self): + self.id = "446" + self.name = "nova" + self.email = "salt@saltstack.com" + self.enabled = "True" + self.tenant_id = "a1a1" + self.password = "salt" + + def create(self, name, password, email, tenant_id, enabled): + """ + Mock of create method + """ + user = MockUsers() + user.name = name + user.password = password + user.email = email + user.enabled = enabled + self.tenant_id = tenant_id + return user + + def get(self, user_id): + """ + Mock of get method + """ + user = MockUsers() + if self.flag == 1: + user.id = None + return user + user.id = user_id + return user + + @staticmethod + def list(): + """ + Mock of list method + """ + return [MockUsers()] + + @staticmethod + def delete(user_id): + """ + Mock of delete method + """ + return user_id + + @staticmethod + def update(user, name, email, enabled): + """ + Mock of update method + """ + return (user, name, email, enabled) + + @staticmethod + def update_password(user, password): + """ + Mock of update_password method + """ + return (user, password) + + +class Unauthorized(Exception): + """ + The base exception class for all exceptions. + """ + + def __init__(self, message="Test"): + super().__init__(message) + self.msg = message + + +class AuthorizationFailure(Exception): + """ + Additional exception class to Unauthorized. 
+ """ + + def __init__(self, message="Test"): + super().__init__(message) + self.msg = message + + +class MockExceptions: + """ + Mock of exceptions class + """ + + def __init__(self): + self.Unauthorized = Unauthorized + self.AuthorizationFailure = AuthorizationFailure + + +class MockKeystoneClient: + """ + Mock of keystoneclient module + """ + + def __init__(self): + self.exceptions = MockExceptions() + + +class MockClient: + """ + Mock of Client class + """ + + flag = None + + def __init__(self, profile=None, **conn_args): + self.ec2 = MockEC2() + self.endpoints = MockEndpoints() + self.services = MockServices() + self.roles = MockRoles() + self.tenants = MockTenants() + self.service_catalog = MockServiceCatalog() + self.users = MockUsers() + + def Client(self, **kwargs): + """ + Mock of Client method + """ + if self.flag == 1: + raise Unauthorized + return True + + +@pytest.fixture +def configure_loader_modules(): + return { + keystone: { + "auth": MockClient, + "client": MockClient(), + "keystoneclient": MockKeystoneClient(), + "__salt__": {"config.get": config.get}, + "__opts__": {}, + }, + config: {"__opts__": {}}, + } + + +def test_ec2_credentials_create(): + """ + Test if it create EC2-compatible credentials for user per tenant + """ + assert keystone.ec2_credentials_create() == {"Error": "Could not resolve User ID"} + + assert keystone.ec2_credentials_create(user_id="salt") == { + "Error": "Could not resolve Tenant ID" + } + + assert keystone.ec2_credentials_create(user_id="salt", tenant_id="72278") == { + "access": "", + "tenant_id": "72278", + "secret": "", + "user_id": "salt", + } + + +def test_ec2_credentials_delete(): + """ + Test if it delete EC2-compatible credentials + """ + assert keystone.ec2_credentials_delete() == {"Error": "Could not resolve User ID"} + + assert ( + keystone.ec2_credentials_delete(user_id="salt", access_key="72278") + == 'ec2 key "72278" deleted under user id "salt"' + ) + + +def test_ec2_credentials_get(): + """ + Test if it 
return ec2_credentials for a user + (keystone ec2-credentials-get) + """ + assert keystone.ec2_credentials_get() == {"Error": "Unable to resolve user id"} + + assert keystone.ec2_credentials_get(user_id="salt") == { + "Error": "Access key is required" + } + + assert keystone.ec2_credentials_get( + user_id="salt", access="72278", profile="openstack1" + ) == { + "salt": { + "access": "72278", + "secret": "", + "tenant": "", + "user_id": "salt", + } + } + + +def test_ec2_credentials_list(): + """ + Test if it return a list of ec2_credentials + for a specific user (keystone ec2-credentials-list) + """ + assert keystone.ec2_credentials_list() == {"Error": "Unable to resolve user id"} + + assert keystone.ec2_credentials_list(user_id="salt", profile="openstack1") == { + "salt": {"access": "", "secret": "", "tenant_id": "", "user_id": "salt"} + } + + +def test_endpoint_get(): + """ + Test if it return a specific endpoint (keystone endpoint-get) + """ + assert keystone.endpoint_get("nova", "RegionOne", profile="openstack") == { + "Error": "Could not find the specified service" + } + + ret = {"Error": "Could not find endpoint for the specified service"} + MockServices.flag = 1 + assert keystone.endpoint_get("iptables", "RegionOne", profile="openstack") == ret + + MockServices.flag = 0 + assert keystone.endpoint_get("iptables", "RegionOne", profile="openstack") == { + "adminurl": "adminurl", + "id": "007", + "internalurl": "internalurl", + "publicurl": "publicurl", + "region": "RegionOne", + "service_id": "117", + } + + +def test_endpoint_list(): + """ + Test if it return a list of available endpoints + (keystone endpoints-list) + """ + assert keystone.endpoint_list(profile="openstack1") == { + "007": { + "adminurl": "adminurl", + "id": "007", + "internalurl": "internalurl", + "publicurl": "publicurl", + "region": "RegionOne", + "service_id": "117", + } + } + + +def test_endpoint_create(): + """ + Test if it create an endpoint for an Openstack service + """ + assert 
keystone.endpoint_create("nova") == { + "Error": "Could not find the specified service" + } + + MockServices.flag = 2 + assert keystone.endpoint_create( + "iptables", + "http://public/url", + "http://internal/url", + "http://adminurl/url", + "RegionOne", + ) == { + "adminurl": "adminurl", + "id": "007", + "internalurl": "internalurl", + "publicurl": "publicurl", + "region": "RegionOne", + "service_id": "117", + } + + +def test_endpoint_delete(): + """ + Test if it delete an endpoint for an Openstack service + """ + ret = {"Error": "Could not find any endpoints for the service"} + assert keystone.endpoint_delete("nova", "RegionOne") == ret + + with patch.object( + keystone, "endpoint_get", MagicMock(side_effect=[{"id": "117"}, None]) + ): + assert keystone.endpoint_delete("iptables", "RegionOne") + + +def test_role_create(): + """ + Test if it create named role + """ + assert keystone.role_create("nova") == {"Error": 'Role "nova" already exists'} + + assert keystone.role_create("iptables") == {"Error": "Unable to resolve role id"} + + +def test_role_delete(): + """ + Test if it delete a role (keystone role-delete) + """ + assert keystone.role_delete() == {"Error": "Unable to resolve role id"} + + assert keystone.role_delete("iptables") == "Role ID iptables deleted" + + +def test_role_get(): + """ + Test if it return a specific roles (keystone role-get) + """ + assert keystone.role_get() == {"Error": "Unable to resolve role id"} + + assert keystone.role_get(name="nova") == {"nova": {"id": "113", "name": "nova"}} + + +def test_role_list(): + """ + Test if it return a list of available roles (keystone role-list) + """ + assert keystone.role_list() == { + "nova": { + "id": "113", + "name": "nova", + "tenant_id": "a1a1", + "user_id": "446", + } + } + + +def test_service_create(): + """ + Test if it add service to Keystone service catalog + """ + MockServices.flag = 2 + assert keystone.service_create("nova", "compute", "OpenStack Service") == { + "iptables": { + 
"description": "description", + "id": "005", + "name": "iptables", + "type": "type", + } + } + + +def test_service_delete(): + """ + Test if it delete a service from Keystone service catalog + """ + assert ( + keystone.service_delete("iptables") == 'Keystone service ID "iptables" deleted' + ) + + +def test_service_get(): + """ + Test if it return a list of available services (keystone services-list) + """ + MockServices.flag = 0 + assert keystone.service_get() == {"Error": "Unable to resolve service id"} + + MockServices.flag = 2 + assert keystone.service_get(service_id="c965") == { + "iptables": { + "description": "description", + "id": "c965", + "name": "iptables", + "type": "type", + } + } + + +def test_service_list(): + """ + Test if it return a list of available services (keystone services-list) + """ + MockServices.flag = 0 + assert keystone.service_list(profile="openstack1") == { + "iptables": { + "description": "description", + "id": "117", + "name": "iptables", + "type": "type", + } + } + + +def test_tenant_create(): + """ + Test if it create a keystone tenant + """ + assert keystone.tenant_create("nova") == { + "nova": { + "description": "description", + "id": "446", + "name": "nova", + "enabled": "True", + } + } + + +def test_tenant_delete(): + """ + Test if it delete a tenant (keystone tenant-delete) + """ + assert keystone.tenant_delete() == {"Error": "Unable to resolve tenant id"} + + assert keystone.tenant_delete("nova") == "Tenant ID nova deleted" + + +def test_tenant_get(): + """ + Test if it return a specific tenants (keystone tenant-get) + """ + assert keystone.tenant_get() == {"Error": "Unable to resolve tenant id"} + + assert keystone.tenant_get(tenant_id="446") == { + "nova": { + "description": "description", + "id": "446", + "name": "nova", + "enabled": "True", + } + } + + +def test_tenant_list(): + """ + Test if it return a list of available tenants (keystone tenants-list) + """ + assert keystone.tenant_list() == { + "nova": { + 
"description": "description", + "id": "446", + "name": "nova", + "enabled": "True", + } + } + + +def test_tenant_update(): + """ + Test if it update a tenant's information (keystone tenant-update) + """ + assert keystone.tenant_update() == {"Error": "Unable to resolve tenant id"} + + +def test_token_get(): + """ + Test if it return the configured tokens (keystone token-get) + """ + assert keystone.token_get() == { + "expires": "No", + "id": "446", + "tenant_id": "ae04", + "user_id": "admin", + } + + +def test_user_list(): + """ + Test if it return a list of available users (keystone user-list) + """ + assert keystone.user_list() == { + "nova": { + "name": "nova", + "tenant_id": "a1a1", + "enabled": "True", + "id": "446", + "password": "salt", + "email": "salt@saltstack.com", + } + } + + +def test_user_get(): + """ + Test if it return a specific users (keystone user-get) + """ + assert keystone.user_get() == {"Error": "Unable to resolve user id"} + + assert keystone.user_get(user_id="446") == { + "nova": { + "name": "nova", + "tenant_id": "a1a1", + "enabled": "True", + "id": "446", + "password": "salt", + "email": "salt@saltstack.com", + } + } + + +def test_user_create(): + """ + Test if it create a user (keystone user-create) + """ + assert keystone.user_create( + name="nova", + password="salt", + email="salt@saltstack.com", + tenant_id="a1a1", + ) == { + "nova": { + "name": "nova", + "tenant_id": "a1a1", + "enabled": "True", + "id": "446", + "password": "salt", + "email": "salt@saltstack.com", + } + } + + +def test_user_delete(): + """ + Test if it delete a user (keystone user-delete) + """ + assert keystone.user_delete() == {"Error": "Unable to resolve user id"} + + assert keystone.user_delete("nova") == "User ID nova deleted" + + +def test_user_update(): + """ + Test if it update a user's information (keystone user-update) + """ + assert keystone.user_update() == {"Error": "Unable to resolve user id"} + + assert keystone.user_update("nova") == "Info updated for 
user ID nova" + + +def test_user_verify_password(): + """ + Test if it verify a user's password + """ + mock = MagicMock(return_value="http://127.0.0.1:35357/v2.0") + with patch.dict(keystone.__salt__, {"config.option": mock}): + assert keystone.user_verify_password() == { + "Error": "Unable to resolve user name" + } + + assert keystone.user_verify_password(user_id="446", name="nova") + + MockClient.flag = 1 + assert not keystone.user_verify_password(user_id="446", name="nova") + + +def test_user_password_update(): + """ + Test if it update a user's password (keystone user-password-update) + """ + assert keystone.user_password_update() == {"Error": "Unable to resolve user id"} + + assert keystone.user_password_update("nova") == "Password updated for user ID nova" + + +def test_user_role_add(): + """ + Test if it add role for user in tenant (keystone user-role-add) + """ + assert ( + keystone.user_role_add(user="nova", tenant="nova", role="nova") + == '"nova" role added for user "nova" for "nova" tenant/project' + ) + + MockRoles.flag = 1 + assert keystone.user_role_add(user="nova", tenant="nova", role="nova") == { + "Error": "Unable to resolve role id" + } + + MockTenants.flag = 1 + assert keystone.user_role_add(user="nova", tenant="nova") == { + "Error": "Unable to resolve tenant/project id" + } + + MockUsers.flag = 1 + assert keystone.user_role_add(user="nova") == {"Error": "Unable to resolve user id"} + + +def test_user_role_remove(): + """ + Test if it add role for user in tenant (keystone user-role-add) + """ + MockUsers.flag = 1 + assert keystone.user_role_remove(user="nova") == { + "Error": "Unable to resolve user id" + } + + MockUsers.flag = 0 + MockTenants.flag = 1 + assert keystone.user_role_remove(user="nova", tenant="nova") == { + "Error": "Unable to resolve tenant/project id" + } + + MockTenants.flag = 0 + MockRoles.flag = 1 + assert keystone.user_role_remove(user="nova", tenant="nova", role="nova") == { + "Error": "Unable to resolve role id" + } + + 
ret = '"nova" role removed for user "nova" under "nova" tenant' + MockRoles.flag = 0 + assert keystone.user_role_remove(user="nova", tenant="nova", role="nova") == ret + + +def test_user_role_list(): + """ + Test if it return a list of available user_roles + (keystone user-roles-list) + """ + assert keystone.user_role_list(user="nova") == { + "Error": "Unable to resolve user or tenant/project id" + } + + assert keystone.user_role_list(user_name="nova", tenant_name="nova") == { + "nova": { + "id": "113", + "name": "nova", + "tenant_id": "446", + "user_id": "446", + } + } + + +def test_api_version_verify_ssl(): + """ + test api_version when using verify_ssl + """ + test_verify = [True, False, None] + conn_args = { + "keystone.user": "admin", + "connection_password": "password", + "connection_tenant": "admin", + "connection_tenant_id": "id", + "connection_auth_url": "https://127.0.0.1/v2.0/", + "connection_verify_ssl": True, + } + + http_ret = {"dict": {"version": {"id": "id_test"}}} + for verify in test_verify: + mock_http = MagicMock(return_value=http_ret) + patch_http = patch("salt.utils.http.query", mock_http) + conn_args["connection_verify_ssl"] = verify + if verify is None: + conn_args.pop("connection_verify_ssl") + verify = True + + with patch_http: + ret = keystone.api_version(**conn_args) + + assert mock_http.call_args_list == [ + call( + "https://127.0.0.1/v2.0/", + decode=True, + decode_type="json", + verify_ssl=verify, + ) + ] diff --git a/tests/pytests/unit/modules/test_linux_shadow.py b/tests/pytests/unit/modules/test_linux_shadow.py new file mode 100644 index 000000000000..5d977fc52121 --- /dev/null +++ b/tests/pytests/unit/modules/test_linux_shadow.py @@ -0,0 +1,324 @@ +""" + :codeauthor: Erik Johnson +""" +import types + +import pytest + +from tests.support.mock import DEFAULT, MagicMock, mock_open, patch + +pytestmark = [ + pytest.mark.skip_unless_on_linux, +] + +shadow = pytest.importorskip( + "salt.modules.linux_shadow", reason="shadow module is 
not available" +) +spwd = pytest.importorskip( + "spwd", reason="Standard library spwd module is not available" +) + + +def _pw_hash_ids(value): + return value.algorithm + + +@pytest.fixture( + params=[ + types.SimpleNamespace( + algorithm="md5", + clear="lamepassword", + pw_salt="TgIp9OTu", + pw_hash="$1$TgIp9OTu$.d0FFP6jVi5ANoQmk6GpM1", + pw_hash_passlib="$1$TgIp9OTu$.d0FFP6jVi5ANoQmk6GpM1", + ), + types.SimpleNamespace( + algorithm="sha256", + clear="lamepassword", + pw_salt="3vINbSrC", + pw_hash="$5$3vINbSrC$hH8A04jAY3bG123yU4FQ0wvP678QDTvWBhHHFbz6j0D", + pw_hash_passlib="$5$rounds=535000$3vINbSrC$YUDOmjJNDLWhL2Z7aAdLJnGIAsbUgkHNEcdUUujHHy8", + ), + types.SimpleNamespace( + algorithm="sha512", + clear="lamepassword", + pw_salt="PiGA3V2o", + pw_hash="$6$PiGA3V2o$/PrntRYufz49bRV/V5Eb1V6DdHaS65LB0fu73Tp/xxmDFr6HWJKptY2TvHRDViXZugWpnAcOnrbORpOgZUGTn.", + pw_hash_passlib="$6$rounds=656000$PiGA3V2o$eaAfTU0e1iUFcQycB94otS66/hTgVj94VIAaDp9IJHagSQ.gZascQYOE5.RO87kSY52lJ1LoYX8LNVa2OG8/U/", + ), + ], + ids=_pw_hash_ids, +) +def password(request): + # Not testing blowfish as it is not available on most Linux distros + return request.param + + +@pytest.fixture(params=["crypto", "passlib"]) +def library(request): + with patch("salt.utils.pycrypto.HAS_CRYPT", request.param == "crypto"): + yield request.param + + +@pytest.fixture +def configure_loader_modules(): + return {shadow: {}} + + +def test_gen_password(password, library): + """ + Test shadow.gen_password + """ + if library == "passlib": + pw_hash = password.pw_hash_passlib + else: + pw_hash = password.pw_hash + assert ( + shadow.gen_password( + password.clear, + crypt_salt=password.pw_salt, + algorithm=password.algorithm, + ) + == pw_hash + ) + + +def test_set_password(): + """ + Test the corner case in which shadow.set_password is called for a user + that has an entry in /etc/passwd but not /etc/shadow. 
+ """ + original_lines = [ + "foo:orighash:17955::::::\n", + "bar:somehash:17955::::::\n", + ] + + data = { + "/etc/shadow": "".join(original_lines), + "*": Exception("Attempted to open something other than /etc/shadow"), + } + isfile_mock = MagicMock( + side_effect=lambda x: True if x == "/etc/shadow" else DEFAULT + ) + password = "newhash" + shadow_info_mock = MagicMock(return_value={"passwd": password}) + + # + # CASE 1: Normal password change + # + user = "bar" + user_exists_mock = MagicMock( + side_effect=lambda x, **y: 0 if x == ["id", user] else DEFAULT + ) + with patch( + "salt.utils.files.fopen", mock_open(read_data=data) + ) as shadow_mock, patch("os.path.isfile", isfile_mock), patch.object( + shadow, "info", shadow_info_mock + ), patch.dict( + shadow.__salt__, {"cmd.retcode": user_exists_mock} + ), patch.dict( + shadow.__grains__, {"os": "CentOS"} + ): + assert shadow.set_password(user, password, use_usermod=False) + + filehandles = shadow_mock.filehandles["/etc/shadow"] + # We should only have opened twice, once to read the contents and once + # to write. + assert len(filehandles) == 2 + # We're rewriting the entire file + assert filehandles[1].mode == "w+" + # We should be calling writelines instead of write, to rewrite the + # entire file. 
+ assert len(filehandles[1].writelines_calls) == 1 + # Make sure we wrote the correct info + lines = filehandles[1].writelines_calls[0] + # Should only have the same two users in the file + assert len(lines) == 2 + # The first line should be unchanged + assert lines[0] == original_lines[0] + # The second line should have the new password hash + assert lines[1].split(":")[:2] == [user, password] + + # + # CASE 2: Corner case: no /etc/shadow entry for user + # + user = "baz" + user_exists_mock = MagicMock( + side_effect=lambda x, **y: 0 if x == ["id", user] else DEFAULT + ) + with patch( + "salt.utils.files.fopen", mock_open(read_data=data) + ) as shadow_mock, patch("os.path.isfile", isfile_mock), patch.object( + shadow, "info", shadow_info_mock + ), patch.dict( + shadow.__salt__, {"cmd.retcode": user_exists_mock} + ), patch.dict( + shadow.__grains__, {"os": "CentOS"} + ): + assert shadow.set_password(user, password, use_usermod=False) + + filehandles = shadow_mock.filehandles["/etc/shadow"] + # We should only have opened twice, once to read the contents and once + # to write. 
+ assert len(filehandles) == 2 + # We're just appending to the file, not rewriting + assert filehandles[1].mode == "a+" + # We should only have written to the file once + assert len(filehandles[1].write_calls) == 1 + # Make sure we wrote the correct info + assert filehandles[1].write_calls[0].split(":")[:2] == [user, password] + + +def test_info(password): + """ + Test if info shows the correct user information + """ + + # First test is with a succesful call + expected_result = [ + ("expire", -1), + ("inact", -1), + ("lstchg", 31337), + ("max", 99999), + ("min", 0), + ("name", "foo"), + ("passwd", password.pw_hash), + ("warn", 7), + ] + getspnam_return = spwd.struct_spwd( + ["foo", password.pw_hash, 31337, 0, 99999, 7, -1, -1, -1] + ) + with patch("spwd.getspnam", return_value=getspnam_return): + result = shadow.info("foo") + assert expected_result == sorted(result.items(), key=lambda x: x[0]) + + # The next two is for a non-existent user + expected_result = [ + ("expire", ""), + ("inact", ""), + ("lstchg", ""), + ("max", ""), + ("min", ""), + ("name", ""), + ("passwd", ""), + ("warn", ""), + ] + # We get KeyError exception for non-existent users in glibc based systems + getspnam_return = KeyError + with patch("spwd.getspnam", side_effect=getspnam_return): + result = shadow.info("foo") + assert expected_result == sorted(result.items(), key=lambda x: x[0]) + # And FileNotFoundError in musl based systems + getspnam_return = FileNotFoundError + with patch("spwd.getspnam", side_effect=getspnam_return): + result = shadow.info("foo") + assert expected_result == sorted(result.items(), key=lambda x: x[0]) + + +def test_set_password_malformed_shadow_entry(): + """ + Test that Salt will repair a malformed shadow entry (that is, one that + doesn't have the correct number of fields). 
+ """ + original_lines = [ + "valid:s00persekr1thash:17955::::::\n", + "tooshort:orighash:17955:::::\n", + "toolong:orighash:17955:::::::\n", + ] + data = { + "/etc/shadow": "".join(original_lines), + "*": Exception("Attempted to open something other than /etc/shadow"), + } + isfile_mock = MagicMock( + side_effect=lambda x: True if x == "/etc/shadow" else DEFAULT + ) + password = "newhash" + shadow_info_mock = MagicMock(return_value={"passwd": password}) + + # + # CASE 1: Fix an entry with too few fields + # + user = "tooshort" + user_exists_mock = MagicMock( + side_effect=lambda x, **y: 0 if x == ["id", user] else DEFAULT + ) + with patch( + "salt.utils.files.fopen", mock_open(read_data=data) + ) as shadow_mock, patch("os.path.isfile", isfile_mock), patch.object( + shadow, "info", shadow_info_mock + ), patch.dict( + shadow.__salt__, {"cmd.retcode": user_exists_mock} + ), patch.dict( + shadow.__grains__, {"os": "CentOS"} + ): + assert shadow.set_password(user, password, use_usermod=False) + + filehandles = shadow_mock.filehandles["/etc/shadow"] + # We should only have opened twice, once to read the contents and once + # to write. + assert len(filehandles) == 2 + # We're rewriting the entire file + assert filehandles[1].mode == "w+" + # We should be calling writelines instead of write, to rewrite the + # entire file. + assert len(filehandles[1].writelines_calls) == 1 + # Make sure we wrote the correct info + lines = filehandles[1].writelines_calls[0] + # Should only have the same three users in the file + assert len(lines) == 3 + # The first and third line should be unchanged + assert lines[0] == original_lines[0] + assert lines[2] == original_lines[2] + # The second line should have the new password hash, and it should have + # gotten "fixed" by adding another colon. 
+ fixed = lines[1].split(":") + assert fixed[:2] == [user, password] + assert len(fixed) == 9 + + # + # CASE 2: Fix an entry with too many fields + # + user = "toolong" + user_exists_mock = MagicMock( + side_effect=lambda x, **y: 0 if x == ["id", user] else DEFAULT + ) + with patch( + "salt.utils.files.fopen", mock_open(read_data=data) + ) as shadow_mock, patch("os.path.isfile", isfile_mock), patch.object( + shadow, "info", shadow_info_mock + ), patch.dict( + shadow.__salt__, {"cmd.retcode": user_exists_mock} + ), patch.dict( + shadow.__grains__, {"os": "CentOS"} + ): + assert shadow.set_password(user, password, use_usermod=False) + + filehandles = shadow_mock.filehandles["/etc/shadow"] + # We should only have opened twice, once to read the contents and once + # to write. + assert len(filehandles) == 2 + # We're rewriting the entire file + assert filehandles[1].mode == "w+" + # We should be calling writelines instead of write, to rewrite the + # entire file. + assert len(filehandles[1].writelines_calls) == 1 + # Make sure we wrote the correct info + lines = filehandles[1].writelines_calls[0] + # Should only have the same three users in the file + assert len(lines) == 3 + # The first and second line should be unchanged + assert lines[0] == original_lines[0] + assert lines[1] == original_lines[1] + # The third line should have the new password hash, and it should have + # gotten "fixed" by reducing it to 9 fields instead of 10. 
+ fixed = lines[2].split(":") + assert fixed[:2] == [user, password] + assert len(fixed) == 9 + + +@pytest.mark.skip_if_not_root +def test_list_users(): + """ + Test if it returns a list of all users + """ + assert shadow.list_users() diff --git a/tests/pytests/unit/modules/test_linux_sysctl.py b/tests/pytests/unit/modules/test_linux_sysctl.py index 0bdd24039d77..6b0875bc4602 100644 --- a/tests/pytests/unit/modules/test_linux_sysctl.py +++ b/tests/pytests/unit/modules/test_linux_sysctl.py @@ -215,7 +215,7 @@ def test_persist_no_conf_failure(): ): with pytest.raises(CommandExecutionError): linux_sysctl.persist("net.ipv4.ip_forward", 42, config=None) - fopen_mock.called_once() + fopen_mock.assert_called_once() def test_persist_no_conf_success(): @@ -353,7 +353,7 @@ def test_persist_value_with_spaces_already_set(tmp_path): """ config = str(tmp_path / "existing_sysctl_with_spaces.conf") value = "|/usr/share/kdump-tools/dump-core %p %s %t %e" - config_file_content = "kernel.core_pattern = {}\n".format(value) + config_file_content = f"kernel.core_pattern = {value}\n" with fopen(config, "w", encoding="utf-8") as config_file: config_file.write(config_file_content) mock_run = MagicMock(return_value=value) @@ -383,7 +383,7 @@ def test_persist_value_with_spaces_already_configured(tmp_path): """ config = str(tmp_path / "existing_sysctl_with_spaces.conf") value = "|/usr/share/kdump-tools/dump-core %p %s %t %e" - config_file_content = "kernel.core_pattern = {}\n".format(value) + config_file_content = f"kernel.core_pattern = {value}\n" with fopen(config, "w", encoding="utf-8") as config_file: config_file.write(config_file_content) mock_run = MagicMock(return_value="") @@ -451,7 +451,7 @@ def test_persist_value_with_spaces_update_config(tmp_path): assert os.path.isfile(config) with fopen(config, encoding="utf-8") as config_file: written = config_file.read() - assert written == "kernel.core_pattern = {}\n".format(value) + assert written == f"kernel.core_pattern = {value}\n" def 
test_persist_value_with_spaces_new_file(tmp_path): diff --git a/tests/pytests/unit/modules/test_mac_brew_pkg.py b/tests/pytests/unit/modules/test_mac_brew_pkg.py index f4ad86a66d2c..749c9765cce8 100644 --- a/tests/pytests/unit/modules/test_mac_brew_pkg.py +++ b/tests/pytests/unit/modules/test_mac_brew_pkg.py @@ -440,8 +440,9 @@ def test_homebrew_bin(HOMEBREW_BIN): Tests the path to the homebrew binary """ mock_path = MagicMock(return_value="/usr/local") - with patch.dict(mac_brew.__salt__, {"cmd.run": mock_path}): - assert mac_brew._homebrew_bin() == HOMEBREW_BIN + with patch("salt.utils.path.which", MagicMock(return_value=HOMEBREW_BIN)): + with patch.dict(mac_brew.__salt__, {"cmd.run": mock_path}): + assert mac_brew._homebrew_bin() == HOMEBREW_BIN # 'list_pkgs' function tests: 2 diff --git a/tests/pytests/unit/modules/test_mac_keychain.py b/tests/pytests/unit/modules/test_mac_keychain.py index eb411e69b57b..bbf9d20aaf41 100644 --- a/tests/pytests/unit/modules/test_mac_keychain.py +++ b/tests/pytests/unit/modules/test_mac_keychain.py @@ -65,7 +65,7 @@ def test_list_certs(): out = keychain.list_certs("/path/to/cert.p12") mock.assert_called_once_with( "security find-certificate -a /path/to/cert.p12 | " - 'grep -o "alis".*\\" | grep -o \'\\"[-A-Za-z0-9.:() ]*\\"\'', + 'grep -o "alis.*" | grep -o \'\\"[-A-Za-z0-9.:() ]*\\"\'', python_shell=True, ) @@ -79,7 +79,18 @@ def test_get_friendly_name(): expected = "ID Installer Salt" mock = MagicMock(return_value="friendlyName: ID Installer Salt") with patch.dict(keychain.__salt__, {"cmd.run": mock}): - out = keychain.get_friendly_name("/path/to/cert.p12", "passw0rd") + out = keychain.get_friendly_name("/path/to/cert.p12", "passw0rd", legacy=True) + mock.assert_called_once_with( + "openssl pkcs12 -legacy -in /path/to/cert.p12 -passin pass:passw0rd -info " + "-nodes -nokeys 2> /dev/null | grep friendlyName:", + python_shell=True, + ) + + assert out == expected + + mock = MagicMock(return_value="friendlyName: ID Installer Salt") 
+ with patch.dict(keychain.__salt__, {"cmd.run": mock}): + out = keychain.get_friendly_name("/path/to/cert.p12", "passw0rd", legacy=False) mock.assert_called_once_with( "openssl pkcs12 -in /path/to/cert.p12 -passin pass:passw0rd -info " "-nodes -nokeys 2> /dev/null | grep friendlyName:", diff --git a/tests/pytests/unit/modules/test_mac_user.py b/tests/pytests/unit/modules/test_mac_user.py index b7ce56689d80..31e19d790e79 100644 --- a/tests/pytests/unit/modules/test_mac_user.py +++ b/tests/pytests/unit/modules/test_mac_user.py @@ -420,3 +420,24 @@ def test_list_users(): mac_user.__salt__, {"cmd.run_all": mock_run} ): assert mac_user.list_users() == expected + + +def test_kcpassword(): + hashes = { + # Actual hashes from macOS, since reference implementation didn't account for trailing null + "0": "4d 89 f9 91 1f 7a 46 5e f7 a8 11 ff", + "password": "0d e8 21 50 a5 d3 af 8e a3 de d9 14", + "shorterpwd": "0e e1 3d 51 a6 d9 af 9a d4 dd 1f 27", + "Squarepants": "2e f8 27 42 a0 d9 ad 8b cd cd 6c 7d", + "longerpasswd": "11 e6 3c 44 b7 ce ad 8b d0 ca 68 19 89 b1 65 ae 7e 89 12 b8 51 f8 f0 ff", + "ridiculouslyextendedpass": "0f e0 36 4a b1 c9 b1 85 d6 ca 73 04 ec 2a 57 b7 d2 b9 8f c7 c9 7e 0e fa 52 7b 71 e6 f8 b7 a6 ae 47 94 d7 86", + } + for password, hash in hashes.items(): + kcpass = mac_user._kcpassword(password) + hash = bytes.fromhex(hash) + + # macOS adds a trailing null and pads the rest with random data + length = len(password) + 1 + + assert kcpass[:length] == hash[:length] + assert len(kcpass) == len(hash) diff --git a/tests/pytests/unit/modules/test_mac_xattr.py b/tests/pytests/unit/modules/test_mac_xattr.py index c5816877e723..feacf2eed09a 100644 --- a/tests/pytests/unit/modules/test_mac_xattr.py +++ b/tests/pytests/unit/modules/test_mac_xattr.py @@ -74,6 +74,19 @@ def test_read_missing(): pytest.raises(CommandExecutionError, xattr.read, "/path/to/file", "attribute") +def test_read_not_decodeable(): + """ + Test reading an attribute which returns non-UTF-8 
bytes + """ + with patch( + "salt.utils.mac_utils.execute_return_result", + MagicMock( + side_effect=UnicodeDecodeError("UTF-8", b"\xd1expected results", 0, 1, "") + ), + ): + assert xattr.read("/path/to/file", "com.attr") == "�expected results" + + def test_write(): """ Test writing a specific attribute to a file diff --git a/tests/pytests/unit/modules/test_msteams.py b/tests/pytests/unit/modules/test_msteams.py new file mode 100644 index 000000000000..a374dd903218 --- /dev/null +++ b/tests/pytests/unit/modules/test_msteams.py @@ -0,0 +1,33 @@ +import pytest + +import salt.modules.config as config +import salt.modules.msteams as msteams +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(minion_opts): + minion_opts.update({"msteams": {"hook_url": "https://example.com/web_hook"}}) + msteams_obj = { + msteams: {"__opts__": minion_opts, "__salt__": {"config.get": config.get}}, + config: { + "__opts__": minion_opts, + "__grains__": {}, + }, + } + return msteams_obj + + +def test_post_card(): + http_ret = {"status": 200} + http_mock = MagicMock(return_value=http_ret) + with patch("salt.utils.http.query", http_mock): + ret = msteams.post_card("test") + assert ret + http_mock.assert_called_once_with( + "https://example.com/web_hook", + method="POST", + header_dict={"Content-Type": "application/json"}, + data='{"text": "test", "title": null, "themeColor": null}', + status=True, + ) diff --git a/tests/pytests/unit/modules/test_nacl.py b/tests/pytests/unit/modules/test_nacl.py index b60dabb3018e..915e2c5e3ddb 100644 --- a/tests/pytests/unit/modules/test_nacl.py +++ b/tests/pytests/unit/modules/test_nacl.py @@ -1,10 +1,44 @@ """ Unit tests for the salt.modules.nacl module """ +import pytest -import salt.modules.nacl +import salt.utils.stringutils from tests.support.mock import patch +pytest.importorskip("nacl.public") +pytest.importorskip("nacl.secret") + +import salt.modules.nacl as nacl + + +@pytest.fixture +def 
configure_loader_modules(minion_opts): + utils = salt.loader.utils(minion_opts) + funcs = salt.loader.minion_mods(minion_opts, utils=utils) + return { + nacl: { + "__opts__": minion_opts, + "__utils__": utils, + "__salt__": funcs, + }, + } + + +@pytest.fixture +def test_keys(): + # Generate the keys + ret = nacl.keygen() + assert "pk" in ret + assert "sk" in ret + return ret["pk"], ret["sk"] + + +@pytest.fixture +def test_data(): + unencrypted_data = salt.utils.stringutils.to_bytes("hello") + return unencrypted_data + def test_fips_mode(): """ @@ -14,3 +48,53 @@ def test_fips_mode(): with patch("salt.modules.nacl.__opts__", opts, create=True): ret = salt.modules.nacl.__virtual__() assert ret == (False, "nacl module not available in FIPS mode") + + +def test_keygen(test_keys): + """ + Test keygen + """ + test_pk, test_sk = test_keys + assert len(test_pk) == 44 + assert len(test_sk) == 44 + + +def test_enc_dec(test_data, test_keys): + """ + Generate keys, encrypt, then decrypt. + """ + # Encrypt with pk + test_pk, test_sk = test_keys + encrypted_data = nacl.enc(data=test_data, pk=test_pk) + + # Decrypt with sk + decrypted_data = nacl.dec(data=encrypted_data, sk=test_sk) + assert test_data == decrypted_data + + +def test_sealedbox_enc_dec(test_data, test_keys): + """ + Generate keys, encrypt, then decrypt. + """ + # Encrypt with pk + test_pk, test_sk = test_keys + encrypted_data = nacl.sealedbox_encrypt(data=test_data, pk=test_pk) + + # Decrypt with sk + decrypted_data = nacl.sealedbox_decrypt(data=encrypted_data, sk=test_sk) + + assert test_data == decrypted_data + + +def test_secretbox_enc_dec(test_data, test_keys): + """ + Generate keys, encrypt, then decrypt. 
+ """ + # Encrypt with sk + test_pk, test_sk = test_keys + encrypted_data = nacl.secretbox_encrypt(data=test_data, sk=test_sk) + + # Decrypt with sk + decrypted_data = nacl.secretbox_decrypt(data=encrypted_data, sk=test_sk) + + assert test_data == decrypted_data diff --git a/tests/pytests/unit/modules/test_npm.py b/tests/pytests/unit/modules/test_npm.py new file mode 100644 index 000000000000..61117ffcc125 --- /dev/null +++ b/tests/pytests/unit/modules/test_npm.py @@ -0,0 +1,199 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.modules.npm +""" + + +import textwrap + +import pytest + +import salt.modules.npm as npm +import salt.utils.json +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + with patch("salt.modules.npm._check_valid_version", MagicMock(return_value=True)): + return {npm: {}} + + +# 'install' function tests: 4 + + +def test_install(): + """ + Test if it installs an NPM package. + """ + mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + pytest.raises(CommandExecutionError, npm.install, "coffee-script") + + # This is at least somewhat closer to the actual output format. 
+ mock_json_out = textwrap.dedent( + """\ + [ + { + "salt": "SALT" + } + ]""" + ) + + # Successful run, expected output format + mock = MagicMock(return_value={"retcode": 0, "stderr": "", "stdout": mock_json_out}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert npm.install("coffee-script") == [{"salt": "SALT"}] + + mock_json_out_extra = textwrap.dedent( + """\ + Compilation output here + + [bcrypt] Success: "/tmp/node_modules/bcrypt/foo" is installed via remote" + [grpc] Success: "/usr/lib/node_modules/@foo/bar" is installed via remote" + [ + { + "from" : "express@", + "name" : "express", + "dependencies" : { + "escape-html" : { + "from" : "escape-html@~1.0.3", + "dependencies" : {}, + "version" : "1.0.3" + } + }, + "version" : "4.16.3" + } + ]""" + ) + extra_expected = [ + { + "dependencies": { + "escape-html": { + "dependencies": {}, + "from": "escape-html@~1.0.3", + "version": "1.0.3", + } + }, + "from": "express@", + "name": "express", + "version": "4.16.3", + } + ] + + # Successful run, expected output format with additional leading text + mock = MagicMock( + return_value={"retcode": 0, "stderr": "", "stdout": mock_json_out_extra} + ) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert npm.install("coffee-script") == extra_expected + + # Successful run, unexpected output format + mock = MagicMock(return_value={"retcode": 0, "stderr": "", "stdout": "SALT"}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + mock_err = MagicMock(side_effect=ValueError()) + # When JSON isn't successfully parsed, return should equal input + with patch.object(salt.utils.json, "loads", mock_err): + assert npm.install("coffee-script") == "SALT" + + +# 'uninstall' function tests: 1 + + +def test_uninstall(): + """ + Test if it uninstalls an NPM package. 
+ """ + mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert not npm.uninstall("coffee-script") + + mock = MagicMock(return_value={"retcode": 0, "stderr": ""}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert npm.uninstall("coffee-script") + + +# 'list_' function tests: 1 + + +def test_list(): + """ + Test if it list installed NPM packages. + """ + mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + pytest.raises(CommandExecutionError, npm.list_, "coffee-script") + + mock = MagicMock( + return_value={ + "retcode": 0, + "stderr": "error", + "stdout": '{"salt": ["SALT"]}', + } + ) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + mock_err = MagicMock(return_value={"dependencies": "SALT"}) + with patch.object(salt.utils.json, "loads", mock_err): + assert npm.list_("coffee-script") == "SALT" + + +# 'cache_clean' function tests: 1 + + +def test_cache_clean(): + """ + Test if it cleans the cached NPM packages. + """ + mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert not npm.cache_clean() + + mock = MagicMock(return_value={"retcode": 0}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert npm.cache_clean() + + mock = MagicMock(return_value={"retcode": 0}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert npm.cache_clean("coffee-script") + + +# 'cache_list' function tests: 1 + + +def test_cache_list(): + """ + Test if it lists the NPM cache. 
+ """ + mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + pytest.raises(CommandExecutionError, npm.cache_list) + + mock = MagicMock( + return_value={"retcode": 0, "stderr": "error", "stdout": ["~/.npm"]} + ) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert npm.cache_list() == ["~/.npm"] + + mock = MagicMock(return_value={"retcode": 0, "stderr": "error", "stdout": ""}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert npm.cache_list("coffee-script") == "" + + +# 'cache_path' function tests: 1 + + +def test_cache_path(): + """ + Test if it prints the NPM cache path. + """ + mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert npm.cache_path() == "error" + + mock = MagicMock( + return_value={"retcode": 0, "stderr": "error", "stdout": "/User/salt/.npm"} + ) + with patch.dict(npm.__salt__, {"cmd.run_all": mock}): + assert npm.cache_path() == "/User/salt/.npm" diff --git a/tests/pytests/unit/modules/test_openscap.py b/tests/pytests/unit/modules/test_openscap.py new file mode 100644 index 000000000000..89af9c634ff6 --- /dev/null +++ b/tests/pytests/unit/modules/test_openscap.py @@ -0,0 +1,211 @@ +import subprocess + +import pytest + +import salt.modules.openscap as openscap +from tests.support.mock import MagicMock, Mock, patch + + +@pytest.fixture +def policy_file(): + yield "/usr/share/openscap/policy-file-xccdf.xml" + + +@pytest.fixture +def configure_loader_modules(tmp_path): + return { + openscap: { + "__salt__": MagicMock(), + } + } + + +def test_openscap_xccdf_eval_success(policy_file, tmp_path): + patch_rmtree = patch("shutil.rmtree", Mock()) + mock_mkdtemp = Mock(return_value=str(tmp_path)) + patch_mkdtemp = patch("tempfile.mkdtemp", mock_mkdtemp) + mock_popen = MagicMock( + return_value=Mock(**{"returncode": 0, "communicate.return_value": ("", "")}) + ) + patch_popen = 
patch.object(openscap, "Popen", mock_popen) + with patch_popen, patch_rmtree, patch_mkdtemp: + response = openscap.xccdf("eval --profile Default {}".format(policy_file)) + + assert mock_mkdtemp.call_count == 1 + expected_cmd = [ + "oscap", + "xccdf", + "eval", + "--oval-results", + "--results", + "results.xml", + "--report", + "report.html", + "--profile", + "Default", + policy_file, + ] + openscap.Popen.assert_called_once_with( + expected_cmd, + cwd=openscap.tempfile.mkdtemp.return_value, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + ) + openscap.__salt__["cp.push_dir"].assert_called_once_with(str(tmp_path)) + assert openscap.shutil.rmtree.call_count == 1 + expected = { + "upload_dir": str(tmp_path), + "error": "", + "success": True, + "returncode": 0, + } + assert response == expected + + +def test_openscap_xccdf_eval_success_with_failing_rules(policy_file, tmp_path): + patch_rmtree = patch("shutil.rmtree", Mock()) + mock_mkdtemp = Mock(return_value=str(tmp_path)) + patch_mkdtemp = patch("tempfile.mkdtemp", mock_mkdtemp) + mock_popen = MagicMock( + return_value=Mock( + **{"returncode": 2, "communicate.return_value": ("", "some error")} + ) + ) + patch_popen = patch.object(openscap, "Popen", mock_popen) + with patch_popen, patch_mkdtemp, patch_rmtree as mock_rmtree: + response = openscap.xccdf("eval --profile Default {}".format(policy_file)) + + assert mock_mkdtemp.call_count == 1 + expected_cmd = [ + "oscap", + "xccdf", + "eval", + "--oval-results", + "--results", + "results.xml", + "--report", + "report.html", + "--profile", + "Default", + policy_file, + ] + openscap.Popen.assert_called_once_with( + expected_cmd, + cwd=openscap.tempfile.mkdtemp.return_value, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + ) + openscap.__salt__["cp.push_dir"].assert_called_once_with(str(tmp_path)) + assert mock_rmtree.call_count == 1 + expected = { + "upload_dir": str(tmp_path), + "error": "some error", + "success": True, + "returncode": 2, + } + assert response == 
expected + + +def test_openscap_xccdf_eval_fail_no_profile(): + response = openscap.xccdf("eval --param Default /unknown/param") + error = "the following arguments are required: --profile" + expected = { + "error": error, + "upload_dir": None, + "success": False, + "returncode": None, + } + assert response == expected + + +def test_openscap_xccdf_eval_success_ignore_unknown_params(tmp_path): + mock_mkdtemp = Mock(return_value=str(tmp_path)) + patch_mkdtemp = patch("tempfile.mkdtemp", mock_mkdtemp) + mock_popen = MagicMock( + return_value=Mock( + **{"returncode": 2, "communicate.return_value": ("", "some error")} + ) + ) + patch_popen = patch("salt.modules.openscap.Popen", mock_popen) + with patch_popen, patch_mkdtemp: + response = openscap.xccdf("eval --profile Default --param Default /policy/file") + expected = { + "upload_dir": str(tmp_path), + "error": "some error", + "success": True, + "returncode": 2, + } + assert response == expected + expected_cmd = [ + "oscap", + "xccdf", + "eval", + "--oval-results", + "--results", + "results.xml", + "--report", + "report.html", + "--profile", + "Default", + "/policy/file", + ] + openscap.Popen.assert_called_once_with( + expected_cmd, + cwd=openscap.tempfile.mkdtemp.return_value, + stderr=subprocess.PIPE, + stdout=subprocess.PIPE, + ) + + +def test_openscap_xccdf_eval_evaluation_error(policy_file): + mock_popen = MagicMock( + return_value=Mock( + **{ + "returncode": 1, + "communicate.return_value": ("", "evaluation error"), + } + ) + ) + patch_popen = patch("salt.modules.openscap.Popen", mock_popen) + with patch_popen: + response = openscap.xccdf("eval --profile Default {}".format(policy_file)) + expected = { + "upload_dir": None, + "error": "evaluation error", + "success": False, + "returncode": 1, + } + assert response == expected + + +def test_openscap_xccdf_eval_fail_not_implemented_action(policy_file): + response = openscap.xccdf("info {}".format(policy_file)) + mock_err = "argument action: invalid choice: 'info' 
(choose from 'eval')" + expected = { + "upload_dir": None, + "error": mock_err, + "success": False, + "returncode": None, + } + assert response == expected + + +def test_openscap_xccdf_eval_evaluation_unknown_error(policy_file): + mock_popen = MagicMock( + return_value=Mock( + **{ + "returncode": 255, + "communicate.return_value": ("", "unknown error"), + } + ) + ) + patch_popen = patch("salt.modules.openscap.Popen", mock_popen) + with patch_popen: + response = openscap.xccdf("eval --profile Default {}".format(policy_file)) + expected = { + "upload_dir": None, + "error": "unknown error", + "success": False, + "returncode": 255, + } + assert response == expected diff --git a/tests/pytests/unit/modules/test_pillar.py b/tests/pytests/unit/modules/test_pillar.py index f2b07d74abf6..6de7e33d535e 100644 --- a/tests/pytests/unit/modules/test_pillar.py +++ b/tests/pytests/unit/modules/test_pillar.py @@ -163,3 +163,20 @@ def test_pillar_get_int_key(): res = pillarmod.get(key=12345, default=default, merge=True) assert {"l2": {"l3": "my_luggage_code"}} == res assert {"l2": {"l3": "your_luggage_code"}} == default + + +def test_pillar_keys(): + """ + Confirm that we can access pillar keys + """ + with patch.dict(pillarmod.__pillar__, {"pkg": {"apache": "httpd"}}): + test_key = "pkg" + assert pillarmod.keys(test_key) == ["apache"] + + with patch.dict( + pillarmod.__pillar__, + {"12345": {"xyz": "my_luggage_code"}, "7": {"11": {"12": "13"}}}, + ): + test_key = "7:11" + res = pillarmod.keys(test_key) + assert res == ["12"] diff --git a/tests/pytests/unit/modules/test_pip.py b/tests/pytests/unit/modules/test_pip.py index d3565ec07ae4..b7ad1ea3fd26 100644 --- a/tests/pytests/unit/modules/test_pip.py +++ b/tests/pytests/unit/modules/test_pip.py @@ -79,7 +79,15 @@ def test__pip_bin_env_no_bin_env(): assert ret is None -def test_install_frozen_app(): +@pytest.fixture +def python_binary(): + binary = [sys.executable, "-m", "pip"] + if hasattr(sys, "RELENV"): + binary = [str(sys.RELENV / 
"salt-pip")] + return binary + + +def test_install_frozen_app(python_binary): pkg = "pep8" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch("sys.frozen", True, create=True): @@ -87,8 +95,7 @@ def test_install_frozen_app(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg) expected = [ - sys.executable, - "pip", + *python_binary, "install", pkg, ] @@ -101,7 +108,7 @@ def test_install_frozen_app(): ) -def test_install_source_app(): +def test_install_source_app(python_binary): pkg = "pep8" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch("sys.frozen", False, create=True): @@ -109,9 +116,7 @@ def test_install_source_app(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", pkg, ] @@ -124,14 +129,12 @@ def test_install_source_app(): ) -def test_fix4361(): +def test_fix4361(python_binary): mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(requirements="requirements.txt") expected_cmd = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--requirement", "requirements.txt", @@ -155,13 +158,13 @@ def test_install_editable_without_egg_fails(): ) -def test_install_multiple_editable(): +def test_install_multiple_editable(python_binary): editables = [ "git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr", "git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting", ] - expected = [sys.executable, "-m", "pip", "install"] + expected = [*python_binary, "install"] for item in editables: expected.extend(["--editable", item]) @@ -190,14 +193,14 @@ def test_install_multiple_editable(): ) -def test_install_multiple_pkgs_and_editables(): +def test_install_multiple_pkgs_and_editables(python_binary): pkgs = ["pep8", "salt"] editables = [ "git+https://github.com/saltstack/istr.git@v1.0.1#egg=iStr", 
"git+https://github.com/saltstack/salt-testing.git#egg=SaltTesting", ] - expected = [sys.executable, "-m", "pip", "install"] + expected = [*python_binary, "install"] expected.extend(pkgs) for item in editables: expected.extend(["--editable", item]) @@ -231,9 +234,7 @@ def test_install_multiple_pkgs_and_editables(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkgs=pkgs[0], editable=editables[0]) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", pkgs[0], "--editable", @@ -248,7 +249,7 @@ def test_install_multiple_pkgs_and_editables(): ) -def test_issue5940_install_multiple_pip_mirrors(): +def test_issue5940_install_multiple_pip_mirrors(python_binary): """ test multiple pip mirrors. This test only works with pip < 7.0.0 """ @@ -259,7 +260,7 @@ def test_issue5940_install_multiple_pip_mirrors(): "http://pypi.crate.io", ] - expected = [sys.executable, "-m", "pip", "install", "--use-mirrors"] + expected = [*python_binary, "install", "--use-mirrors"] for item in mirrors: expected.extend(["--mirrors", item]) expected.append("pep8") @@ -289,9 +290,7 @@ def test_issue5940_install_multiple_pip_mirrors(): ) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--use-mirrors", "--mirrors", @@ -312,7 +311,7 @@ def test_issue5940_install_multiple_pip_mirrors(): ) -def test_install_with_multiple_find_links(): +def test_install_with_multiple_find_links(python_binary): find_links = [ "http://g.pypi.python.org", "http://c.pypi.python.org", @@ -320,7 +319,7 @@ def test_install_with_multiple_find_links(): ] pkg = "pep8" - expected = [sys.executable, "-m", "pip", "install"] + expected = [*python_binary, "install"] for item in find_links: expected.extend(["--find-links", item]) expected.append(pkg) @@ -362,9 +361,7 @@ def test_install_with_multiple_find_links(): ) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--find-links", find_links[0], @@ -422,16 +419,14 @@ def 
test_install_failed_cached_requirements(): assert "my_test_reqs" in ret["comment"] -def test_install_cached_requirements_used(): +def test_install_cached_requirements_used(python_binary): with patch("salt.modules.pip._get_cached_requirements") as get_cached_requirements: get_cached_requirements.return_value = "my_cached_reqs" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(requirements="salt://requirements.txt") expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--requirement", "my_cached_reqs", @@ -479,7 +474,7 @@ def join(*args): ) -def test_install_log_argument_in_resulting_command(): +def test_install_log_argument_in_resulting_command(python_binary): with patch("os.access") as mock_path: pkg = "pep8" log_path = "/tmp/pip-install.log" @@ -487,9 +482,7 @@ def test_install_log_argument_in_resulting_command(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, log=log_path) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--log", log_path, @@ -515,10 +508,10 @@ def test_non_writeable_log(): pytest.raises(IOError, pip.install, pkg, log=log_path) -def test_install_timeout_argument_in_resulting_command(): +def test_install_timeout_argument_in_resulting_command(python_binary): # Passing an int pkg = "pep8" - expected = [sys.executable, "-m", "pip", "install", "--timeout"] + expected = [*python_binary, "install", "--timeout"] mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, timeout=10) @@ -548,16 +541,14 @@ def test_install_timeout_argument_in_resulting_command(): pytest.raises(ValueError, pip.install, pkg, timeout="a") -def test_install_index_url_argument_in_resulting_command(): +def test_install_index_url_argument_in_resulting_command(python_binary): pkg = "pep8" index_url = "http://foo.tld" mock = MagicMock(return_value={"retcode": 0, 
"stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, index_url=index_url) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--index-url", index_url, @@ -572,16 +563,14 @@ def test_install_index_url_argument_in_resulting_command(): ) -def test_install_extra_index_url_argument_in_resulting_command(): +def test_install_extra_index_url_argument_in_resulting_command(python_binary): pkg = "pep8" extra_index_url = "http://foo.tld" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, extra_index_url=extra_index_url) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--extra-index-url", extra_index_url, @@ -596,12 +585,12 @@ def test_install_extra_index_url_argument_in_resulting_command(): ) -def test_install_no_index_argument_in_resulting_command(): +def test_install_no_index_argument_in_resulting_command(python_binary): pkg = "pep8" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, no_index=True) - expected = [sys.executable, "-m", "pip", "install", "--no-index", pkg] + expected = [*python_binary, "install", "--no-index", pkg] mock.assert_called_with( expected, saltenv="base", @@ -611,13 +600,13 @@ def test_install_no_index_argument_in_resulting_command(): ) -def test_install_build_argument_in_resulting_command(): +def test_install_build_argument_in_resulting_command(python_binary): pkg = "pep8" build = "/tmp/foo" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, build=build) - expected = [sys.executable, "-m", "pip", "install", "--build", build, pkg] + expected = [*python_binary, "install", "--build", build, pkg] mock.assert_called_with( expected, saltenv="base", @@ -627,13 +616,13 @@ def test_install_build_argument_in_resulting_command(): ) -def 
test_install_target_argument_in_resulting_command(): +def test_install_target_argument_in_resulting_command(python_binary): pkg = "pep8" target = "/tmp/foo" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, target=target) - expected = [sys.executable, "-m", "pip", "install", "--target", target, pkg] + expected = [*python_binary, "install", "--target", target, pkg] mock.assert_called_with( expected, saltenv="base", @@ -643,16 +632,14 @@ def test_install_target_argument_in_resulting_command(): ) -def test_install_download_argument_in_resulting_command(): +def test_install_download_argument_in_resulting_command(python_binary): pkg = "pep8" download = "/tmp/foo" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, download=download) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--download", download, @@ -667,12 +654,12 @@ def test_install_download_argument_in_resulting_command(): ) -def test_install_no_download_argument_in_resulting_command(): +def test_install_no_download_argument_in_resulting_command(python_binary): pkg = "pep8" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, no_download=True) - expected = [sys.executable, "-m", "pip", "install", "--no-download", pkg] + expected = [*python_binary, "install", "--no-download", pkg] mock.assert_called_with( expected, saltenv="base", @@ -682,7 +669,7 @@ def test_install_no_download_argument_in_resulting_command(): ) -def test_install_download_cache_dir_arguments_in_resulting_command(): +def test_install_download_cache_dir_arguments_in_resulting_command(python_binary): pkg = "pep8" cache_dir_arg_mapping = { "1.5.6": "--download-cache", @@ -697,9 +684,7 @@ def test_install_download_cache_dir_arguments_in_resulting_command(): # test `download_cache` kwarg 
pip.install(pkg, download_cache="/tmp/foo") expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", cmd_arg, download_cache, @@ -724,13 +709,13 @@ def test_install_download_cache_dir_arguments_in_resulting_command(): ) -def test_install_source_argument_in_resulting_command(): +def test_install_source_argument_in_resulting_command(python_binary): pkg = "pep8" source = "/tmp/foo" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, source=source) - expected = [sys.executable, "-m", "pip", "install", "--source", source, pkg] + expected = [*python_binary, "install", "--source", source, pkg] mock.assert_called_with( expected, saltenv="base", @@ -740,16 +725,14 @@ def test_install_source_argument_in_resulting_command(): ) -def test_install_exists_action_argument_in_resulting_command(): +def test_install_exists_action_argument_in_resulting_command(python_binary): pkg = "pep8" for action in ("s", "i", "w", "b"): mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, exists_action=action) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--exists-action", action, @@ -769,11 +752,11 @@ def test_install_exists_action_argument_in_resulting_command(): pytest.raises(CommandExecutionError, pip.install, pkg, exists_action="d") -def test_install_install_options_argument_in_resulting_command(): +def test_install_install_options_argument_in_resulting_command(python_binary): install_options = ["--exec-prefix=/foo/bar", "--install-scripts=/foo/bar/bin"] pkg = "pep8" - expected = [sys.executable, "-m", "pip", "install"] + expected = [*python_binary, "install"] for item in install_options: expected.extend(["--install-option", item]) expected.append(pkg) @@ -807,9 +790,7 @@ def test_install_install_options_argument_in_resulting_command(): with patch.dict(pip.__salt__, {"cmd.run_all": 
mock}): pip.install(pkg, install_options=install_options[0]) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--install-option", install_options[0], @@ -824,11 +805,11 @@ def test_install_install_options_argument_in_resulting_command(): ) -def test_install_global_options_argument_in_resulting_command(): +def test_install_global_options_argument_in_resulting_command(python_binary): global_options = ["--quiet", "--no-user-cfg"] pkg = "pep8" - expected = [sys.executable, "-m", "pip", "install"] + expected = [*python_binary, "install"] for item in global_options: expected.extend(["--global-option", item]) expected.append(pkg) @@ -862,9 +843,7 @@ def test_install_global_options_argument_in_resulting_command(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, global_options=global_options[0]) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--global-option", global_options[0], @@ -879,12 +858,12 @@ def test_install_global_options_argument_in_resulting_command(): ) -def test_install_upgrade_argument_in_resulting_command(): +def test_install_upgrade_argument_in_resulting_command(python_binary): pkg = "pep8" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, upgrade=True) - expected = [sys.executable, "-m", "pip", "install", "--upgrade", pkg] + expected = [*python_binary, "install", "--upgrade", pkg] mock.assert_called_with( expected, saltenv="base", @@ -894,15 +873,13 @@ def test_install_upgrade_argument_in_resulting_command(): ) -def test_install_force_reinstall_argument_in_resulting_command(): +def test_install_force_reinstall_argument_in_resulting_command(python_binary): pkg = "pep8" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, force_reinstall=True) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", 
"--force-reinstall", pkg, @@ -916,15 +893,13 @@ def test_install_force_reinstall_argument_in_resulting_command(): ) -def test_install_ignore_installed_argument_in_resulting_command(): +def test_install_ignore_installed_argument_in_resulting_command(python_binary): pkg = "pep8" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, ignore_installed=True) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--ignore-installed", pkg, @@ -938,12 +913,12 @@ def test_install_ignore_installed_argument_in_resulting_command(): ) -def test_install_no_deps_argument_in_resulting_command(): +def test_install_no_deps_argument_in_resulting_command(python_binary): pkg = "pep8" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, no_deps=True) - expected = [sys.executable, "-m", "pip", "install", "--no-deps", pkg] + expected = [*python_binary, "install", "--no-deps", pkg] mock.assert_called_with( expected, saltenv="base", @@ -953,12 +928,12 @@ def test_install_no_deps_argument_in_resulting_command(): ) -def test_install_no_install_argument_in_resulting_command(): +def test_install_no_install_argument_in_resulting_command(python_binary): pkg = "pep8" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, no_install=True) - expected = [sys.executable, "-m", "pip", "install", "--no-install", pkg] + expected = [*python_binary, "install", "--no-install", pkg] mock.assert_called_with( expected, saltenv="base", @@ -968,13 +943,13 @@ def test_install_no_install_argument_in_resulting_command(): ) -def test_install_proxy_argument_in_resulting_command(): +def test_install_proxy_argument_in_resulting_command(python_binary): pkg = "pep8" proxy = "salt-user:salt-passwd@salt-proxy:3128" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) 
with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(pkg, proxy=proxy) - expected = [sys.executable, "-m", "pip", "install", "--proxy", proxy, pkg] + expected = [*python_binary, "install", "--proxy", proxy, pkg] mock.assert_called_with( expected, saltenv="base", @@ -984,7 +959,7 @@ def test_install_proxy_argument_in_resulting_command(): ) -def test_install_proxy_false_argument_in_resulting_command(): +def test_install_proxy_false_argument_in_resulting_command(python_binary): """ Checking that there is no proxy set if proxy arg is set to False even if the global proxy is set. @@ -1001,7 +976,7 @@ def test_install_proxy_false_argument_in_resulting_command(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): with patch.dict(pip.__opts__, config_mock): pip.install(pkg, proxy=proxy) - expected = [sys.executable, "-m", "pip", "install", pkg] + expected = [*python_binary, "install", pkg] mock.assert_called_with( expected, saltenv="base", @@ -1011,7 +986,7 @@ def test_install_proxy_false_argument_in_resulting_command(): ) -def test_install_global_proxy_in_resulting_command(): +def test_install_global_proxy_in_resulting_command(python_binary): """ Checking that there is proxy set if global proxy is set. 
""" @@ -1028,9 +1003,7 @@ def test_install_global_proxy_in_resulting_command(): with patch.dict(pip.__opts__, config_mock): pip.install(pkg) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--proxy", proxy, @@ -1045,13 +1018,13 @@ def test_install_global_proxy_in_resulting_command(): ) -def test_install_multiple_requirements_arguments_in_resulting_command(): +def test_install_multiple_requirements_arguments_in_resulting_command(python_binary): with patch("salt.modules.pip._get_cached_requirements") as get_cached_requirements: cached_reqs = ["my_cached_reqs-1", "my_cached_reqs-2"] get_cached_requirements.side_effect = cached_reqs requirements = ["salt://requirements-1.txt", "salt://requirements-2.txt"] - expected = [sys.executable, "-m", "pip", "install"] + expected = [*python_binary, "install"] for item in cached_reqs: expected.extend(["--requirement", item]) @@ -1086,9 +1059,7 @@ def test_install_multiple_requirements_arguments_in_resulting_command(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.install(requirements=requirements[0]) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", "--requirement", cached_reqs[0], @@ -1102,7 +1073,7 @@ def test_install_multiple_requirements_arguments_in_resulting_command(): ) -def test_install_extra_args_arguments_in_resulting_command(): +def test_install_extra_args_arguments_in_resulting_command(python_binary): pkg = "pep8" mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): @@ -1110,9 +1081,7 @@ def test_install_extra_args_arguments_in_resulting_command(): pkg, extra_args=[{"--latest-pip-kwarg": "param"}, "--latest-pip-arg"] ) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "install", pkg, "--latest-pip-kwarg", @@ -1148,13 +1117,13 @@ def test_install_extra_args_arguments_recursion_error(): ) -def test_uninstall_multiple_requirements_arguments_in_resulting_command(): +def 
test_uninstall_multiple_requirements_arguments_in_resulting_command(python_binary): with patch("salt.modules.pip._get_cached_requirements") as get_cached_requirements: cached_reqs = ["my_cached_reqs-1", "my_cached_reqs-2"] get_cached_requirements.side_effect = cached_reqs requirements = ["salt://requirements-1.txt", "salt://requirements-2.txt"] - expected = [sys.executable, "-m", "pip", "uninstall", "-y"] + expected = [*python_binary, "uninstall", "-y"] for item in cached_reqs: expected.extend(["--requirement", item]) @@ -1191,9 +1160,7 @@ def test_uninstall_multiple_requirements_arguments_in_resulting_command(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.uninstall(requirements=requirements[0]) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "uninstall", "-y", "--requirement", @@ -1209,7 +1176,7 @@ def test_uninstall_multiple_requirements_arguments_in_resulting_command(): ) -def test_uninstall_global_proxy_in_resulting_command(): +def test_uninstall_global_proxy_in_resulting_command(python_binary): """ Checking that there is proxy set if global proxy is set. """ @@ -1226,9 +1193,7 @@ def test_uninstall_global_proxy_in_resulting_command(): with patch.dict(pip.__opts__, config_mock): pip.uninstall(pkg) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "uninstall", "-y", "--proxy", @@ -1245,7 +1210,7 @@ def test_uninstall_global_proxy_in_resulting_command(): ) -def test_uninstall_proxy_false_argument_in_resulting_command(): +def test_uninstall_proxy_false_argument_in_resulting_command(python_binary): """ Checking that there is no proxy set if proxy arg is set to False even if the global proxy is set. 
@@ -1262,7 +1227,7 @@ def test_uninstall_proxy_false_argument_in_resulting_command(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): with patch.dict(pip.__opts__, config_mock): pip.uninstall(pkg, proxy=proxy) - expected = [sys.executable, "-m", "pip", "uninstall", "-y", pkg] + expected = [*python_binary, "uninstall", "-y", pkg] mock.assert_called_with( expected, saltenv="base", @@ -1273,7 +1238,7 @@ def test_uninstall_proxy_false_argument_in_resulting_command(): ) -def test_uninstall_log_argument_in_resulting_command(): +def test_uninstall_log_argument_in_resulting_command(python_binary): pkg = "pep8" log_path = "/tmp/pip-install.log" @@ -1281,9 +1246,7 @@ def test_uninstall_log_argument_in_resulting_command(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): pip.uninstall(pkg, log=log_path) expected = [ - sys.executable, - "-m", - "pip", + *python_binary, "uninstall", "-y", "--log", @@ -1307,9 +1270,9 @@ def test_uninstall_log_argument_in_resulting_command(): pytest.raises(IOError, pip.uninstall, pkg, log=log_path) -def test_uninstall_timeout_argument_in_resulting_command(): +def test_uninstall_timeout_argument_in_resulting_command(python_binary): pkg = "pep8" - expected = [sys.executable, "-m", "pip", "uninstall", "-y", "--timeout"] + expected = [*python_binary, "uninstall", "-y", "--timeout"] # Passing an int mock = MagicMock(return_value={"retcode": 0, "stdout": ""}) with patch.dict(pip.__salt__, {"cmd.run_all": mock}): @@ -1342,8 +1305,8 @@ def test_uninstall_timeout_argument_in_resulting_command(): pytest.raises(ValueError, pip.uninstall, pkg, timeout="a") -def test_freeze_command(): - expected = [sys.executable, "-m", "pip", "freeze"] +def test_freeze_command(python_binary): + expected = [*python_binary, "freeze"] eggs = [ "M2Crypto==0.21.1", "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", @@ -1389,7 +1352,7 @@ def test_freeze_command(): ) -def test_freeze_command_with_all(): +def 
test_freeze_command_with_all(python_binary): eggs = [ "M2Crypto==0.21.1", "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", @@ -1403,7 +1366,7 @@ def test_freeze_command_with_all(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): with patch("salt.modules.pip.version", MagicMock(return_value="9.0.1")): ret = pip.freeze() - expected = [sys.executable, "-m", "pip", "freeze", "--all"] + expected = [*python_binary, "freeze", "--all"] mock.assert_called_with( expected, cwd=None, @@ -1423,7 +1386,7 @@ def test_freeze_command_with_all(): ) -def test_list_freeze_parse_command(): +def test_list_freeze_parse_command(python_binary): eggs = [ "M2Crypto==0.21.1", "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", @@ -1436,7 +1399,7 @@ def test_list_freeze_parse_command(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): with patch("salt.modules.pip.version", MagicMock(return_value=mock_version)): ret = pip.list_freeze_parse() - expected = [sys.executable, "-m", "pip", "freeze"] + expected = [*python_binary, "freeze"] mock.assert_called_with( expected, cwd=None, @@ -1463,7 +1426,7 @@ def test_list_freeze_parse_command(): ) -def test_list_freeze_parse_command_with_all(): +def test_list_freeze_parse_command_with_all(python_binary): eggs = [ "M2Crypto==0.21.1", "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", @@ -1481,7 +1444,7 @@ def test_list_freeze_parse_command_with_all(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): with patch("salt.modules.pip.version", MagicMock(return_value=mock_version)): ret = pip.list_freeze_parse() - expected = [sys.executable, "-m", "pip", "freeze", "--all"] + expected = [*python_binary, "freeze", "--all"] mock.assert_called_with( expected, cwd=None, @@ -1509,7 +1472,7 @@ def test_list_freeze_parse_command_with_all(): ) -def 
test_list_freeze_parse_command_with_prefix(): +def test_list_freeze_parse_command_with_prefix(python_binary): eggs = [ "M2Crypto==0.21.1", "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", @@ -1521,7 +1484,7 @@ def test_list_freeze_parse_command_with_prefix(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ret = pip.list_freeze_parse(prefix="bb") - expected = [sys.executable, "-m", "pip", "freeze"] + expected = [*python_binary, "freeze"] mock.assert_called_with( expected, cwd=None, @@ -1532,7 +1495,7 @@ def test_list_freeze_parse_command_with_prefix(): assert ret == {"bbfreeze-loader": "1.1.0", "bbfreeze": "1.1.0"} -def test_list_upgrades_legacy(): +def test_list_upgrades_legacy(python_binary): eggs = [ "apache-libcloud (Current: 1.1.0 Latest: 2.2.1 [wheel])", "appdirs (Current: 1.4.1 Latest: 1.4.3 [wheel])", @@ -1543,7 +1506,7 @@ def test_list_upgrades_legacy(): with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ret = pip.list_upgrades() mock.assert_called_with( - [sys.executable, "-m", "pip", "list", "--outdated"], + [*python_binary, "list", "--outdated"], cwd=None, runas=None, ) @@ -1554,7 +1517,7 @@ def test_list_upgrades_legacy(): } -def test_list_upgrades_gt9(): +def test_list_upgrades_gt9(python_binary): eggs = """[{"latest_filetype": "wheel", "version": "1.1.0", "name": "apache-libcloud", "latest_version": "2.2.1"}, {"latest_filetype": "wheel", "version": "1.4.1", "name": "appdirs", "latest_version": "1.4.3"}, {"latest_filetype": "sdist", "version": "1.11.63", "name": "awscli", "latest_version": "1.12.1"} @@ -1565,9 +1528,7 @@ def test_list_upgrades_gt9(): ret = pip.list_upgrades() mock.assert_called_with( [ - sys.executable, - "-m", - "pip", + *python_binary, "list", "--outdated", "--format=json", @@ -1582,7 +1543,7 @@ def test_list_upgrades_gt9(): } -def test_is_installed_true(): +def 
test_is_installed_true(python_binary): eggs = [ "M2Crypto==0.21.1", "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", @@ -1595,7 +1556,7 @@ def test_is_installed_true(): with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ret = pip.is_installed(pkgname="bbfreeze") mock.assert_called_with( - [sys.executable, "-m", "pip", "freeze"], + [*python_binary, "freeze"], cwd=None, runas=None, python_shell=False, @@ -1604,7 +1565,7 @@ def test_is_installed_true(): assert ret -def test_is_installed_false(): +def test_is_installed_false(python_binary): eggs = [ "M2Crypto==0.21.1", "-e git+git@github.com:s0undt3ch/salt-testing.git@9ed81aa2f918d59d3706e56b18f0782d1ea43bf8#egg=SaltTesting-dev", @@ -1617,7 +1578,7 @@ def test_is_installed_false(): with patch("salt.modules.pip.version", MagicMock(return_value="6.1.1")): ret = pip.is_installed(pkgname="notexist") mock.assert_called_with( - [sys.executable, "-m", "pip", "freeze"], + [*python_binary, "freeze"], cwd=None, runas=None, python_shell=False, @@ -1626,7 +1587,7 @@ def test_is_installed_false(): assert not ret -def test_install_pre_argument_in_resulting_command(): +def test_install_pre_argument_in_resulting_command(python_binary): pkg = "pep8" # Lower than 1.4 versions don't end up with `--pre` in the resulting output mock = MagicMock( @@ -1638,7 +1599,7 @@ def test_install_pre_argument_in_resulting_command(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): with patch("salt.modules.pip.version", MagicMock(return_value="1.3")): pip.install(pkg, pre_releases=True) - expected = [sys.executable, "-m", "pip", "install", pkg] + expected = [*python_binary, "install", pkg] mock.assert_called_with( expected, saltenv="base", @@ -1777,7 +1738,7 @@ def test_when_version_is_called_with_a_user_it_should_be_passed_to_undelying_run ) -def test_install_target_from_VENV_PIP_TARGET_in_resulting_command(): +def 
test_install_target_from_VENV_PIP_TARGET_in_resulting_command(python_binary): pkg = "pep8" target = "/tmp/foo" target_env = "/tmp/bar" @@ -1788,7 +1749,7 @@ def test_install_target_from_VENV_PIP_TARGET_in_resulting_command(): os, "environ", environment ): pip.install(pkg) - expected = [sys.executable, "-m", "pip", "install", "--target", target_env, pkg] + expected = [*python_binary, "install", "--target", target_env, pkg] mock.assert_called_with( expected, saltenv="base", @@ -1798,7 +1759,7 @@ def test_install_target_from_VENV_PIP_TARGET_in_resulting_command(): ) mock.reset_mock() pip.install(pkg, target=target) - expected = [sys.executable, "-m", "pip", "install", "--target", target, pkg] + expected = [*python_binary, "install", "--target", target, pkg] mock.assert_called_with( expected, saltenv="base", @@ -1808,7 +1769,7 @@ def test_install_target_from_VENV_PIP_TARGET_in_resulting_command(): ) -def test_list(): +def test_list(python_binary): json_out = dedent( """ [ @@ -1853,7 +1814,7 @@ def test_list(): with patch.dict(pip.__salt__, {"cmd.run_all": mock}): with patch("salt.modules.pip.version", MagicMock(return_value=mock_version)): ret = pip.list_() - expected = [sys.executable, "-m", "pip", "list", "--format=json"] + expected = [*python_binary, "list", "--format=json"] mock.assert_called_with( expected, cwd=None, diff --git a/tests/pytests/unit/modules/test_postgres.py b/tests/pytests/unit/modules/test_postgres.py index 393c087d1644..b828e8204b99 100644 --- a/tests/pytests/unit/modules/test_postgres.py +++ b/tests/pytests/unit/modules/test_postgres.py @@ -1,8 +1,18 @@ +import datetime +import re + import pytest +from pytestskipmarkers.utils import platform import salt.modules.config as configmod import salt.modules.postgres as postgres -from tests.support.mock import MagicMock, patch +from salt.exceptions import SaltInvocationError +from tests.support.mock import MagicMock, Mock, call, patch + +pytestmark = [ + pytest.mark.skip_unless_on_linux(reason="Only 
supported on Linux family"), +] + # 'md5' + md5('password' + 'username') md5_pw = "md55a231fcdb710d73268c4f44283487ba2" @@ -13,9 +23,47 @@ "LzAh/MGUdjYkdbDzcOKpfGwa3WwPUsyGcY+TEnSpcto=" ) -test_privileges_list_function_csv = ( - 'name\n"{baruwatest=X/baruwatest,bayestest=r/baruwatest,baruwa=X*/baruwatest}"\n' -) + +@pytest.fixture +def get_test_privileges_list_function_csv(): + return """name +"{baruwatest=X/baruwatest,bayestest=r/baruwatest,baruwa=X*/baruwatest}" +""" + + +@pytest.fixture +def get_test_list_db_csv(): + return """Name,Owner,Encoding,Collate,Ctype,Access privileges,Tablespace +template1,postgres,LATIN1,en_US,en_US,"{=c/postgres,postgres=CTc/postgres}",pg_default +template0,postgres,LATIN1,en_US,en_US,"{=c/postgres,postgres=CTc/postgres}",pg_default +postgres,postgres,LATIN1,en_US,en_US,,pg_default +test_db,postgres,LATIN1,en_US,en_US,,pg_default +""" + + +@pytest.fixture +def get_test_list_schema_csv(): + return """name,owner,acl +public,postgres,"{postgres=UC/postgres,=UC/postgres}" +pg_toast,postgres,"" +""" + + +@pytest.fixture +def get_test_list_language_csv(): + return "Name\ninternal\nc\nsql\nplpgsql\n" + + +@pytest.fixture +def get_test_privileges_list_table_csv(): + return """name +"{baruwatest=arwdDxt/baruwatest,bayestest=arwd/baruwatest,baruwa=a*r*w*d*D*x*t*/baruwatest}" +""" + + +@pytest.fixture +def get_test_privileges_list_group_csv(): + return "rolname,admin_option\nbaruwa,f\nbaruwatest2,t\nbaruwatest,f\n" @pytest.fixture @@ -70,14 +118,16 @@ def idfn(val): ids=idfn, ) def test_verify_password(role, password, verifier, method, result): + if platform.is_fips_enabled() and (method == "md5" or verifier == md5_pw): + pytest.skip("Test cannot run on a FIPS enabled platform") assert postgres._verify_password(role, password, verifier, method) == result -def test_has_privileges_with_function(): +def test_has_privileges_with_function(get_test_privileges_list_function_csv): with patch( "salt.modules.postgres._run_psql", MagicMock( - 
return_value={"retcode": 0, "stdout": test_privileges_list_function_csv} + return_value={"retcode": 0, "stdout": get_test_privileges_list_function_csv} ), ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): ret = postgres.has_privileges( @@ -181,3 +231,2342 @@ def test__run_initdb_with_timeout(): with patch.dict(configmod.__opts__, {"postgres.pass": None}): postgres._run_initdb("fakename", runas="saltuser") cmd_run_mock.assert_called_with(cmd_str, timeout=0, **kwargs) + + +def test_run_psql(): + postgres._run_psql('echo "hi"') + cmd = postgres.__salt__["cmd.run_all"] + assert cmd.call_args[1]["runas"] == "postgres" + + +def test_db_alter(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + ret = postgres.db_alter( + "dbname", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + tablespace="testspace", + owner="otheruser", + runas="foo", + ) + assert ret is True + + postgres._run_psql.assert_has_calls( + [ + call( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'ALTER DATABASE "dbname" OWNER TO "otheruser"', + ], + host="testhost", + user="testuser", + password="foo", + runas="foo", + port="testport", + ), + call( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'ALTER DATABASE "dbname" SET TABLESPACE "testspace"', + ], + host="testhost", + user="testuser", + password="foo", + runas="foo", + port="testport", + ), + ] + ) + + +def test_db_alter_owner_recurse(): + with patch( + "salt.modules.postgres.owner_to", Mock(return_value={"retcode": None}) + 
), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + postgres.db_alter( + "dbname", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + tablespace="testspace", + owner="otheruser", + owner_recurse=True, + runas="foo", + ) + postgres.owner_to.assert_called_once_with( + "dbname", + "otheruser", + user="testuser", + host="testhost", + port="testport", + password="foo", + runas="foo", + ) + + +def test_db_create(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + postgres.db_create( + "dbname", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + tablespace="testspace", + owner="otheruser", + runas="foo", + ) + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'CREATE DATABASE "dbname" WITH TABLESPACE = "testspace" ' + 'OWNER = "otheruser"', + ], + host="testhost", + user="testuser", + password="foo", + runas="foo", + port="testport", + ) + + +def test_db_create_empty_string_param(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + postgres.db_create( + "dbname", + lc_collate="", + encoding="utf8", + user="testuser", + host="testhost", + port=1234, + maintenance_db="maint_db", + password="foo", + ) + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "1234", + "--dbname", + "maint_db", + "-c", + "CREATE DATABASE \"dbname\" WITH ENCODING = 'utf8' 
LC_COLLATE = ''", + ], + host="testhost", + password="foo", + port=1234, + runas=None, + user="testuser", + ) + + +def test_db_create_with_trivial_sql_injection(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + pytest.raises( + SaltInvocationError, + postgres.db_create, + "dbname", + lc_collate="foo' ENCODING='utf8", + ) + + +def test_db_exists(get_test_list_db_csv): + with patch( + "salt.modules.postgres._run_psql", + Mock(return_value={"retcode": 0, "stdout": get_test_list_db_csv}), + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + ret = postgres.db_exists( + "test_db", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + runas="foo", + ) + assert ret is True + + +def test_db_list(get_test_list_db_csv): + with patch( + "salt.modules.postgres._run_psql", + Mock(return_value={"retcode": 0, "stdout": get_test_list_db_csv}), + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + ret = postgres.db_list( + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + runas="foo", + ) + assert ret == { + "test_db": { + "Encoding": "LATIN1", + "Ctype": "en_US", + "Tablespace": "pg_default", + "Collate": "en_US", + "Owner": "postgres", + "Access privileges": "", + }, + "template1": { + "Encoding": "LATIN1", + "Ctype": "en_US", + "Tablespace": "pg_default", + "Collate": "en_US", + "Owner": "postgres", + "Access privileges": "{=c/postgres,postgres=CTc/postgres}", + }, + "template0": { + "Encoding": "LATIN1", + "Ctype": "en_US", + "Tablespace": "pg_default", + "Collate": "en_US", + "Owner": "postgres", + "Access privileges": "{=c/postgres,postgres=CTc/postgres}", + }, + "postgres": { + "Encoding": "LATIN1", + "Ctype": "en_US", + "Tablespace": "pg_default", + "Collate": "en_US", + "Owner": "postgres", + 
"Access privileges": "", + }, + } + + +def test_db_remove(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + postgres.db_remove( + "test_db", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + runas="foo", + ) + + calls = ( + call( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'REVOKE CONNECT ON DATABASE "test_db" FROM public;', + ], + host="testhost", + password="foo", + port="testport", + runas="foo", + user="testuser", + ), + call( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + "SELECT pid, pg_terminate_backend(pid) FROM pg_stat_activity" + " WHERE datname = 'test_db' AND pid <> pg_backend_pid();", + ], + host="testhost", + password="foo", + port="testport", + runas="foo", + user="testuser", + ), + call( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'DROP DATABASE "test_db";', + ], + host="testhost", + password="foo", + port="testport", + runas="foo", + user="testuser", + ), + ) + + postgres._run_psql.assert_has_calls(calls, any_order=True) + + +def test_group_create(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")), patch( + "salt.modules.postgres.user_exists", Mock(return_value=False) + ): + postgres.group_create( + "testgroup", + user="testuser", + host="testhost", + 
port="testport", + maintenance_db="maint_db", + password="foo", + createdb=False, + encrypted=False, + superuser=False, + replication=False, + rolepassword="testrolepass", + groups="testgroup", + runas="foo", + ) + # postgres._run_psql.call_args[0][0] will contain the list of CLI args. + # The first 14 elements of this list are initial args used in all (or + # virtually all) commands run through _run_psql(), so the actual SQL + # query will be in the 15th argument. + assert postgres._run_psql.call_args[0][0][14].startswith("CREATE ROLE") + + +def test_group_remove(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")), patch( + "salt.modules.postgres.user_exists", Mock(return_value=True) + ): + postgres.group_remove( + "testgroup", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + runas="foo", + ) + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'DROP ROLE "testgroup"', + ], + host="testhost", + user="testuser", + password="foo", + runas="foo", + port="testport", + ) + + +def test_group_update(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch( + "salt.modules.postgres.role_get", + Mock(return_value={"superuser": False}), + ): + postgres.group_update( + "testgroup", + user='"testuser"', + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + createdb=False, + encrypted=False, + replication=False, + rolepassword="test_role_pass", + groups="testgroup", + runas="foo", + ) + # postgres._run_psql.call_args[0][0] will contain the list of CLI args. 
+ # The first 14 elements of this list are initial args used in all (or + # virtually all) commands run through _run_psql(), so the actual SQL + # query will be in the 15th argument. + assert re.match( + 'ALTER.* "testgroup" .* UNENCRYPTED PASSWORD', + postgres._run_psql.call_args[0][0][14], + ) + + +def test_user_create(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.modules.postgres.user_exists", Mock(return_value=False)): + postgres.user_create( + "testuser", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_test", + password="test_pass", + login=True, + createdb=False, + createroles=False, + encrypted=False, + superuser=False, + replication=False, + rolepassword="test_role_pass", + valid_until="2042-07-01", + groups="test_groups", + runas="foo", + ) + # postgres._run_psql.call_args[0][0] will contain the list of CLI args. + # The first 14 elements of this list are initial args used in all (or + # virtually all) commands run through _run_psql(), so the actual SQL + # query will be in the 15th argument. 
+ call = postgres._run_psql.call_args[0][0][14] + assert re.match('CREATE ROLE "testuser"', call) + for i in ( + "INHERIT", + "NOCREATEDB", + "NOCREATEROLE", + "NOSUPERUSER", + "NOREPLICATION", + "LOGIN", + "UNENCRYPTED", + "PASSWORD", + "VALID UNTIL", + ): + assert i in call, f"{i} not in {call}" + + +def test_user_exists(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.modules.postgres.version", Mock(return_value="9.1")), patch( + "salt.modules.postgres.psql_query", + Mock( + return_value=[ + { + "name": "test_user", + "superuser": "t", + "inherits privileges": "t", + "can create roles": "t", + "can create databases": "t", + "can update system catalogs": "t", + "can login": "t", + "replication": None, + "password": "test_password", + "connections": "-1", + "groups": "", + "expiry time": "", + "defaults variables": None, + } + ] + ), + ): + ret = postgres.user_exists( + "test_user", + user="test_user", + host="test_host", + port="test_port", + maintenance_db="maint_db", + password="test_password", + runas="foo", + ) + assert ret is True + + +def test_user_list(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.modules.postgres.version", Mock(return_value="9.1")), patch( + "salt.modules.postgres.psql_query", + Mock( + return_value=[ + { + "name": "test_user", + "superuser": "t", + "inherits privileges": "t", + "can create roles": "t", + "can create databases": "t", + "can update system catalogs": "t", + "can login": "t", + "replication": None, + "connections": "-1", + "groups": "", + "expiry time": "2017-08-16 08:57:46", + "defaults variables": None, + } + ] + ), + ): + ret = postgres.user_list( + "test_user", + host="test_host", + port="test_port", + maintenance_db="maint_db", + password="test_password", + runas="foo", + ) + + assert ret == { + "test_user": { + "superuser": True, + "defaults variables": None, + "can create databases": True, + "can create 
roles": True, + "connections": None, + "replication": None, + "expiry time": datetime.datetime(2017, 8, 16, 8, 57, 46), + "can login": True, + "can update system catalogs": True, + "groups": [], + "inherits privileges": True, + } + } + + +def test_user_remove(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")), patch( + "salt.modules.postgres.version", Mock(return_value="9.1") + ), patch( + "salt.modules.postgres.user_exists", Mock(return_value=True) + ): + postgres.user_remove( + "testuser", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="testpassword", + runas="foo", + ) + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'DROP ROLE "testuser"', + ], + host="testhost", + port="testport", + user="testuser", + password="testpassword", + runas="foo", + ) + + +def test_user_update(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch( + "salt.modules.postgres.role_get", + Mock(return_value={"superuser": False}), + ): + postgres.user_update( + "test_username", + user="test_user", + host="test_host", + port="test_port", + maintenance_db="test_maint", + password="test_pass", + createdb=False, + createroles=False, + encrypted=False, + inherit=True, + login=True, + replication=False, + rolepassword="test_role_pass", + valid_until="2017-07-01", + groups="test_groups", + runas="foo", + ) + # postgres._run_psql.call_args[0][0] will contain the list of CLI args. + # The first 14 elements of this list are initial args used in all (or + # virtually all) commands run through _run_psql(), so the actual SQL + # query will be in the 15th argument. 
+ assert re.match( + 'ALTER ROLE "test_username" WITH INHERIT NOCREATEDB ' + "NOCREATEROLE NOREPLICATION LOGIN " + "UNENCRYPTED PASSWORD ['\"]{0,5}test_role_pass['\"]{0,5} " + "VALID UNTIL '2017-07-01';" + ' GRANT "test_groups" TO "test_username"', + postgres._run_psql.call_args[0][0][14], + ) + + +def test_user_update2(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch( + "salt.modules.postgres.role_get", + Mock(return_value={"superuser": False}), + ): + postgres.user_update( + "test_username", + user="test_user", + host="test_host", + port="test_port", + maintenance_db="test_maint", + password="test_pass", + createdb=False, + createroles=True, + encrypted=False, + inherit=True, + login=True, + replication=False, + groups="test_groups", + runas="foo", + ) + # postgres._run_psql.call_args[0][0] will contain the list of CLI args. + # The first 14 elements of this list are initial args used in all (or + # virtually all) commands run through _run_psql(), so the actual SQL + # query will be in the 15th argument. + assert re.match( + 'ALTER ROLE "test_username" WITH INHERIT NOCREATEDB ' + "CREATEROLE NOREPLICATION LOGIN;" + ' GRANT "test_groups" TO "test_username"', + postgres._run_psql.call_args[0][0][14], + ) + + +def test_user_update3(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch( + "salt.modules.postgres.role_get", + Mock(return_value={"superuser": False}), + ): + postgres.user_update( + "test_username", + user="test_user", + host="test_host", + port="test_port", + maintenance_db="test_maint", + password="test_pass", + createdb=False, + createroles=True, + encrypted=False, + inherit=True, + login=True, + rolepassword=False, + replication=False, + groups="test_groups", + runas="foo", + ) + # postgres._run_psql.call_args[0][0] will contain the list of CLI args. 
+ # The first 14 elements of this list are initial args used in all (or + # virtually all) commands run through _run_psql(), so the actual SQL + # query will be in the 15th argument. + assert re.match( + 'ALTER ROLE "test_username" WITH INHERIT NOCREATEDB ' + "CREATEROLE NOREPLICATION LOGIN NOPASSWORD;" + ' GRANT "test_groups" TO "test_username"', + postgres._run_psql.call_args[0][0][14], + ) + + +@pytest.mark.skip_on_fips_enabled_platform +def test_user_update_encrypted_passwd(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch( + "salt.modules.postgres.role_get", + Mock(return_value={"superuser": False}), + ): + postgres.user_update( + "test_username", + user="test_user", + host="test_host", + port="test_port", + maintenance_db="test_maint", + password="test_pass", + createdb=False, + createroles=True, + encrypted=True, + inherit=True, + login=True, + rolepassword="foobar", + replication=False, + groups="test_groups", + runas="foo", + ) + # postgres._run_psql.call_args[0][0] will contain the list of CLI args. + # The first 14 elements of this list are initial args used in all (or + # virtually all) commands run through _run_psql(), so the actual SQL + # query will be in the 15th argument. + assert re.match( + 'ALTER ROLE "test_username" WITH INHERIT NOCREATEDB ' + "CREATEROLE NOREPLICATION LOGIN " + "ENCRYPTED PASSWORD " + "['\"]{0,5}md531c27e68d3771c392b52102c01be1da1['\"]{0,5}" + '; GRANT "test_groups" TO "test_username"', + postgres._run_psql.call_args[0][0][14], + ) + + +def test_version(): + with patch( + "salt.modules.postgres._run_psql", + Mock(return_value={"retcode": 0, "stdout": "9.1.9"}), + ): + postgres.version( + user="test_user", + host="test_host", + port="test_port", + maintenance_db="test_maint", + password="test_pass", + runas="foo", + ) + # postgres._run_psql.call_args[0][0] will contain the list of CLI args. 
+ # The first 14 elements of this list are initial args used in all (or + # virtually all) commands run through _run_psql(), so the actual SQL + # query will be in the 15th argument. + assert re.match( + "SELECT setting FROM pg_catalog.pg_settings", + postgres._run_psql.call_args[0][0][14], + ) + + +def test_installed_extensions(): + with patch( + "salt.modules.postgres.psql_query", + Mock(return_value=[{"extname": "foo", "extversion": "1"}]), + ): + exts = postgres.installed_extensions() + assert exts == {"foo": {"extversion": "1", "extname": "foo"}} + + +def test_available_extensions(): + with patch( + "salt.modules.postgres.psql_query", + Mock(return_value=[{"name": "foo", "default_version": "1"}]), + ): + exts = postgres.available_extensions() + assert exts == {"foo": {"default_version": "1", "name": "foo"}} + + +def test_drop_extension2(): + with patch( + "salt.modules.postgres.installed_extensions", Mock(side_effect=[{}, {}]) + ): + with patch( + "salt.modules.postgres._psql_prepare_and_run", Mock(return_value=None) + ): + with patch( + "salt.modules.postgres.available_extensions", + Mock(return_value={"foo": {"default_version": "1", "name": "foo"}}), + ): + assert postgres.drop_extension("foo") + + +def test_drop_extension3(): + with patch( + "salt.modules.postgres.installed_extensions", + Mock(side_effect=[{"foo": {"extversion": "1", "extname": "foo"}}, {}]), + ): + with patch( + "salt.modules.postgres._psql_prepare_and_run", Mock(return_value=None) + ): + with patch( + "salt.modules.postgres.available_extensions", + Mock(return_value={"foo": {"default_version": "1", "name": "foo"}}), + ): + assert postgres.drop_extension("foo") + + +def test_drop_extension1(): + with patch( + "salt.modules.postgres.installed_extensions", + Mock( + side_effect=[ + {"foo": {"extversion": "1", "extname": "foo"}}, + {"foo": {"extversion": "1", "extname": "foo"}}, + ] + ), + ): + with patch( + "salt.modules.postgres._psql_prepare_and_run", Mock(return_value=None) + ): + with 
patch( + "salt.modules.postgres.available_extensions", + Mock(return_value={"foo": {"default_version": "1", "name": "foo"}}), + ): + assert not postgres.drop_extension("foo") + + +def test_create_mtdata(): + with patch( + "salt.modules.postgres.installed_extensions", + Mock( + return_value={ + "foo": { + "extversion": "0.8", + "extrelocatable": "t", + "schema_name": "foo", + "extname": "foo", + } + }, + ), + ): + with patch( + "salt.modules.postgres.available_extensions", + Mock(return_value={"foo": {"default_version": "1.4", "name": "foo"}}), + ): + ret = postgres.create_metadata("foo", schema="bar", ext_version="1.4") + assert postgres._EXTENSION_INSTALLED in ret + assert postgres._EXTENSION_TO_UPGRADE in ret + assert postgres._EXTENSION_TO_MOVE in ret + + ret = postgres.create_metadata("foo", schema="foo", ext_version="0.4") + assert postgres._EXTENSION_INSTALLED in ret + assert postgres._EXTENSION_TO_UPGRADE not in ret + assert postgres._EXTENSION_TO_MOVE not in ret + + ret = postgres.create_metadata("foo") + assert postgres._EXTENSION_INSTALLED in ret + assert postgres._EXTENSION_TO_UPGRADE not in ret + assert postgres._EXTENSION_TO_MOVE not in ret + + ret = postgres.create_metadata("foobar") + assert postgres._EXTENSION_NOT_INSTALLED in ret + assert postgres._EXTENSION_INSTALLED not in ret + assert postgres._EXTENSION_TO_UPGRADE not in ret + assert postgres._EXTENSION_TO_MOVE not in ret + + +def test_create_extension_newerthan(): + """ + scenario of creating upgrading extensions with possible schema and + version specifications + """ + with patch( + "salt.modules.postgres.create_metadata", + Mock( + side_effect=[ + # create succeeded + [postgres._EXTENSION_NOT_INSTALLED], + [postgres._EXTENSION_INSTALLED], + [postgres._EXTENSION_NOT_INSTALLED], + [postgres._EXTENSION_INSTALLED], + # create failed + [postgres._EXTENSION_NOT_INSTALLED], + [postgres._EXTENSION_NOT_INSTALLED], + # move+upgrade succeeded + [ + postgres._EXTENSION_TO_MOVE, + 
postgres._EXTENSION_TO_UPGRADE, + postgres._EXTENSION_INSTALLED, + ], + [postgres._EXTENSION_INSTALLED], + # move succeeded + [postgres._EXTENSION_TO_MOVE, postgres._EXTENSION_INSTALLED], + [postgres._EXTENSION_INSTALLED], + # upgrade succeeded + [postgres._EXTENSION_TO_UPGRADE, postgres._EXTENSION_INSTALLED], + [postgres._EXTENSION_INSTALLED], + # upgrade failed + [postgres._EXTENSION_TO_UPGRADE, postgres._EXTENSION_INSTALLED], + [postgres._EXTENSION_TO_UPGRADE, postgres._EXTENSION_INSTALLED], + # move failed + [postgres._EXTENSION_TO_MOVE, postgres._EXTENSION_INSTALLED], + [postgres._EXTENSION_TO_MOVE, postgres._EXTENSION_INSTALLED], + ] + ), + ): + with patch( + "salt.modules.postgres._psql_prepare_and_run", Mock(return_value=None) + ): + with patch( + "salt.modules.postgres.available_extensions", + Mock(return_value={"foo": {"default_version": "1.4", "name": "foo"}}), + ): + assert postgres.create_extension("foo") + assert re.match( + 'CREATE EXTENSION IF NOT EXISTS "foo" ;', + postgres._psql_prepare_and_run.call_args[0][0][1], + ) + + assert postgres.create_extension( + "foo", schema="a", ext_version="b", from_version="c" + ) + assert re.match( + 'CREATE EXTENSION IF NOT EXISTS "foo" ' + 'WITH SCHEMA "a" VERSION b FROM c ;', + postgres._psql_prepare_and_run.call_args[0][0][1], + ) + assert not postgres.create_extension("foo") + + ret = postgres.create_extension("foo", ext_version="a", schema="b") + assert ret is True + assert re.match( + 'ALTER EXTENSION "foo" SET SCHEMA "b";' + ' ALTER EXTENSION "foo" UPDATE TO a;', + postgres._psql_prepare_and_run.call_args[0][0][1], + ) + + ret = postgres.create_extension("foo", ext_version="a", schema="b") + assert ret is True + assert re.match( + 'ALTER EXTENSION "foo" SET SCHEMA "b";', + postgres._psql_prepare_and_run.call_args[0][0][1], + ) + + ret = postgres.create_extension("foo", ext_version="a", schema="b") + assert ret is True + assert re.match( + 'ALTER EXTENSION "foo" UPDATE TO a;', + 
postgres._psql_prepare_and_run.call_args[0][0][1], + ) + assert not postgres.create_extension("foo", ext_version="a", schema="b") + assert not postgres.create_extension("foo", ext_version="a", schema="b") + + +@pytest.mark.skip_on_fips_enabled_platform +def test_encrypt_passwords(): + assert postgres._maybe_encrypt_password("foo", "bar", False) == "bar" + assert ( + postgres._maybe_encrypt_password("foo", "bar", True) + == "md596948aad3fcae80c08a35c9b5958cd89" + ) + + +def test_schema_list(get_test_list_schema_csv): + with patch( + "salt.modules.postgres._run_psql", + Mock(return_value={"retcode": 0, "stdout": get_test_list_schema_csv}), + ): + ret = postgres.schema_list( + "maint_db", + db_user="testuser", + db_host="testhost", + db_port="testport", + db_password="foo", + ) + assert ret == { + "public": { + "acl": "{postgres=UC/postgres,=UC/postgres}", + "owner": "postgres", + }, + "pg_toast": {"acl": "", "owner": "postgres"}, + } + + +def test_schema_exists(): + with patch("salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0})): + with patch( + "salt.modules.postgres.psql_query", + Mock( + return_value=[ + { + "name": "public", + "acl": "{postgres=UC/postgres,=UC/postgres}", + "owner": "postgres", + } + ] + ), + ): + ret = postgres.schema_exists("template1", "public") + assert ret is True + + +def test_schema_get(): + with patch("salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0})): + with patch( + "salt.modules.postgres.psql_query", + Mock( + return_value=[ + { + "name": "public", + "acl": "{postgres=UC/postgres,=UC/postgres}", + "owner": "postgres", + } + ] + ), + ): + ret = postgres.schema_get("template1", "public") + assert ret == { + "acl": "{postgres=UC/postgres,=UC/postgres}", + "owner": "postgres", + } + + +def test_schema_get_again(): + with patch("salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0})): + with patch( + "salt.modules.postgres.psql_query", + Mock( + return_value=[ + { + "name": "public", + "acl": 
"{postgres=UC/postgres,=UC/postgres}", + "owner": "postgres", + } + ] + ), + ): + ret = postgres.schema_get("template1", "pg_toast") + assert ret is None + + +def test_schema_create(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + with patch("salt.modules.postgres.schema_exists", Mock(return_value=False)): + postgres.schema_create( + "maint_db", + "testschema", + user="user", + db_host="testhost", + db_port="testport", + db_user="testuser", + db_password="testpassword", + ) + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'CREATE SCHEMA "testschema"', + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + +def test_schema_create2(): + with patch("salt.modules.postgres.schema_exists", Mock(return_value=True)): + ret = postgres.schema_create( + "test_db", + "test_schema", + user="user", + db_host="test_host", + db_port="test_port", + db_user="test_user", + db_password="test_password", + ) + assert ret is False + + +def test_schema_remove(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + with patch("salt.modules.postgres.schema_exists", Mock(return_value=True)): + postgres.schema_remove( + "maint_db", + "testschema", + user="user", + db_host="testhost", + db_port="testport", + db_user="testuser", + db_password="testpassword", + ) + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + 
"maint_db", + "-c", + 'DROP SCHEMA "testschema"', + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + +def test_schema_remove2(): + with patch("salt.modules.postgres.schema_exists", Mock(return_value=False)): + ret = postgres.schema_remove( + "test_db", + "test_schema", + user="user", + db_host="test_host", + db_port="test_port", + db_user="test_user", + db_password="test_password", + ) + assert ret is False + + +def test_language_list(get_test_list_language_csv): + """ + Test language listing + """ + with patch( + "salt.modules.postgres._run_psql", + Mock(return_value={"retcode": 0, "stdout": get_test_list_language_csv}), + ): + ret = postgres.language_list( + "testdb", + user="testuser", + host="testhost", + port="testport", + password="foo", + ) + assert ret == { + "c": "c", + "internal": "internal", + "plpgsql": "plpgsql", + "sql": "sql", + } + + +def test_language_exists(): + """ + Test language existence check + """ + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch( + "salt.modules.postgres.psql_query", + Mock( + return_value=[ + {"Name": "internal"}, + {"Name": "c"}, + {"Name": "sql"}, + {"Name": "plpgsql"}, + ] + ), + ), patch( + "salt.modules.postgres.language_exists", Mock(return_value=True) + ): + ret = postgres.language_exists("sql", "testdb") + assert ret is True + + +def test_language_create(): + """ + Test language creation - does not exist in db + """ + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + with patch("salt.modules.postgres.language_exists", Mock(return_value=False)): + postgres.language_create( + "plpythonu", + "testdb", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + 
"--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "testdb", + "-c", + "CREATE LANGUAGE plpythonu", + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + +def test_language_create_exists(): + """ + Test language creation - already exists in db + """ + with patch("salt.modules.postgres.language_exists", Mock(return_value=True)): + ret = postgres.language_create( + "plpythonu", + "testdb", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + assert ret is False + + +def test_language_remove(): + """ + Test language removal - exists in db + """ + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + with patch("salt.modules.postgres.language_exists", Mock(return_value=True)): + postgres.language_remove( + "plpgsql", + "testdb", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "testdb", + "-c", + "DROP LANGUAGE plpgsql", + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + +def test_language_remove_non_exist(): + """ + Test language removal - does not exist in db + """ + with patch("salt.modules.postgres.language_exists", Mock(return_value=False)): + ret = postgres.language_remove( + "plpgsql", + "testdb", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + assert ret is False + + +def test_privileges_list_table(get_test_privileges_list_table_csv): + 
""" + Test privilege listing on a table + """ + with patch( + "salt.modules.postgres._run_psql", + Mock(return_value={"retcode": 0, "stdout": get_test_privileges_list_table_csv}), + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + ret = postgres.privileges_list( + "awl", + "table", + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + expected = { + "bayestest": { + "INSERT": False, + "UPDATE": False, + "SELECT": False, + "DELETE": False, + }, + "baruwa": { + "INSERT": True, + "TRUNCATE": True, + "UPDATE": True, + "TRIGGER": True, + "REFERENCES": True, + "SELECT": True, + "DELETE": True, + }, + "baruwatest": { + "INSERT": False, + "TRUNCATE": False, + "UPDATE": False, + "TRIGGER": False, + "REFERENCES": False, + "SELECT": False, + "DELETE": False, + }, + } + assert ret == expected + + query = ( + "COPY (SELECT relacl AS name FROM pg_catalog.pg_class c " + "JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace " + "WHERE nspname = 'public' AND relname = 'awl' AND relkind in ('r', 'v') " + "ORDER BY relname) TO STDOUT WITH CSV HEADER" + ) + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "db_name", + "-v", + "datestyle=ISO,MDY", + "-c", + query, + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + +def test_privileges_list_group(get_test_privileges_list_group_csv): + """ + Test privilege listing on a group + """ + with patch( + "salt.modules.postgres._run_psql", + Mock(return_value={"retcode": 0, "stdout": get_test_privileges_list_group_csv}), + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + ret = postgres.privileges_list( + "admin", + "group", + maintenance_db="db_name", + 
runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + expected = { + "baruwa": False, + "baruwatest": False, + "baruwatest2": True, + } + assert ret == expected + + query = ( + "COPY (SELECT rolname, admin_option " + "FROM pg_catalog.pg_auth_members m JOIN pg_catalog.pg_roles r " + "ON m.member=r.oid WHERE m.roleid IN (SELECT oid FROM " + "pg_catalog.pg_roles WHERE rolname='admin') ORDER BY rolname) " + "TO STDOUT WITH CSV HEADER" + ) + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "db_name", + "-v", + "datestyle=ISO,MDY", + "-c", + query, + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + +def test_has_privileges_on_table(get_test_privileges_list_table_csv): + """ + Test privilege checks on table + """ + with patch( + "salt.modules.postgres._run_psql", + Mock(return_value={"retcode": 0, "stdout": get_test_privileges_list_table_csv}), + ): + ret = postgres.has_privileges( + "baruwa", + "awl", + "table", + "SELECT,INSERT", + grant_option=True, + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + assert ret is True + + ret = postgres.has_privileges( + "baruwa", + "awl", + "table", + "ALL", + grant_option=True, + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + assert ret is True + + ret = postgres.has_privileges( + "baruwa", + "awl", + "table", + "ALL", + grant_option=False, + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + assert ret is True + + ret = postgres.has_privileges( + "bayestest", + "awl", + "table", + 
"SELECT,INSERT,TRUNCATE", + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + assert ret is False + + ret = postgres.has_privileges( + "bayestest", + "awl", + "table", + "SELECT,INSERT", + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + assert ret is True + + +def test_has_privileges_on_group(get_test_privileges_list_group_csv): + """ + Test privilege checks on group + """ + with patch( + "salt.modules.postgres._run_psql", + Mock(return_value={"retcode": 0, "stdout": get_test_privileges_list_group_csv}), + ): + ret = postgres.has_privileges( + "baruwa", + "admin", + "group", + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + assert ret is True + + ret = postgres.has_privileges( + "baruwa", + "admin", + "group", + grant_option=True, + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + assert ret is False + + ret = postgres.has_privileges( + "tony", + "admin", + "group", + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + assert ret is False + + +def test_privileges_grant_table(): + """ + Test granting privileges on table + """ + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + with patch("salt.modules.postgres.has_privileges", Mock(return_value=False)): + ret = postgres.privileges_grant( + "baruwa", + "awl", + "table", + "ALL", + grant_option=True, + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + + query = 'GRANT ALL ON TABLE public."awl" TO "baruwa" WITH GRANT 
OPTION' + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "db_name", + "-c", + query, + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")), patch( + "salt.modules.postgres.has_privileges", Mock(return_value=False) + ): + ret = postgres.privileges_grant( + "baruwa", + "awl", + "table", + "ALL", + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + + query = 'GRANT ALL ON TABLE public."awl" TO "baruwa"' + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "db_name", + "-c", + query, + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + # Test grant on all tables + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")), patch( + "salt.modules.postgres.has_privileges", Mock(return_value=False) + ): + ret = postgres.privileges_grant( + "baruwa", + "ALL", + "table", + "SELECT", + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + + query = 'GRANT SELECT ON ALL TABLES IN SCHEMA public TO "baruwa"' + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + 
"testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "db_name", + "-c", + query, + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + +def test_privileges_grant_group(): + """ + Test granting privileges on group + """ + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")), patch( + "salt.modules.postgres.has_privileges", Mock(return_value=False) + ): + ret = postgres.privileges_grant( + "baruwa", + "admins", + "group", + grant_option=True, + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + + query = 'GRANT admins TO "baruwa" WITH ADMIN OPTION' + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "db_name", + "-c", + query, + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")), patch( + "salt.modules.postgres.has_privileges", Mock(return_value=False) + ): + ret = postgres.privileges_grant( + "baruwa", + "admins", + "group", + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + + query = 'GRANT admins TO "baruwa"' + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "db_name", + "-c", + query, + ], + host="testhost", + 
port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + +def test_privileges_revoke_table(): + """ + Test revoking privileges on table + """ + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")), patch( + "salt.modules.postgres.has_privileges", Mock(return_value=True) + ): + ret = postgres.privileges_revoke( + "baruwa", + "awl", + "table", + "ALL", + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + + query = "REVOKE ALL ON TABLE public.awl FROM baruwa" + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "db_name", + "-c", + query, + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + runas="user", + ) + + +def test_privileges_revoke_group(): + """ + Test revoking privileges on group + """ + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")), patch( + "salt.modules.postgres.has_privileges", Mock(return_value=True) + ): + ret = postgres.privileges_revoke( + "baruwa", + "admins", + "group", + maintenance_db="db_name", + runas="user", + host="testhost", + port="testport", + user="testuser", + password="testpassword", + ) + + query = "REVOKE admins FROM baruwa" + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "db_name", + "-c", + query, + ], + host="testhost", + port="testport", + password="testpassword", + user="testuser", + 
runas="user", + ) + + +def test_datadir_init(): + """ + Test Initializing a postgres data directory + """ + with patch("salt.modules.postgres._run_initdb", Mock(return_value={"retcode": 0})): + with patch("salt.modules.postgres.datadir_exists", Mock(return_value=False)): + name = "/var/lib/pgsql/data" + ret = postgres.datadir_init( + name, user="postgres", password="test", runas="postgres" + ) + postgres._run_initdb.assert_called_once_with( + name, + auth="password", + encoding="UTF8", + locale=None, + password="test", + runas="postgres", + checksums=False, + waldir=None, + user="postgres", + ) + assert ret is True + + +def test_datadir_exists(): + """ + Test Checks if postgres data directory has been initialized + """ + with patch("os.path.isfile", Mock(return_value=True)): + name = "/var/lib/pgsql/data" + ret = postgres.datadir_exists(name) + assert ret is True + + +@pytest.mark.parametrize( + "v1,v2,result", + ( + ("8.5", "9.5", True), + ("8.5", "8.6", True), + ("8.5.2", "8.5.3", True), + ("9.5", "8.5", False), + ("9.5", "9.6", True), + ("9.5.0", "9.5.1", True), + ("9.5", "9.5.1", True), + ("9.5.1", "9.5", False), + ("9.5b", "9.5a", False), + ("10a", "10b", True), + ("1.2.3.4", "1.2.3.5", True), + ("10dev", "10next", True), + ("10next", "10dev", False), + ), +) +def test_pg_is_older_ext_ver(v1, v2, result): + """ + Test Checks if postgres extension version string is older + """ + assert postgres._pg_is_older_ext_ver(v1, v2) is result + + +def test_tablespace_create(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + postgres.tablespace_create( + "test_tablespace", + "/tmp/postgres_test_tablespace", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + owner="otheruser", + runas="foo", + ) + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + 
"--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'CREATE TABLESPACE "test_tablespace" OWNER "otheruser" LOCATION \'/tmp/postgres_test_tablespace\' ', + ], + runas="foo", + password="foo", + host="testhost", + port="testport", + user="testuser", + ) + + +def test_tablespace_list(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch( + "salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql") + ), patch.dict( + postgres.__salt__, + { + "postgres.psql_query": MagicMock( + return_value=[ + { + "Name": "pg_global", + "Owner": "postgres", + "ACL": "", + "Opts": "", + "Location": "", + }, + { + "Name": "pg_default", + "Owner": "postgres", + "ACL": "", + "Opts": "", + "Location": "", + }, + { + "Name": "test_tablespace", + "Owner": "testuser", + "ACL": "", + "Opts": "", + "Location": "/tmp/posgrest_test_tablespace", + }, + ] + ), + }, + ): + ret = postgres.tablespace_list( + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + runas="foo", + ) + + expected_data = { + "pg_global": {"Owner": "postgres", "ACL": "", "Opts": "", "Location": ""}, + "pg_default": {"Owner": "postgres", "ACL": "", "Opts": "", "Location": ""}, + "test_tablespace": { + "Owner": "testuser", + "ACL": "", + "Opts": "", + "Location": "/tmp/posgrest_test_tablespace", + }, + } + assert ret == expected_data + + +def test_tablespace_exists_true(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch( + "salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql") + ), patch.dict( + postgres.__salt__, + { + "postgres.psql_query": MagicMock( + return_value=[ + { + "Name": "pg_global", + "Owner": "postgres", + "ACL": "", + "Opts": "", + "Location": "", + }, + { + "Name": "pg_default", + "Owner": "postgres", + "ACL": "", + "Opts": "", + "Location": "", + }, + { 
+ "Name": "test_tablespace", + "Owner": "testuser", + "ACL": "", + "Opts": "", + "Location": "/tmp/posgrest_test_tablespace", + }, + ] + ), + }, + ): + ret = postgres.tablespace_exists( + "test_tablespace", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + runas="foo", + ) + assert ret is True + + +def test_tablespace_exists_false(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch( + "salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql") + ), patch.dict( + postgres.__salt__, + { + "postgres.psql_query": MagicMock( + return_value=[ + { + "Name": "pg_global", + "Owner": "postgres", + "ACL": "", + "Opts": "", + "Location": "", + }, + { + "Name": "pg_default", + "Owner": "postgres", + "ACL": "", + "Opts": "", + "Location": "", + }, + { + "Name": "test_tablespace", + "Owner": "testuser", + "ACL": "", + "Opts": "", + "Location": "/tmp/posgrest_test_tablespace", + }, + ] + ), + }, + ): + ret = postgres.tablespace_exists( + "bad_test_tablespace", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + runas="foo", + ) + assert ret is False + + +def test_tablespace_alter_new_owner(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + postgres.tablespace_alter( + "test_tablespace", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + runas="foo", + new_owner="testuser", + ) + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'ALTER TABLESPACE "test_tablespace" OWNER TO "testuser"', + ], + runas="foo", + password="foo", + host="testhost", + 
port="testport", + user="testuser", + ) + + +def test_tablespace_alter_new_name(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + postgres.tablespace_alter( + "test_tablespace", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + runas="foo", + new_name="test_tablespace2", + ) + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'ALTER TABLESPACE "test_tablespace" RENAME TO "test_tablespace2"', + ], + runas="foo", + password="foo", + host="testhost", + port="testport", + user="testuser", + ) + + +def test_tablespace_remove(): + with patch( + "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) + ), patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/pgsql")): + postgres.tablespace_remove( + "test_tablespace", + user="testuser", + host="testhost", + port="testport", + maintenance_db="maint_db", + password="foo", + runas="foo", + ) + + postgres._run_psql.assert_called_once_with( + [ + "/usr/bin/pgsql", + "--no-align", + "--no-readline", + "--no-psqlrc", + "--no-password", + "--username", + "testuser", + "--host", + "testhost", + "--port", + "testport", + "--dbname", + "maint_db", + "-c", + 'DROP TABLESPACE "test_tablespace"', + ], + runas="foo", + password="foo", + host="testhost", + port="testport", + user="testuser", + ) diff --git a/tests/pytests/unit/modules/test_ps.py b/tests/pytests/unit/modules/test_ps.py index 074bf9007e15..99540b243d21 100644 --- a/tests/pytests/unit/modules/test_ps.py +++ b/tests/pytests/unit/modules/test_ps.py @@ -6,7 +6,8 @@ import salt.modules.ps import salt.modules.ps as ps import salt.utils.data -from salt.exceptions import 
SaltInvocationError +import salt.utils.platform +from salt.exceptions import CommandExecutionError, SaltInvocationError from tests.support.mock import MagicMock, Mock, call, patch psutil = pytest.importorskip("salt.utils.psutil_compat") @@ -14,6 +15,11 @@ # TestCase Exceptions are tested in tests/unit/modules/test_ps.py +@pytest.fixture +def configure_loader_modules(): + return {ps: {}} + + @pytest.fixture def sample_process(): status = b"fnord" @@ -135,9 +141,13 @@ def test__status_when_some_matching_processes_then_only_correct_info_should_be_r HAS_PSUTIL_VERSION = False -PSUTIL2 = psutil.version_info >= (2, 0) - STUB_CPU_TIMES = namedtuple("cputimes", "user nice system idle")(1, 2, 3, 4) +STUB_CPU_TIMES_PERCPU = [ + namedtuple("cputimes", "user nice system idle")(1, 2, 3, 4), + namedtuple("cputimes", "user nice system idle")(1, 2, 3, 4), + namedtuple("cputimes", "user nice system idle")(1, 2, 3, 4), + namedtuple("cputimes", "user nice system idle")(1, 2, 3, 4), +] STUB_VIRT_MEM = namedtuple("vmem", "total available percent used free")( 1000, 500, 50, 500, 500 ) @@ -153,9 +163,39 @@ def test__status_when_some_matching_processes_then_only_correct_info_should_be_r "iostat", "bytes_sent, bytes_recv, packets_sent, packets_recv, errin errout dropin dropout", )(1000, 2000, 500, 600, 1, 2, 3, 4) +STUB_NETWORK_IO_PERNIC = { + "lo": STUB_NETWORK_IO, + "eth0": STUB_NETWORK_IO, + "eth1": STUB_NETWORK_IO, +} STUB_DISK_IO = namedtuple( "iostat", "read_count, write_count, read_bytes, write_bytes, read_time, write_time" )(1000, 2000, 500, 600, 2000, 3000) +STUB_DISK_IO_PERDISK = { + "nvme0n1": STUB_DISK_IO, + "nvme0n1p1": STUB_DISK_IO, + "nvme0n1p2": STUB_DISK_IO, + "nvme0n1p3": STUB_DISK_IO, +} + + +@pytest.fixture +def stub_memory_usage(): + return namedtuple( + "vmem", + "total available percent used free active inactive buffers cached shared", + )( + 15722012672, + 9329594368, + 40.7, + 5137018880, + 4678086656, + 6991405056, + 2078953472, + 1156378624, + 4750528512, + 
898908160, + ) @pytest.fixture(scope="module") @@ -180,7 +220,7 @@ def stub_user(): def _get_proc_name(proc): - return proc.name() if PSUTIL2 else proc.name + return proc.name() def _get_proc_pid(proc): @@ -202,6 +242,7 @@ def __init__( status=None, username=None, pid=None, + cpu_times=None, ): self._cmdline = salt.utils.data.decode( cmdline if cmdline is not None else [], to_str=True @@ -218,6 +259,25 @@ def __init__( pid if pid is not None else 12345, to_str=True ) + if salt.utils.platform.is_windows(): + scputimes = namedtuple( + "scputimes", ["user", "system", "children_user", "children_system"] + ) + dummy_cpu_times = scputimes(7713.79, 1278.44, 17114.2, 2023.36) + else: + scputimes = namedtuple( + "scputimes", + ["user", "system", "children_user", "children_system", "iowait"], + ) + dummy_cpu_times = scputimes(7713.79, 1278.44, 17114.2, 2023.36, 0.0) + self._cpu_times = cpu_times if cpu_times is not None else dummy_cpu_times + + def __enter__(self): + pass + + def __exit__(self): + pass + def cmdline(self): return self._cmdline @@ -236,16 +296,18 @@ def username(self): def pid(self): return self._pid + def cpu_times(self): + return self._cpu_times + @pytest.fixture def mocked_proc(): mocked_proc = MagicMock("salt.utils.psutil_compat.Process") - if PSUTIL2: - mocked_proc.name = Mock(return_value="test_mock_proc") - mocked_proc.pid = Mock(return_value=9999999999) - else: - mocked_proc.name = "test_mock_proc" - mocked_proc.pid = 9999999999 + mocked_proc.name = Mock(return_value="test_mock_proc") + mocked_proc.pid = Mock(return_value=9999999999) + mocked_proc.cmdline = Mock( + return_value=["test_mock_proc", "--arg", "--kwarg=value"] + ) with patch("salt.utils.psutil_compat.Process.send_signal"), patch( "salt.utils.psutil_compat.process_iter", @@ -254,12 +316,115 @@ def mocked_proc(): yield mocked_proc -@pytest.mark.skipif(not ps.PSUTIL2, reason="Only run for psutil 2.x") def test__get_proc_cmdline(): cmdline = ["echo", "питон"] ret = 
ps._get_proc_cmdline(DummyProcess(cmdline=cmdline)) assert ret == cmdline, ret + with patch.object(DummyProcess, "cmdline") as mock_cmdline: + mock_cmdline.side_effect = psutil.NoSuchProcess(DummyProcess(cmdline=cmdline)) + ret = ps._get_proc_cmdline(DummyProcess(cmdline=cmdline)) + assert ret == [] + + with patch.object(DummyProcess, "cmdline") as mock_cmdline: + mock_cmdline.side_effect = psutil.AccessDenied(DummyProcess(cmdline=cmdline)) + ret = ps._get_proc_cmdline(DummyProcess(cmdline=cmdline)) + assert ret == [] + + +def test__get_proc_create_time(): + cmdline = ["echo", "питон"] + create_time = 1694729500.1093624 + ret = ps._get_proc_create_time( + DummyProcess(cmdline=cmdline, create_time=create_time) + ) + assert ret == create_time + + with patch.object(DummyProcess, "create_time") as mock_create_time: + mock_create_time.side_effect = psutil.NoSuchProcess( + DummyProcess(cmdline=cmdline, create_time=create_time) + ) + ret = ps._get_proc_create_time( + DummyProcess(cmdline=cmdline, create_time=create_time) + ) + assert ret is None + + with patch.object(DummyProcess, "create_time") as mock_create_time: + mock_create_time.side_effect = psutil.AccessDenied( + DummyProcess(cmdline=cmdline, create_time=create_time) + ) + ret = ps._get_proc_create_time( + DummyProcess(cmdline=cmdline, create_time=create_time) + ) + assert ret is None + + +def test__get_proc_name(): + cmdline = ["echo", "питон"] + proc_name = "proc_name" + ret = ps._get_proc_name(DummyProcess(cmdline=cmdline, name=proc_name)) + assert ret == proc_name + + with patch.object(DummyProcess, "name") as mock_name: + mock_name.side_effect = psutil.NoSuchProcess( + DummyProcess(cmdline=cmdline, name=proc_name) + ) + ret = ps._get_proc_name(DummyProcess(cmdline=cmdline, name=proc_name)) + assert ret == [] + + with patch.object(DummyProcess, "name") as mock_name: + mock_name.side_effect = psutil.AccessDenied( + DummyProcess(cmdline=cmdline, name=proc_name) + ) + ret = 
ps._get_proc_name(DummyProcess(cmdline=cmdline, name=proc_name)) + assert ret == [] + + +def test__get_proc_status(): + cmdline = ["echo", "питон"] + proc_status = "sleeping" + ret = ps._get_proc_status(DummyProcess(cmdline=cmdline, status=proc_status)) + assert ret == proc_status + + with patch.object(DummyProcess, "status") as mock_status: + mock_status.side_effect = psutil.NoSuchProcess( + DummyProcess(cmdline=cmdline, status=proc_status) + ) + ret = ps._get_proc_status(DummyProcess(cmdline=cmdline, status=proc_status)) + assert ret is None + + with patch.object(DummyProcess, "status") as mock_status: + mock_status.side_effect = psutil.AccessDenied( + DummyProcess(cmdline=cmdline, status=proc_status) + ) + ret = ps._get_proc_status(DummyProcess(cmdline=cmdline, status=proc_status)) + assert ret is None + + +def test__get_proc_username(): + cmdline = ["echo", "питон"] + proc_username = "root" + ret = ps._get_proc_username(DummyProcess(cmdline=cmdline, username=proc_username)) + assert ret == proc_username + + with patch.object(DummyProcess, "username") as mock_username: + mock_username.side_effect = psutil.NoSuchProcess( + DummyProcess(cmdline=cmdline, username=proc_username) + ) + ret = ps._get_proc_username( + DummyProcess(cmdline=cmdline, username=proc_username) + ) + assert ret is None + + with patch.object(DummyProcess, "username") as mock_username: + mock_username.side_effect = psutil.AccessDenied( + DummyProcess(cmdline=cmdline, username=proc_username) + ) + ret = ps._get_proc_username( + DummyProcess(cmdline=cmdline, username=proc_username) + ) + assert ret is None + def test_get_pid_list(): with patch("salt.utils.psutil_compat.pids", MagicMock(return_value=STUB_PID_LIST)): @@ -267,6 +432,14 @@ def test_get_pid_list(): def test_kill_pid(): + cmdline = ["echo", "питон"] + top_proc = DummyProcess(cmdline=cmdline) + + with patch("salt.utils.psutil_compat.Process") as mock_process: + mock_process.side_effect = psutil.NoSuchProcess(top_proc) + ret = 
ps.kill_pid(0, signal=999) + assert not ret + with patch("salt.utils.psutil_compat.Process") as send_signal_mock: ps.kill_pid(0, signal=999) assert send_signal_mock.call_args == call(0) @@ -278,6 +451,19 @@ def test_pkill(mocked_proc): ps.pkill(_get_proc_name(mocked_proc), signal=test_signal) assert mocked_proc.send_signal.call_args == call(test_signal) + mocked_proc.send_signal = MagicMock(side_effect=psutil.NoSuchProcess(mocked_proc)) + ret = ps.pkill(_get_proc_name(mocked_proc), signal=test_signal) + assert ret is None + + mocked_proc.username = MagicMock(return_value="root") + with patch.object(ps, "_get_proc_username", return_value=None): + ret = ps.pkill(_get_proc_name(mocked_proc), signal=test_signal, user="root") + assert ret is None + + mocked_proc.username = MagicMock(return_value="root") + ret = ps.pkill(_get_proc_name(mocked_proc), signal=test_signal, user="root") + assert mocked_proc.send_signal.call_args == call(test_signal) + def test_pgrep(mocked_proc): with patch( @@ -286,6 +472,10 @@ def test_pgrep(mocked_proc): ): assert mocked_proc.pid in (ps.pgrep(_get_proc_name(mocked_proc)) or []) + assert mocked_proc.pid in ( + ps.pgrep(_get_proc_name(mocked_proc), full=True) or [] + ) + def test_pgrep_regex(mocked_proc): with patch( @@ -301,6 +491,14 @@ def test_cpu_percent(): with patch("salt.utils.psutil_compat.cpu_percent", MagicMock(return_value=1)): assert ps.cpu_percent() == 1 + with patch( + "salt.utils.psutil_compat.cpu_percent", MagicMock(return_value=(1, 1, 1, 1)) + ): + assert ps.cpu_percent(per_cpu=True) == [1, 1, 1, 1] + + with patch("salt.utils.psutil_compat.cpu_percent", MagicMock(return_value=1)): + assert ps.cpu_percent(per_cpu=False) == 1 + def test_cpu_times(): with patch( @@ -308,12 +506,31 @@ def test_cpu_times(): ): assert {"idle": 4, "nice": 2, "system": 3, "user": 1} == ps.cpu_times() + with patch( + "salt.utils.psutil_compat.cpu_times", + MagicMock(return_value=STUB_CPU_TIMES_PERCPU), + ): + assert [ + {"idle": 4, "nice": 2, 
"system": 3, "user": 1}, + {"idle": 4, "nice": 2, "system": 3, "user": 1}, + {"idle": 4, "nice": 2, "system": 3, "user": 1}, + {"idle": 4, "nice": 2, "system": 3, "user": 1}, + ] == ps.cpu_times(per_cpu=True) + @pytest.mark.skipif( HAS_PSUTIL_VERSION is False, reason="psutil 0.6.0 or greater is required for this test", ) def test_virtual_memory(): + with patch("salt.modules.ps.psutil.version_info", (0, 5, 9)): + with pytest.raises(CommandExecutionError) as exc: + ps.virtual_memory() + assert ( + exc.value.error + == "virtual_memory is only available in psutil 0.6.0 or greater" + ) + with patch( "salt.utils.psutil_compat.virtual_memory", MagicMock(return_value=STUB_VIRT_MEM), @@ -332,6 +549,15 @@ def test_virtual_memory(): reason="psutil 0.6.0 or greater is required for this test", ) def test_swap_memory(): + + with patch("salt.modules.ps.psutil.version_info", (0, 5, 9)): + with pytest.raises(CommandExecutionError) as exc: + ps.swap_memory() + assert ( + exc.value.error + == "swap_memory is only available in psutil 0.6.0 or greater" + ) + with patch( "salt.utils.psutil_compat.swap_memory", MagicMock(return_value=STUB_SWAP_MEM), @@ -377,12 +603,21 @@ def test_disk_partition_usage(): "salt.utils.psutil_compat.disk_partitions", MagicMock(return_value=[STUB_DISK_PARTITION]), ): - assert { - "device": "/dev/disk0s2", - "mountpoint": "/", - "opts": "rw,local,rootfs,dovolfs,journaled,multilabel", - "fstype": "hfs", - } == ps.disk_partitions()[0] + with patch( + "salt.utils.psutil_compat.disk_usage", + MagicMock(return_value=STUB_DISK_USAGE), + ): + result = ps.disk_partition_usage()[0] + assert { + "device": "/dev/disk0s2", + "mountpoint": "/", + "fstype": "hfs", + "opts": "rw,local,rootfs,dovolfs,journaled,multilabel", + "total": 1000, + "used": 500, + "free": 500, + "percent": 50, + } == result def test_network_io_counters(): @@ -401,6 +636,23 @@ def test_network_io_counters(): "dropin": 3, } == ps.network_io_counters() + with patch( + 
"salt.utils.psutil_compat.net_io_counters", + MagicMock(return_value=STUB_NETWORK_IO_PERNIC), + ): + assert { + "packets_sent": 500, + "packets_recv": 600, + "bytes_recv": 2000, + "dropout": 4, + "bytes_sent": 1000, + "errout": 2, + "errin": 1, + "dropin": 3, + } == ps.network_io_counters(interface="eth0") + + assert not ps.network_io_counters(interface="eth2") + def test_disk_io_counters(): with patch( @@ -416,6 +668,21 @@ def test_disk_io_counters(): "write_count": 2000, } == ps.disk_io_counters() + with patch( + "salt.utils.psutil_compat.disk_io_counters", + MagicMock(return_value=STUB_DISK_IO_PERDISK), + ): + assert { + "read_time": 2000, + "write_bytes": 600, + "read_bytes": 500, + "write_time": 3000, + "read_count": 1000, + "write_count": 2000, + } == ps.disk_io_counters(device="nvme0n1p1") + + assert not ps.disk_io_counters(device="nvme0n1p4") + def test_get_users(stub_user): with patch("salt.utils.psutil_compat.users", MagicMock(return_value=[stub_user])): @@ -438,6 +705,134 @@ def test_top(): result = ps.top(num_processes=1, interval=0) assert len(result) == 1 + cmdline = ["echo", "питон"] + top_proc = DummyProcess(cmdline=cmdline) + + with patch("salt.utils.psutil_compat.pids", return_value=[1]): + with patch("salt.utils.psutil_compat.Process") as mock_process: + mock_process.side_effect = psutil.NoSuchProcess(top_proc) + ret = ps.top(num_processes=1, interval=0) + assert ret == [] + + if salt.utils.platform.is_windows(): + scputimes = namedtuple( + "scputimes", ["user", "system", "children_user", "children_system"] + ) + zombie_cpu_times = scputimes(0, 0, 0, 0) + + smem_info = namedtuple( + "pmem", + [ + "rss", + "vms", + "num_page_faults", + "peak_wset", + "wset", + "peak_paged_pool", + "paged_pool", + "peak_nonpaged_pool", + "nonpaged_pool28144", + "pagefile", + "peak_pagefile", + "private", + ], + ) + zombie_mem_info = smem_info(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0) + else: + scputimes = namedtuple( + "scputimes", + ["user", "system", "children_user", 
"children_system", "iowait"], + ) + zombie_cpu_times = scputimes(0, 0, 0, 0, 0) + + smem_info = namedtuple( + "pmem", ["rss", "vms", "shared", "text", "lib", "data", "dirty"] + ) + zombie_mem_info = smem_info(0, 0, 0, 0, 0, 0, 0) + + with patch("salt.utils.psutil_compat.pids", return_value=[1]): + with patch("salt.utils.psutil_compat.Process", return_value=top_proc): + with patch.object(top_proc, "cpu_times") as mock_cpu_times: + with patch.object( + top_proc, "memory_info", return_value=zombie_mem_info, create=True + ): + mock_cpu_times.side_effect = [ + psutil.ZombieProcess(top_proc), + zombie_cpu_times, + zombie_cpu_times, + ] + ret = ps.top(num_processes=1, interval=0) + + if salt.utils.platform.is_windows(): + expected_mem = { + "rss": 0, + "vms": 0, + "num_page_faults": 0, + "peak_wset": 0, + "wset": 0, + "peak_paged_pool": 0, + "paged_pool": 0, + "peak_nonpaged_pool": 0, + "nonpaged_pool28144": 0, + "pagefile": 0, + "peak_pagefile": 0, + "private": 0, + } + + expected_cpu = { + "user": 0, + "system": 0, + "children_user": 0, + "children_system": 0, + } + + else: + expected_mem = { + "rss": 0, + "vms": 0, + "shared": 0, + "text": 0, + "lib": 0, + "data": 0, + "dirty": 0, + } + + expected_cpu = { + "user": 0, + "system": 0, + "children_user": 0, + "children_system": 0, + "iowait": 0, + } + + assert ret[0]["mem"] == expected_mem + assert ret[0]["cpu"] == expected_cpu + + with patch("salt.utils.psutil_compat.pids", return_value=[1]): + with patch("salt.utils.psutil_compat.Process", return_value=top_proc): + with patch.object(top_proc, "cpu_times") as mock_cpu_times: + mock_cpu_times.side_effect = [ + top_proc._cpu_times, + psutil.NoSuchProcess(top_proc), + ] + ret = ps.top(num_processes=1, interval=0) + assert ret == [] + + with patch("salt.utils.psutil_compat.pids", return_value=[1]): + with patch("salt.utils.psutil_compat.Process", return_value=top_proc): + with patch.object(top_proc, "cpu_times") as mock_cpu_times: + with patch.object( + top_proc, 
"memory_info", create=True + ) as mock_memory_info: + mock_memory_info.side_effect = psutil.NoSuchProcess(top_proc) + mock_cpu_times.side_effect = [ + psutil.ZombieProcess(top_proc), + zombie_cpu_times, + zombie_cpu_times, + ] + ret = ps.top(num_processes=1, interval=0) + assert ret == [] + def test_top_zombie_process(): # Get 3 pids that are currently running on the system @@ -506,3 +901,388 @@ def test_status_when_access_denied_from_psutil_then_raise_exception(): # @patch('salt.utils.psutil_compat.get_users', new=MagicMock(return_value=None)) # This will force the function to use utmp # def test_get_users_utmp(): # pass + + +def test_psaux(): + """ + Testing psaux function in the ps module + """ + + cmd_run_mock = """ +USER PID %CPU %MEM VSZ RSS TTY STAT START TIME COMMAND +root 1 0.0 0.0 171584 15740 ? Ss Aug09 4:18 /usr/lib/systemd/systemd --system --deserialize=83 +root 2 0.0 0.0 0 0 ? S Aug09 0:02 [kthreadd] +root 2710129 0.0 0.0 18000 7428 pts/4 S+ Aug21 0:33 sudo -E salt-master -l debug +root 2710131 0.0 0.0 18000 1196 pts/6 Ss Aug21 0:00 sudo -E salt-master -l debug +""" + + with patch.dict(ps.__salt__, {"cmd.run": MagicMock(return_value=cmd_run_mock)}): + expected = [ + "salt-master", + [ + "root 2710129 0.0 0.0 18000 7428 pts/4 S+ Aug21 0:33 sudo -E salt-master -l debug", + "root 2710131 0.0 0.0 18000 1196 pts/6 Ss Aug21 0:00 sudo -E salt-master -l debug", + ], + "2 occurrence(s).", + ] + ret = ps.psaux("salt-master") + assert ret == expected + + expected = ["salt-minion", [], "0 occurrence(s)."] + ret = ps.psaux("salt-minion") + assert ret == expected + + +@pytest.mark.skip_on_windows(reason="ss not available in Windows") +def test_ss(): + """ + Testing ss function in the ps module + """ + + cmd_run_mock = """ +tcp LISTEN 0 128 0.0.0.0:22 0.0.0.0:* ino:31907 sk:364b cgroup:/system.slice/sshd.service <-> + +tcp LISTEN 0 128 [::]:22 [::]:* ino:31916 sk:36c4 cgroup:/system.slice/sshd.service v6only:1 <-> +""" + + with patch( + "salt.utils.path.which", 
MagicMock(return_value="/usr/sbin/ss") + ), patch.dict(ps.__salt__, {"cmd.run": MagicMock(return_value=cmd_run_mock)}): + expected = [ + "sshd", + [ + "tcp LISTEN 0 128 0.0.0.0:22 0.0.0.0:* ino:31907 sk:364b cgroup:/system.slice/sshd.service <->", + "tcp LISTEN 0 128 [::]:22 [::]:* ino:31916 sk:36c4 cgroup:/system.slice/sshd.service v6only:1 <->", + ], + ] + ret = ps.ss("sshd") + assert ret == expected + + expected = ["apache2", []] + ret = ps.ss("apache2") + assert ret == expected + + +def test_netstat(): + """ + Testing netstat function in the ps module + """ + + cmd_run_mock = """ +Active Internet connections (servers and established) +Proto Recv-Q Send-Q Local Address Foreign Address State PID/Program name +tcp 0 0 0.0.0.0:22 0.0.0.0:* LISTEN 668/sshd: /usr/sbin +tcp6 0 0 :::22 :::* LISTEN 668/sshd: /usr/sbin +""" + + with patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/netstat")): + with patch.dict(ps.__salt__, {"cmd.run": MagicMock(return_value=cmd_run_mock)}): + expected = [ + "sshd", + [ + "tcp 0 0 0.0.0.0:22 0.0.0.0:* LISTEN 668/sshd: /usr/sbin", + "tcp6 0 0 :::22 :::* LISTEN 668/sshd: /usr/sbin", + ], + ] + ret = ps.netstat("sshd") + assert ret == expected + + expected = ["apache2", []] + ret = ps.netstat("apache2") + assert ret == expected + + +def test_lsof(): + """ + Testing lsof function in the ps module + """ + + sshd_cmd_run_mock = """ +COMMAND PID USER FD TYPE DEVICE SIZE/OFF NODE NAME +sshd 1743 root cwd DIR 254,0 4096 2 / +sshd 1743 root rtd DIR 254,0 4096 2 / +sshd 1743 root txt REG 254,0 925000 7533685 /usr/bin/sshd (deleted) +sshd 1743 root DEL REG 254,0 7481413 /usr/lib/libc.so.6 +sshd 1743 root DEL REG 254,0 7477716 /usr/lib/libcrypto.so.3 +sshd 1743 root mem REG 254,0 26520 7482162 /usr/lib/libcap-ng.so.0.0.0 +sshd 1743 root DEL REG 254,0 7512187 /usr/lib/libresolv.so.2 +sshd 1743 root mem REG 254,0 22400 7481786 /usr/lib/libkeyutils.so.1.10 +sshd 1743 root mem REG 254,0 55352 7480841 /usr/lib/libkrb5support.so.0.1 +sshd 
1743 root mem REG 254,0 18304 7475778 /usr/lib/libcom_err.so.2.1 +sshd 1743 root mem REG 254,0 182128 7477432 /usr/lib/libk5crypto.so.3.1 +sshd 1743 root DEL REG 254,0 7485543 /usr/lib/libaudit.so.1.0.0 +sshd 1743 root DEL REG 254,0 7485432 /usr/lib/libz.so.1.2.13 +sshd 1743 root mem REG 254,0 882552 7480814 /usr/lib/libkrb5.so.3.3 +sshd 1743 root mem REG 254,0 344160 7475833 /usr/lib/libgssapi_krb5.so.2.2 +sshd 1743 root mem REG 254,0 67536 7482132 /usr/lib/libpam.so.0.85.1 +sshd 1743 root mem REG 254,0 165832 7481746 /usr/lib/libcrypt.so.2.0.0 +sshd 1743 root DEL REG 254,0 7480993 /usr/lib/ld-linux-x86-64.so.2 +sshd 1743 root 0r CHR 1,3 0t0 4 /dev/null +sshd 1743 root 1u unix 0x0000000000000000 0t0 32930 type=STREAM (CONNECTED) +sshd 1743 root 2u unix 0x0000000000000000 0t0 32930 type=STREAM (CONNECTED) +sshd 1743 root 3u IPv4 31907 0t0 TCP *:ssh (LISTEN) +sshd 1743 root 4u IPv6 31916 0t0 TCP *:ssh (LISTEN) +""" + + apache2_cmd_run_mock = "" + + with patch("salt.utils.path.which", MagicMock(return_value="/usr/bin/netstat")): + with patch.dict( + ps.__salt__, {"cmd.run": MagicMock(return_value=sshd_cmd_run_mock)} + ): + expected = [ + "sshd", + "\nCOMMAND PID USER FD TYPE DEVICE SIZE/OFF NODE NAME\nsshd 1743 root cwd DIR 254,0 4096 2 /\nsshd 1743 root rtd DIR 254,0 4096 2 /\nsshd 1743 root txt REG 254,0 925000 7533685 /usr/bin/sshd (deleted)\nsshd 1743 root DEL REG 254,0 7481413 /usr/lib/libc.so.6\nsshd 1743 root DEL REG 254,0 7477716 /usr/lib/libcrypto.so.3\nsshd 1743 root mem REG 254,0 26520 7482162 /usr/lib/libcap-ng.so.0.0.0\nsshd 1743 root DEL REG 254,0 7512187 /usr/lib/libresolv.so.2\nsshd 1743 root mem REG 254,0 22400 7481786 /usr/lib/libkeyutils.so.1.10\nsshd 1743 root mem REG 254,0 55352 7480841 /usr/lib/libkrb5support.so.0.1\nsshd 1743 root mem REG 254,0 18304 7475778 /usr/lib/libcom_err.so.2.1\nsshd 1743 root mem REG 254,0 182128 7477432 /usr/lib/libk5crypto.so.3.1\nsshd 1743 root DEL REG 254,0 7485543 /usr/lib/libaudit.so.1.0.0\nsshd 1743 root DEL REG 
254,0 7485432 /usr/lib/libz.so.1.2.13\nsshd 1743 root mem REG 254,0 882552 7480814 /usr/lib/libkrb5.so.3.3\nsshd 1743 root mem REG 254,0 344160 7475833 /usr/lib/libgssapi_krb5.so.2.2\nsshd 1743 root mem REG 254,0 67536 7482132 /usr/lib/libpam.so.0.85.1\nsshd 1743 root mem REG 254,0 165832 7481746 /usr/lib/libcrypt.so.2.0.0\nsshd 1743 root DEL REG 254,0 7480993 /usr/lib/ld-linux-x86-64.so.2\nsshd 1743 root 0r CHR 1,3 0t0 4 /dev/null\nsshd 1743 root 1u unix 0x0000000000000000 0t0 32930 type=STREAM (CONNECTED)\nsshd 1743 root 2u unix 0x0000000000000000 0t0 32930 type=STREAM (CONNECTED)\nsshd 1743 root 3u IPv4 31907 0t0 TCP *:ssh (LISTEN)\nsshd 1743 root 4u IPv6 31916 0t0 TCP *:ssh (LISTEN)\n", + ] + ret = ps.lsof("sshd") + assert ret == expected + + with patch.dict( + ps.__salt__, {"cmd.run": MagicMock(return_value=apache2_cmd_run_mock)} + ): + expected = ["apache2", ""] + ret = ps.lsof("apache2") + assert ret == expected + + +def test_boot_time(): + """ + Testing boot_time function in the ps module + """ + + with patch( + "salt.utils.psutil_compat.boot_time", MagicMock(return_value=1691593290.0) + ): + expected = 1691593290 + ret = ps.boot_time() + assert ret == expected + + expected = "08/09/2023" + ret = ps.boot_time(time_format="%m/%d/%Y") + assert ret == expected + + with patch("salt.utils.psutil_compat.boot_time") as mock_boot_time: + mock_boot_time.side_effect = [AttributeError(), 1691593290.0] + expected = 1691593290 + ret = ps.boot_time() + assert ret == expected + + +def test_num_cpus(): + """ + Testing num_cpus function in the ps module + """ + + with patch("salt.utils.psutil_compat.cpu_count") as mock_cpu_count: + mock_cpu_count.side_effect = AttributeError() + with patch("salt.utils.psutil_compat.NUM_CPUS", create=True, new=5): + ret = ps.num_cpus() + assert ret == 5 + + with patch("salt.utils.psutil_compat.cpu_count") as mock_cpu_count: + mock_cpu_count.return_value = 5 + ret = ps.num_cpus() + assert ret == 5 + + +def 
test_total_physical_memory(stub_memory_usage): + """ + Testing total_physical_memory function in the ps module + """ + + with patch("salt.modules.ps.psutil.version_info", (0, 5, 9)): + with pytest.raises(CommandExecutionError) as exc: + ps.total_physical_memory() + assert ( + exc.value.error + == "virtual_memory is only available in psutil 0.6.0 or greater" + ) + + with patch("salt.utils.psutil_compat.virtual_memory") as mock_total_physical_memory: + mock_total_physical_memory.side_effect = AttributeError() + with patch( + "salt.utils.psutil_compat.TOTAL_PHYMEM", + create=True, + new=stub_memory_usage.total, + ): + ret = ps.total_physical_memory() + assert ret == 15722012672 + + with patch("salt.utils.psutil_compat.virtual_memory") as mock_total_physical_memory: + mock_total_physical_memory.return_value = stub_memory_usage + ret = ps.total_physical_memory() + assert ret == 15722012672 + + +def test_proc_info(): + """ + Testing proc_info function in the ps module + """ + status = b"fnord" + extra_data = { + "utime": "42", + "stime": "42", + "children_utime": "42", + "children_stime": "42", + "ttynr": "42", + "cpu_time": "42", + "blkio_ticks": "99", + "ppid": "99", + "cpu_num": "9999999", + } + important_data = { + "name": b"blerp", + "status": status, + "create_time": "393829200", + "username": "root", + } + important_data.update(extra_data) + status_file = b"Name:\tblerp\nUmask:\t0000\nState:\tI (idle)\nTgid:\t99\nNgid:\t0\nPid:\t99\nPPid:\t2\nTracerPid:\t0\nUid:\t0\t0\t0\t0\nGid:\t0\t0\t0\t0\nFDSize:\t64\nGroups:\t \nNStgid:\t99\nNSpid:\t99\nNSpgid:\t0\nNSsid:\t0\nThreads:\t1\nSigQ:\t3/256078\nSigPnd:\t0000000000000000\nShdPnd:\t0000000000000000\nSigBlk:\t0000000000000000\nSigIgn:\tffffffffffffffff\nSigCgt:\t0000000000000000\nCapInh:\t0000000000000000\nCapPrm:\t000001ffffffffff\nCapEff:\t000001ffffffffff\nCapBnd:\t000001ffffffffff\nCapAmb:\t0000000000000000\nNoNewPrivs:\t0\nSeccomp:\t0\nSeccomp_filters:\t0\nSpeculation_Store_Bypass:\tthread 
vulnerable\nSpeculationIndirectBranch:\tconditional enabled\nCpus_allowed:\tfff\nCpus_allowed_list:\t0-11\nMems_allowed:\t00000001\nMems_allowed_list:\t0\nvoluntary_ctxt_switches:\t2\nnonvoluntary_ctxt_switches:\t0\n" + + patch_stat_file = patch( + "psutil._psplatform.Process._parse_stat_file", + return_value=important_data, + create=True, + ) + patch_exe = patch( + "psutil._psplatform.Process.exe", + return_value=important_data["name"].decode(), + create=True, + ) + patch_oneshot = patch( + "psutil._psplatform.Process.oneshot", + return_value={ + # These keys can be found in psutil/_psbsd.py + 1: important_data["status"].decode(), + # create + 9: float(important_data["create_time"]), + # user + 14: float(important_data["create_time"]), + # sys + 15: float(important_data["create_time"]), + # ch_user + 16: float(important_data["create_time"]), + # ch_sys -- we don't really care what they are, obviously + 17: float(important_data["create_time"]), + 24: important_data["name"].decode(), + }, + create=True, + ) + patch_kinfo = patch( + "psutil._psplatform.Process._get_kinfo_proc", + return_value={ + # These keys can be found in psutil/_psosx.py + 9: important_data["status"].decode(), + 8: float(important_data["create_time"]), + 10: important_data["name"].decode(), + }, + create=True, + ) + patch_status = patch( + "psutil._psplatform.Process.status", return_value=status.decode() + ) + patch_create_time = patch( + "psutil._psplatform.Process.create_time", return_value=393829200 + ) + with patch_stat_file, patch_status, patch_create_time, patch_exe, patch_oneshot, patch_kinfo: + if salt.utils.platform.is_windows(): + with patch("psutil._pswindows.cext") as mock__psutil_windows: + with patch("psutil._pswindows.Process.ppid", return_value=99): + mock__psutil_windows.proc_username.return_value = ( + "NT Authority", + "System", + ) + + expected = {"ppid": 99, "username": r"NT Authority\System"} + actual_result = salt.modules.ps.proc_info( + pid=99, attrs=["username", "ppid"] + 
) + assert actual_result == expected + + expected = {"pid": 99, "name": "blerp"} + actual_result = salt.modules.ps.proc_info( + pid=99, attrs=["pid", "name"] + ) + assert actual_result == expected + else: + patch_read_status_file = patch( + "psutil._psplatform.Process._read_status_file", return_value=status_file + ) + with patch_read_status_file: + expected = {"ppid": 99, "username": "root"} + actual_result = salt.modules.ps.proc_info( + pid=99, attrs=["username", "ppid"] + ) + assert actual_result == expected + + expected = {"pid": 99, "name": "blerp"} + actual_result = salt.modules.ps.proc_info(pid=99, attrs=["pid", "name"]) + assert actual_result == expected + + +def test_proc_info_access_denied(): + """ + Testing proc_info function in the ps module + when an AccessDenied exception occurs + """ + cmdline = ["echo", "питон"] + dummy_proc = DummyProcess(cmdline=cmdline) + with patch("salt.utils.psutil_compat.Process") as mock_process: + mock_process.side_effect = psutil.AccessDenied(dummy_proc) + with pytest.raises(CommandExecutionError): + salt.modules.ps.proc_info(pid=99, attrs=["username", "ppid"]) + + +def test_proc_info_no_such_process(): + """ + Testing proc_info function in the ps module + when an NoSuchProcess exception occurs + """ + cmdline = ["echo", "питон"] + dummy_proc = DummyProcess(cmdline=cmdline) + with patch("salt.utils.psutil_compat.Process") as mock_process: + mock_process.side_effect = psutil.NoSuchProcess(dummy_proc) + with pytest.raises(CommandExecutionError): + salt.modules.ps.proc_info(pid=99, attrs=["username", "ppid"]) + + +def test_proc_info_attribute_error(): + """ + Testing proc_info function in the ps module + when an AttributeError exception occurs + """ + cmdline = ["echo", "питон"] + with patch("salt.utils.psutil_compat.Process") as mock_process: + mock_process.side_effect = AttributeError() + with pytest.raises(CommandExecutionError): + salt.modules.ps.proc_info(pid=99, attrs=["username", "ppid"]) + + +def 
test__virtual__no_psutil(): + """ + Test __virtual__ function + """ + with patch.object(ps, "HAS_PSUTIL", False): + expected = ( + False, + "The ps module cannot be loaded: python module psutil not installed.", + ) + result = ps.__virtual__() + assert result == expected + + +def test__virtual__wrong_version(): + with patch("salt.modules.ps.psutil.version_info", (0, 2, 9)): + expected = ( + False, + "The ps execution module cannot be loaded: the psutil python module version {}" + " is less than 0.3.0".format(psutil.version_info), + ) + result = ps.__virtual__() + assert result == expected + + +def test__virtual__correct_version(): + with patch("salt.modules.ps.psutil.version_info", (0, 3, 0)): + result = ps.__virtual__() + assert result diff --git a/tests/pytests/unit/modules/test_redismod.py b/tests/pytests/unit/modules/test_redismod.py new file mode 100644 index 000000000000..81f8a3545c8a --- /dev/null +++ b/tests/pytests/unit/modules/test_redismod.py @@ -0,0 +1,483 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.modules.redismod +""" + +from datetime import datetime + +import pytest + +import salt.modules.redismod as redismod +from tests.support.mock import MagicMock + + +class Mockredis: + """ + Mock redis class + """ + + class ConnectionError(Exception): + """ + Mock ConnectionError class + """ + + +class MockConnect: + """ + Mock Connect class + """ + + counter = 0 + + def __init__(self): + self.name = None + self.pattern = None + self.value = None + self.key = None + self.seconds = None + self.timestamp = None + self.field = None + self.start = None + self.stop = None + self.master_host = None + self.master_port = None + + @staticmethod + def bgrewriteaof(): + """ + Mock bgrewriteaof method + """ + return "A" + + @staticmethod + def bgsave(): + """ + Mock bgsave method + """ + return "A" + + def config_get(self, pattern): + """ + Mock config_get method + """ + self.pattern = pattern + return "A" + + def config_set(self, name, value): + """ + Mock 
config_set method + """ + self.name = name + self.value = value + return "A" + + @staticmethod + def dbsize(): + """ + Mock dbsize method + """ + return "A" + + @staticmethod + def delete(): + """ + Mock delete method + """ + return "A" + + def exists(self, key): + """ + Mock exists method + """ + self.key = key + return "A" + + def expire(self, key, seconds): + """ + Mock expire method + """ + self.key = key + self.seconds = seconds + return "A" + + def expireat(self, key, timestamp): + """ + Mock expireat method + """ + self.key = key + self.timestamp = timestamp + return "A" + + @staticmethod + def flushall(): + """ + Mock flushall method + """ + return "A" + + @staticmethod + def flushdb(): + """ + Mock flushdb method + """ + return "A" + + def get(self, key): + """ + Mock get method + """ + self.key = key + return "A" + + def hget(self, key, field): + """ + Mock hget method + """ + self.key = key + self.field = field + return "A" + + def hgetall(self, key): + """ + Mock hgetall method + """ + self.key = key + return "A" + + @staticmethod + def info(): + """ + Mock info method + """ + return "A" + + def keys(self, pattern): + """ + Mock keys method + """ + self.pattern = pattern + return "A" + + def type(self, key): + """ + Mock type method + """ + self.key = key + return "A" + + @staticmethod + def lastsave(): + """ + Mock lastsave method + """ + return datetime.now() + + def llen(self, key): + """ + Mock llen method + """ + self.key = key + return "A" + + def lrange(self, key, start, stop): + """ + Mock lrange method + """ + self.key = key + self.start = start + self.stop = stop + return "A" + + @staticmethod + def ping(): + """ + Mock ping method + """ + MockConnect.counter = MockConnect.counter + 1 + if MockConnect.counter == 1: + return "A" + elif MockConnect.counter in (2, 3, 5): + raise Mockredis.ConnectionError("foo") + + @staticmethod + def save(): + """ + Mock save method + """ + return "A" + + def set(self, key, value): + """ + Mock set method + """ 
+ self.key = key + self.value = value + return "A" + + @staticmethod + def shutdown(): + """ + Mock shutdown method + """ + return "A" + + def slaveof(self, master_host, master_port): + """ + Mock slaveof method + """ + self.master_host = master_host + self.master_port = master_port + return "A" + + def smembers(self, key): + """ + Mock smembers method + """ + self.key = key + return "A" + + @staticmethod + def time(): + """ + Mock time method + """ + return "A" + + def zcard(self, key): + """ + Mock zcard method + """ + self.key = key + return "A" + + def zrange(self, key, start, stop): + """ + Mock zrange method + """ + self.key = key + self.start = start + self.stop = stop + return "A" + + +@pytest.fixture +def configure_loader_modules(): + return { + redismod: { + "redis": Mockredis, + "_connect": MagicMock(return_value=MockConnect()), + } + } + + +def test_bgrewriteaof(): + """ + Test to asynchronously rewrite the append-only file + """ + assert redismod.bgrewriteaof() == "A" + + +def test_bgsave(): + """ + Test to asynchronously save the dataset to disk + """ + assert redismod.bgsave() == "A" + + +def test_config_get(): + """ + Test to get redis server configuration values + """ + assert redismod.config_get("*") == "A" + + +def test_config_set(): + """ + Test to set redis server configuration values + """ + assert redismod.config_set("name", "value") == "A" + + +def test_dbsize(): + """ + Test to return the number of keys in the selected database + """ + assert redismod.dbsize() == "A" + + +def test_delete(): + """ + Test to deletes the keys from redis, returns number of keys deleted + """ + assert redismod.delete() == "A" + + +def test_exists(): + """ + Test to return true if the key exists in redis + """ + assert redismod.exists("key") == "A" + + +def test_expire(): + """ + Test to set a keys time to live in seconds + """ + assert redismod.expire("key", "seconds") == "A" + + +def test_expireat(): + """ + Test to set a keys expire at given UNIX time + """ + 
assert redismod.expireat("key", "timestamp") == "A" + + +def test_flushall(): + """ + Test to remove all keys from all databases + """ + assert redismod.flushall() == "A" + + +def test_flushdb(): + """ + Test to remove all keys from the selected database + """ + assert redismod.flushdb() == "A" + + +def test_get_key(): + """ + Test to get redis key value + """ + assert redismod.get_key("key") == "A" + + +def test_hget(): + """ + Test to get specific field value from a redis hash, returns dict + """ + assert redismod.hget("key", "field") == "A" + + +def test_hgetall(): + """ + Test to get all fields and values from a redis hash, returns dict + """ + assert redismod.hgetall("key") == "A" + + +def test_info(): + """ + Test to get information and statistics about the server + """ + assert redismod.info() == "A" + + +def test_keys(): + """ + Test to get redis keys, supports glob style patterns + """ + assert redismod.keys("pattern") == "A" + + +def test_key_type(): + """ + Test to get redis key type + """ + assert redismod.key_type("key") == "A" + + +def test_lastsave(): + """ + Test to get the UNIX time in seconds of the last successful + save to disk + """ + assert redismod.lastsave() + + +def test_llen(): + """ + Test to get the length of a list in Redis + """ + assert redismod.llen("key") == "A" + + +def test_lrange(): + """ + Test to get a range of values from a list in Redis + """ + assert redismod.lrange("key", "start", "stop") == "A" + + +def test_ping(): + """ + Test to ping the server, returns False on connection errors + """ + assert redismod.ping() == "A" + + assert not redismod.ping() + + +def test_save(): + """ + Test to synchronously save the dataset to disk + """ + assert redismod.save() == "A" + + +def test_set_key(): + """ + Test to set redis key value + """ + assert redismod.set_key("key", "value") == "A" + + +def test_shutdown(): + """ + Test to synchronously save the dataset to disk and then + shut down the server + """ + assert not 
redismod.shutdown() + + assert redismod.shutdown() + + assert not redismod.shutdown() + + +def test_slaveof(): + """ + Test to make the server a slave of another instance, or + promote it as master + """ + assert redismod.slaveof("master_host", "master_port") == "A" + + +def test_smembers(): + """ + Test to get members in a Redis set + """ + assert redismod.smembers("key") == ["A"] + + +def test_time(): + """ + Test to return the current server UNIX time in seconds + """ + assert redismod.time() == "A" + + +def test_zcard(): + """ + Test to get the length of a sorted set in Redis + """ + assert redismod.zcard("key") == "A" + + +def test_zrange(): + """ + Test to get a range of values from a sorted set in Redis by index + """ + assert redismod.zrange("key", "start", "stop") == "A" diff --git a/tests/pytests/unit/modules/test_reg.py b/tests/pytests/unit/modules/test_reg.py index 2b361466f06d..51ef0a15a3da 100644 --- a/tests/pytests/unit/modules/test_reg.py +++ b/tests/pytests/unit/modules/test_reg.py @@ -867,4 +867,4 @@ def test_delete_key_recursive_unicode(unicode_key, fake_key): == expected ) finally: - reg.delete_key_recursive(hive="HKLM", key=fake_key) + reg.delete_key_recursive(hive="HKLM", key=fake_key) \ No newline at end of file diff --git a/tests/pytests/unit/modules/test_salt_version.py b/tests/pytests/unit/modules/test_salt_version.py index 6d734f6a7647..4b7a7cd07319 100644 --- a/tests/pytests/unit/modules/test_salt_version.py +++ b/tests/pytests/unit/modules/test_salt_version.py @@ -2,8 +2,11 @@ Unit tests for salt/modules/salt_version.py """ +import pytest + import salt.modules.salt_version as salt_version import salt.version +from salt.exceptions import CommandExecutionError from tests.support.mock import MagicMock, patch @@ -21,7 +24,7 @@ def test_mocked_objects(): for k, v in salt.version.SaltStackVersion.LNAMES.items(): assert k == k.lower() assert isinstance(v, tuple) - if sv.new_version(major=v[0]) and not sv.can_have_dot_zero(major=v[0]): + if 
sv.new_version(major=v[0]): assert len(v) == 1 else: assert len(v) == 2 @@ -64,6 +67,13 @@ def test_get_release_number_success_new_version(): assert salt_version.get_release_number("Neon") == "3000" +def test_get_release_number_success_new_version_with_dot(): + """ + Test that a version is returned for new versioning (3006) + """ + assert salt_version.get_release_number("Sulfur") == "3006" + + def test_equal_success(): """ Test that the current version is equal to the codename @@ -83,6 +93,16 @@ def test_equal_success_new_version(): assert salt_version.equal("foo") is True +def test_equal_success_new_version_with_dot(): + """ + Test that the current version is equal to the codename + while using the new versioning + """ + with patch("salt.version.SaltStackVersion", MagicMock(return_value="3006.1")): + with patch("salt.version.SaltStackVersion.LNAMES", {"foo": (3006,)}): + assert salt_version.equal("foo") is True + + def test_equal_older_codename(): """ Test that when an older codename is passed in, the function returns False. @@ -142,6 +162,17 @@ def test_greater_than_success_new_version(): assert salt_version.greater_than("Nitrogen") is True +def test_greater_than_success_new_version_with_dot(): + """ + Test that the current version is newer than the codename + """ + with patch( + "salt.modules.salt_version.get_release_number", MagicMock(return_value="3000") + ): + with patch("salt.version.SaltStackVersion", MagicMock(return_value="3006.0")): + assert salt_version.greater_than("Neon") is True + + def test_greater_than_with_equal_codename(): """ Test that when an equal codename is passed in, the function returns False. 
@@ -200,6 +231,28 @@ def test_less_than_success_new_version(): assert salt_version.less_than("Fluorine") is True +def test_less_than_success_new_version_with_dot(): + """ + Test that when a newer codename is passed in, the function returns True + using new version + """ + with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")): + with patch( + "salt.modules.salt_version.get_release_number", + MagicMock(return_value="3006"), + ): + assert salt_version.less_than("Fluorine") is True + + +def test_less_than_do_not_crash_when_input_is_a_number(): + """ + Test that less_than do not crash when unexpected inputs + """ + with patch("salt.version.SaltStackVersion", MagicMock(return_value="2018.3.2")): + with pytest.raises(CommandExecutionError): + salt_version.less_than(1234) + + def test_less_than_with_equal_codename(): """ Test that when an equal codename is passed in, the function returns False. diff --git a/tests/pytests/unit/modules/test_saltutil.py b/tests/pytests/unit/modules/test_saltutil.py index 889543c94549..42986c464e14 100644 --- a/tests/pytests/unit/modules/test_saltutil.py +++ b/tests/pytests/unit/modules/test_saltutil.py @@ -1,14 +1,22 @@ +import pathlib + import pytest import salt.modules.saltutil as saltutil from salt.client import LocalClient -from tests.support.mock import create_autospec +from tests.support.mock import create_autospec, patch from tests.support.mock import sentinel as s @pytest.fixture -def configure_loader_modules(): - return {saltutil: {}} +def configure_loader_modules(minion_opts): + minion_opts["file_client"] = "local" + minion_opts["master_uri"] = "tcp://127.0.0.1:4505" + return { + saltutil: { + "__opts__": minion_opts, + } + } def test_exec_kwargs(): @@ -82,3 +90,108 @@ def test_exec_kwargs(): **{"subset": s.subset, "batch": s.batch} ) client.cmd_batch.assert_called_with(batch=s.batch, **_cmd_expected_kwargs) + + +def test_refresh_grains_default_clean_pillar_cache(): + with 
patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.refresh_grains() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_refresh_grains_default_clean_pillar_cache_with_refresh_false(): + with patch("salt.modules.saltutil.refresh_modules") as refresh_modules: + saltutil.refresh_grains(refresh_pillar=False) + refresh_modules.assert_called() + + +def test_refresh_grains_clean_pillar_cache(): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.refresh_grains(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) + + +def test_refresh_grains_clean_pillar_cache_with_refresh_false(): + with patch("salt.modules.saltutil.refresh_modules") as refresh_modules: + saltutil.refresh_grains(clean_pillar_cache=True, refresh_pillar=False) + refresh_modules.assert_called() + + +def test_sync_grains_default_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_grains() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_sync_grains_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_grains(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) + + +def test_sync_pillar_default_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_pillar() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_sync_pillar_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_pillar(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) + + +def test_sync_all_default_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): 
+ with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_all() + refresh_pillar.assert_called_with(clean_cache=False) + + +def test_sync_all_clean_pillar_cache(): + with patch("salt.modules.saltutil._sync"): + with patch("salt.modules.saltutil.refresh_pillar") as refresh_pillar: + saltutil.sync_all(clean_pillar_cache=True) + refresh_pillar.assert_called_with(clean_cache=True) + + +def test_list_extmods(salt_call_cli, minion_opts): + pathlib.Path(minion_opts["cachedir"], "extmods", "dummydir").mkdir( + parents=True, exist_ok=True + ) + ret = saltutil.list_extmods() + assert "dummydir" in ret + assert ret["dummydir"] == [] + + +def test_refresh_beacons(): + ret = saltutil.refresh_beacons() + assert ret is False + + +def test_refresh_matchers(): + ret = saltutil.refresh_matchers() + assert ret is False + + +def test_refresh_modules_async_false(): + kwargs = {"async": False} + ret = saltutil.refresh_modules(**kwargs) + assert ret is False + + +def test_clear_job_cache(salt_call_cli, minion_opts): + pathlib.Path(minion_opts["cachedir"], "minion_jobs", "dummydir").mkdir( + parents=True, exist_ok=True + ) + ret = saltutil.clear_job_cache(hours=1) + assert ret is True + + +@pytest.mark.destructive_test +def test_regen_keys(salt_call_cli, minion_opts): + pathlib.Path(minion_opts["pki_dir"], "dummydir").mkdir(parents=True, exist_ok=True) + saltutil.regen_keys() diff --git a/tests/pytests/unit/modules/test_schedule.py b/tests/pytests/unit/modules/test_schedule.py index d3488559143f..d39ae30e5c2e 100644 --- a/tests/pytests/unit/modules/test_schedule.py +++ b/tests/pytests/unit/modules/test_schedule.py @@ -198,6 +198,38 @@ def test_build_schedule_item_invalid_jobs_args(): ) == {"comment": comment2, "result": False} +def test_build_schedule_item_jid_include(): + """ + Test build_schedule_item when jid_include is passed and not passed + """ + ret = schedule.build_schedule_item("job1", function="test.args", jid_include=False) + assert ret == { + 
"function": "test.args", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": False, + } + + ret = schedule.build_schedule_item("job1", function="test.args", jid_include=True) + assert ret == { + "function": "test.args", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + } + + ret = schedule.build_schedule_item("job1", function="test.args") + assert ret == { + "function": "test.args", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + } + + # 'add' function tests: 1 diff --git a/tests/pytests/unit/modules/test_selinux.py b/tests/pytests/unit/modules/test_selinux.py index e1f66dcfb3f9..b67a1b525774 100644 --- a/tests/pytests/unit/modules/test_selinux.py +++ b/tests/pytests/unit/modules/test_selinux.py @@ -1,8 +1,10 @@ +import re + import pytest import salt.modules.selinux as selinux from salt.exceptions import SaltInvocationError -from tests.support.mock import MagicMock, patch +from tests.support.mock import MagicMock, mock_open, patch @pytest.fixture @@ -19,7 +21,7 @@ def test_fcontext_get_policy_parsing(): { "semanage_out": ( "/var/www(/.*)? all files " - " system_u:object_r:httpd_sys_content_t:s0" + " system_u:object_r:httpd_sys_content_t:s0 " ), "name": "/var/www(/.*)?", "filetype": "all files", @@ -31,7 +33,7 @@ def test_fcontext_get_policy_parsing(): { "semanage_out": ( "/var/www(/.*)? all files " - " system_u:object_r:httpd_sys_content_t:s0" + " system_u:object_r:httpd_sys_content_t:s0 " ), "name": "/var/www(/.*)?", "filetype": "all files", @@ -43,7 +45,7 @@ def test_fcontext_get_policy_parsing(): { "semanage_out": ( "/var/lib/dhcp3? 
directory " - " system_u:object_r:dhcp_state_t:s0" + " system_u:object_r:dhcp_state_t:s0 " ), "name": "/var/lib/dhcp3?", "filetype": "directory", @@ -293,3 +295,118 @@ def test_fcontext_policy_parsing_fail(): "retcode": 1, "error": "Unrecognized response from restorecon command.", } + + +def test_selinux_config_enforcing(): + """ + Test values written to /etc/selinux/config are lowercase + """ + mock_file = """ +# This file controls the state of SELinux on the system. +# SELINUX= can take one of these three values: +# enforcing - SELinux security policy is enforced. +# permissive - SELinux prints warnings instead of enforcing. +# disabled - No SELinux policy is loaded. +## SELINUX=disabled +SELINUX=permissive +# SELINUXTYPE= can take one of these three values: +# targeted - Targeted processes are protected, +# minimum - Modification of targeted policy. Only selected processes are protected. +# mls - Multi Level Security protection. +SELINUXTYPE=targeted + +""" + with patch("salt.utils.files.fopen", mock_open(read_data=mock_file)) as m_open: + selinux.setenforce("Enforcing") + writes = m_open.write_calls() + assert writes + for line in writes: + if line.startswith("SELINUX="): + assert line == "SELINUX=enforcing" + + +def test_selinux_config_permissive(): + """ + Test values written to /etc/selinux/config are lowercase + """ + mock_file = """ +# This file controls the state of SELinux on the system. +# SELINUX= can take one of these three values: +# enforcing - SELinux security policy is enforced. +# permissive - SELinux prints warnings instead of enforcing. +# disabled - No SELinux policy is loaded. +SELINUX=disabled +# SELINUXTYPE= can take one of these three values: +# targeted - Targeted processes are protected, +# minimum - Modification of targeted policy. Only selected processes are protected. +# mls - Multi Level Security protection. 
+SELINUXTYPE=targeted + +""" + with patch("salt.utils.files.fopen", mock_open(read_data=mock_file)) as m_open: + selinux.setenforce("Permissive") + writes = m_open.write_calls() + assert writes + for line in writes: + if line.startswith("SELINUX="): + assert line == "SELINUX=permissive" + + +def test_selinux_config_disabled(): + """ + Test values written to /etc/selinux/config are lowercase + """ + mock_file = """ +# This file controls the state of SELinux on the system. +# SELINUX= can take one of these three values: +# enforcing - SELinux security policy is enforced. +# permissive - SELinux prints warnings instead of enforcing. +# disabled - No SELinux policy is loaded. +## SELINUX=disabled +SELINUX=permissive +# SELINUXTYPE= can take one of these three values: +# targeted - Targeted processes are protected, +# minimum - Modification of targeted policy. Only selected processes are protected. +# mls - Multi Level Security protection. +SELINUXTYPE=targeted + +""" + with patch("salt.utils.files.fopen", mock_open(read_data=mock_file)) as m_open: + selinux.setenforce("Disabled") + writes = m_open.write_calls() + assert writes + for line in writes: + if line.startswith("SELINUX="): + assert line == "SELINUX=disabled" + + +@pytest.mark.parametrize( + "name,sel_type", + ( + ("/srv/ssl/ldap/.*[.]key", "slapd_cert_t"), + ("/srv/ssl/ldap(/.*[.](pem|crt))?", "cert_t"), + ), +) +def test_selinux_add_policy_regex(name, sel_type): + """ + Test adding policy with regex components parsing the stdout response of restorecon used in fcontext_policy_applied, new style. 
+ """ + mock_cmd_shell = MagicMock(return_value={"retcode": 0}) + mock_cmd_run_all = MagicMock(return_value={"retcode": 0}) + + with patch.dict(selinux.__salt__, {"cmd.shell": mock_cmd_shell}), patch.dict( + selinux.__salt__, {"cmd.run_all": mock_cmd_run_all} + ): + selinux.fcontext_add_policy(name, sel_type=sel_type) + filespec = re.escape(name) + expected_cmd_shell = f"semanage fcontext -l | egrep '{filespec}'" + mock_cmd_shell.assert_called_once_with( + expected_cmd_shell, + ignore_retcode=True, + ) + expected_cmd_run_all = ( + f"semanage fcontext --modify --type {sel_type} {filespec}" + ) + mock_cmd_run_all.assert_called_once_with( + expected_cmd_run_all, + ) diff --git a/tests/pytests/unit/modules/test_serverdensity_device.py b/tests/pytests/unit/modules/test_serverdensity_device.py new file mode 100644 index 000000000000..6d79aefa8134 --- /dev/null +++ b/tests/pytests/unit/modules/test_serverdensity_device.py @@ -0,0 +1,195 @@ +""" + :codeauthor: Jayesh Kariya + + TestCase for salt.modules.serverdensity_device +""" + + +import pytest + +import salt.modules.serverdensity_device as serverdensity_device +import salt.utils.json +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + + +class MockRequests: + """ + Mock smtplib class + """ + + flag = None + content = """{"message": "Invalid token", "errors": [{"type": "invalid_token", "subject": "token"}]}""" + status_code = None + + def __init__(self): + self.url = None + self.data = None + self.kwargs = None + + def return_request(self, url, data=None, **kwargs): + """ + Mock request method. + """ + self.url = url + self.data = data + self.kwargs = kwargs + requests = MockRequests() + if self.flag == 1: + requests.status_code = 401 + else: + requests.status_code = 200 + return requests + + def post(self, url, data=None, **kwargs): + """ + Mock post method. 
+ """ + return self.return_request(url, data, **kwargs) + + def delete(self, url, **kwargs): + """ + Mock delete method. + """ + return self.return_request(url, **kwargs) + + def get(self, url, **kwargs): + """ + Mock get method. + """ + return self.return_request(url, **kwargs) + + def put(self, url, data=None, **kwargs): + """ + Mock put method. + """ + return self.return_request(url, data, **kwargs) + + +@pytest.fixture +def configure_loader_modules(): + return {serverdensity_device: {"requests": MockRequests()}} + + +@pytest.fixture +def mock_json_loads(): + return MagicMock(side_effect=ValueError()) + + +def test_get_sd_auth(): + """ + Tests if it returns requested Server Density + authentication value from pillar. + """ + with patch.dict(serverdensity_device.__pillar__, {"serverdensity": False}): + pytest.raises(CommandExecutionError, serverdensity_device.get_sd_auth, "1") + + with patch.dict(serverdensity_device.__pillar__, {"serverdensity": {"1": "salt"}}): + assert serverdensity_device.get_sd_auth("1") == "salt" + + pytest.raises(CommandExecutionError, serverdensity_device.get_sd_auth, "2") + + +def test_create(mock_json_loads): + """ + Tests if it create device in Server Density. + """ + with patch.dict( + serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}} + ): + assert serverdensity_device.create("rich_lama", group="lama_band") + + with patch.object(salt.utils.json, "loads", mock_json_loads): + pytest.raises( + CommandExecutionError, + serverdensity_device.create, + "rich_lama", + group="lama_band", + ) + + MockRequests.flag = 1 + assert serverdensity_device.create("rich_lama", group="lama_band") is None + + +def test_delete(mock_json_loads): + """ + Tests if it delete a device from Server Density. 
+ """ + with patch.dict( + serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}} + ): + MockRequests.flag = 0 + assert serverdensity_device.delete("51f7eaf") + + with patch.object(salt.utils.json, "loads", mock_json_loads): + pytest.raises(CommandExecutionError, serverdensity_device.delete, "51f7eaf") + + MockRequests.flag = 1 + assert serverdensity_device.delete("51f7eaf") is None + + +def test_ls(mock_json_loads): + """ + Tests if it list devices in Server Density. + """ + with patch.dict( + serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}} + ): + MockRequests.flag = 0 + assert serverdensity_device.ls(name="lama") + + with patch.object(salt.utils.json, "loads", mock_json_loads): + pytest.raises(CommandExecutionError, serverdensity_device.ls, name="lama") + + MockRequests.flag = 1 + assert serverdensity_device.ls(name="lama") is None + + +def test_update(mock_json_loads): + """ + Tests if it updates device information in Server Density. + """ + with patch.dict( + serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}} + ): + MockRequests.flag = 0 + assert serverdensity_device.update("51f7eaf", name="lama") + + with patch.object(salt.utils.json, "loads", mock_json_loads): + pytest.raises( + CommandExecutionError, + serverdensity_device.update, + "51f7eaf", + name="lama", + ) + + MockRequests.flag = 1 + assert serverdensity_device.update("51f7eaf", name="lama") is None + + +def test_install_agent(): + """ + Tests if it downloads Server Density installation agent, + and installs sd-agent with agent_key. 
+ """ + mock = MagicMock(return_value=True) + with patch.dict( + serverdensity_device.__pillar__, {"serverdensity": {"account_url": "salt"}} + ): + with patch.dict(serverdensity_device.__salt__, {"cmd.run": mock}): + with patch.dict(serverdensity_device.__opts__, {"cachedir": "/"}): + assert serverdensity_device.install_agent("51f7e") + + +def test_install_agent_v2(): + """ + Tests if it downloads Server Density installation agent, + and installs sd-agent with agent_key. + """ + mock = MagicMock(return_value=True) + with patch.dict( + serverdensity_device.__pillar__, {"serverdensity": {"account_name": "salt"}} + ): + with patch.dict(serverdensity_device.__salt__, {"cmd.run": mock}): + with patch.dict(serverdensity_device.__opts__, {"cachedir": "/"}): + assert serverdensity_device.install_agent("51f7e", agent_version=2) diff --git a/tests/pytests/unit/modules/test_servicenow.py b/tests/pytests/unit/modules/test_servicenow.py new file mode 100644 index 000000000000..1971b63f653e --- /dev/null +++ b/tests/pytests/unit/modules/test_servicenow.py @@ -0,0 +1,66 @@ +""" + :codeauthor: Anthony Shaw + + TestCase for salt.modules.servicenow +""" + + +import pytest + +import salt.modules.servicenow as servicenow +from tests.support.mock import MagicMock + + +class MockServiceNowClient: + def __init__(self, instance_name, username, password): + pass + + def get(self, query): + return [{"query_size": len(query), "query_value": query}] + + +@pytest.fixture +def configure_loader_modules(): + module_globals = { + "Client": MockServiceNowClient, + "__salt__": { + "config.option": MagicMock( + return_value={ + "instance_name": "test", + "username": "mr_test", + "password": "test123", + } + ) + }, + } + if servicenow.HAS_LIBS is False: + module_globals["sys.modules"] = {"servicenow_rest": MagicMock()} + module_globals["sys.modules"][ + "servicenow_rest" + ].api.Client = MockServiceNowClient + return {servicenow: module_globals} + + +def test_module_creation(): + client = 
servicenow._get_client() + assert client is not None + + +def test_non_structured_query(): + result = servicenow.non_structured_query("tests", "role=web") + assert result is not None + assert result[0]["query_size"] == 8 + assert result[0]["query_value"] == "role=web" + + +def test_non_structured_query_kwarg(): + result = servicenow.non_structured_query("tests", role="web") + assert result is not None + assert result[0]["query_size"] == 8 + assert result[0]["query_value"] == "role=web" + + +def test_non_structured_query_kwarg_multi(): + result = servicenow.non_structured_query("tests", role="web", type="computer") + assert result is not None + assert result[0]["query_size"] == 22 diff --git a/tests/unit/modules/test_smtp.py b/tests/pytests/unit/modules/test_smtp.py similarity index 55% rename from tests/unit/modules/test_smtp.py rename to tests/pytests/unit/modules/test_smtp.py index 32298fcdcf95..983bb209cf24 100644 --- a/tests/unit/modules/test_smtp.py +++ b/tests/pytests/unit/modules/test_smtp.py @@ -1,12 +1,14 @@ """ :codeauthor: Jayesh Kariya + + TestCase for salt.modules.smtp """ +import pytest + import salt.modules.smtp as smtp -from tests.support.mixins import LoaderModuleMockMixin from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase class SMTPRecipientsRefused(Exception): @@ -221,128 +223,106 @@ def SMTP(self, server): return MockSMTP("server") -class SmtpTestCase(TestCase, LoaderModuleMockMixin): +@pytest.fixture +def configure_loader_modules(): + return {smtp: {"socket": MockSocket(), "smtplib": MockSmtplib()}} + + +# 'send_msg' function tests: 1 + + +def test_send_msg(): """ - TestCase for salt.modules.smtp + Tests if it send a message to an SMTP recipient. 
""" + mock = MagicMock( + return_value={ + "smtp.server": "", + "smtp.tls": "True", + "smtp.sender": "", + "smtp.username": "", + "smtp.password": "", + } + ) + with patch.dict(smtp.__salt__, {"config.option": mock}): + assert smtp.send_msg( + "admin@example.com", + "This is a salt module test", + profile="my-smtp-account", + ) - def setup_loader_modules(self): - return {smtp: {"socket": MockSocket(), "smtplib": MockSmtplib()}} + MockSMTPSSL.flag = 1 + assert not smtp.send_msg( + "admin@example.com", + "This is a salt module test", + profile="my-smtp-account", + ) - # 'send_msg' function tests: 1 + MockSMTPSSL.flag = 2 + assert not smtp.send_msg( + "admin@example.com", + "This is a salt module test", + profile="my-smtp-account", + ) - def test_send_msg(self): - """ - Tests if it send a message to an SMTP recipient. - """ - mock = MagicMock( - return_value={ - "smtp.server": "", - "smtp.tls": "True", - "smtp.sender": "", - "smtp.username": "", - "smtp.password": "", - } + MockSMTPSSL.flag = 3 + assert not smtp.send_msg( + "admin@example.com", + "This is a salt module test", + profile="my-smtp-account", + ) + + MockSMTPSSL.flag = 4 + assert not smtp.send_msg( + "admin@example.com", + "This is a salt module test", + profile="my-smtp-account", + ) + + mock = MagicMock( + return_value={ + "smtp.server": "", + "smtp.tls": "", + "smtp.sender": "", + "smtp.username": "", + "smtp.password": "", + } + ) + with patch.dict(smtp.__salt__, {"config.option": mock}): + MockSMTPSSL.flag = 5 + assert not smtp.send_msg( + "admin@example.com", + "This is a salt module test", + username="myuser", + password="verybadpass", + sender="admin@example.com", + server="smtp.domain.com", + ) + + MockSMTP.flag = 1 + assert not smtp.send_msg( + "admin@example.com", + "This is a salt module test", + profile="my-smtp-account", + ) + + MockSMTP.flag = 2 + assert not smtp.send_msg( + "admin@example.com", + "This is a salt module test", + profile="my-smtp-account", ) - with patch.dict(smtp.__salt__, 
{"config.option": mock}): - self.assertTrue( - smtp.send_msg( - "admin@example.com", - "This is a salt module test", - profile="my-smtp-account", - ) - ) - - MockSMTPSSL.flag = 1 - self.assertFalse( - smtp.send_msg( - "admin@example.com", - "This is a salt module test", - profile="my-smtp-account", - ) - ) - - MockSMTPSSL.flag = 2 - self.assertFalse( - smtp.send_msg( - "admin@example.com", - "This is a salt module test", - profile="my-smtp-account", - ) - ) - - MockSMTPSSL.flag = 3 - self.assertFalse( - smtp.send_msg( - "admin@example.com", - "This is a salt module test", - profile="my-smtp-account", - ) - ) - - MockSMTPSSL.flag = 4 - self.assertFalse( - smtp.send_msg( - "admin@example.com", - "This is a salt module test", - profile="my-smtp-account", - ) - ) - - mock = MagicMock( - return_value={ - "smtp.server": "", - "smtp.tls": "", - "smtp.sender": "", - "smtp.username": "", - "smtp.password": "", - } + + MockSMTP.flag = 3 + assert not smtp.send_msg( + "admin@example.com", + "This is a salt module test", + profile="my-smtp-account", + ) + + MockSmtplib.flag = 1 + assert not smtp.send_msg( + "admin@example.com", + "This is a salt module test", + profile="my-smtp-account", ) - with patch.dict(smtp.__salt__, {"config.option": mock}): - MockSMTPSSL.flag = 5 - self.assertFalse( - smtp.send_msg( - "admin@example.com", - "This is a salt module test", - username="myuser", - password="verybadpass", - sender="admin@example.com", - server="smtp.domain.com", - ) - ) - - MockSMTP.flag = 1 - self.assertFalse( - smtp.send_msg( - "admin@example.com", - "This is a salt module test", - profile="my-smtp-account", - ) - ) - - MockSMTP.flag = 2 - self.assertFalse( - smtp.send_msg( - "admin@example.com", - "This is a salt module test", - profile="my-smtp-account", - ) - ) - - MockSMTP.flag = 3 - self.assertFalse( - smtp.send_msg( - "admin@example.com", - "This is a salt module test", - profile="my-smtp-account", - ) - ) - - MockSmtplib.flag = 1 - self.assertFalse( - smtp.send_msg( - 
"admin@example.com", - "This is a salt module test", - profile="my-smtp-account", - ) - ) diff --git a/tests/pytests/unit/modules/test_status.py b/tests/pytests/unit/modules/test_status.py index 9b3bcb82bef4..e0bb270df668 100644 --- a/tests/pytests/unit/modules/test_status.py +++ b/tests/pytests/unit/modules/test_status.py @@ -1,8 +1,16 @@ +""" + Test cases for salt.modules.status +""" + + +import os + import pytest import salt.modules.status as status +import salt.utils.platform from salt.exceptions import CommandExecutionError -from tests.support.mock import MagicMock, Mock, patch +from tests.support.mock import MagicMock, Mock, mock_open, patch @pytest.fixture @@ -73,3 +81,402 @@ def test_custom(): with patch.dict(status.__salt__, {"config.dot_vals": mock2}): with patch.dict(status.__salt__, {"status.uptime": mock}): assert status.custom() == {"day": "UNKNOWN"} + + +def _set_up_test_uptime(): + """ + Define common mock data for status.uptime tests + """ + + class MockData: + """ + Store mock data + """ + + m = MockData() + m.now = 1477004312 + m.ut = 1540154.00 + m.idle = 3047777.32 + m.ret = { + "users": 3, + "seconds": 1540154, + "since_t": 1475464158, + "days": 17, + "since_iso": "2016-10-03T03:09:18", + "time": "19:49", + } + + return m + + +def _set_up_test_uptime_sunos(): + """ + Define common mock data for cmd.run_all for status.uptime on SunOS + """ + + class MockData: + """ + Store mock data + """ + + m = MockData() + m.ret = { + "retcode": 0, + "stdout": "unix:0:system_misc:boot_time 1475464158", + } + + return m + + +def test_uptime_linux(): + """ + Test modules.status.uptime function for Linux + """ + m = _set_up_test_uptime() + + with patch.multiple( + salt.utils.platform, + is_linux=MagicMock(return_value=True), + is_sunos=MagicMock(return_value=False), + is_darwin=MagicMock(return_value=False), + is_freebsd=MagicMock(return_value=False), + is_openbsd=MagicMock(return_value=False), + is_netbsd=MagicMock(return_value=False), + ), 
patch("salt.utils.path.which", MagicMock(return_value=True)), patch.dict( + status.__salt__, + {"cmd.run": MagicMock(return_value=os.linesep.join(["1", "2", "3"]))}, + ), patch( + "time.time", MagicMock(return_value=m.now) + ), patch( + "os.path.exists", MagicMock(return_value=True) + ): + proc_uptime = salt.utils.stringutils.to_str("{} {}".format(m.ut, m.idle)) + + with patch("salt.utils.files.fopen", mock_open(read_data=proc_uptime)): + ret = status.uptime() + assert ret == m.ret + with patch("os.path.exists", MagicMock(return_value=False)): + with pytest.raises(CommandExecutionError): + status.uptime() + + +def test_uptime_sunos(): + """ + Test modules.status.uptime function for SunOS + """ + m = _set_up_test_uptime() + m2 = _set_up_test_uptime_sunos() + with patch.multiple( + salt.utils.platform, + is_linux=MagicMock(return_value=False), + is_sunos=MagicMock(return_value=True), + is_darwin=MagicMock(return_value=False), + is_freebsd=MagicMock(return_value=False), + is_openbsd=MagicMock(return_value=False), + is_netbsd=MagicMock(return_value=False), + ), patch("salt.utils.path.which", MagicMock(return_value=True)), patch.dict( + status.__salt__, + { + "cmd.run": MagicMock(return_value=os.linesep.join(["1", "2", "3"])), + "cmd.run_all": MagicMock(return_value=m2.ret), + }, + ), patch( + "time.time", MagicMock(return_value=m.now) + ): + ret = status.uptime() + assert ret == m.ret + + +def test_uptime_macos(): + """ + Test modules.status.uptime function for macOS + """ + m = _set_up_test_uptime() + + kern_boottime = ( + "{{ sec = {0}, usec = {1:0<6} }} Mon Oct 03 03:09:18.23 2016".format( + *str(m.now - m.ut).split(".") + ) + ) + with patch.multiple( + salt.utils.platform, + is_linux=MagicMock(return_value=False), + is_sunos=MagicMock(return_value=False), + is_darwin=MagicMock(return_value=True), + is_freebsd=MagicMock(return_value=False), + is_openbsd=MagicMock(return_value=False), + is_netbsd=MagicMock(return_value=False), + ), patch("salt.utils.path.which", 
MagicMock(return_value=True)), patch.dict( + status.__salt__, + { + "cmd.run": MagicMock(return_value=os.linesep.join(["1", "2", "3"])), + "sysctl.get": MagicMock(return_value=kern_boottime), + }, + ), patch( + "time.time", MagicMock(return_value=m.now) + ): + + ret = status.uptime() + assert ret == m.ret + + with patch.dict(status.__salt__, {"sysctl.get": MagicMock(return_value="")}): + with pytest.raises(CommandExecutionError): + status.uptime() + + +def test_uptime_return_success_not_supported(): + """ + Test modules.status.uptime function for other platforms + """ + with patch.multiple( + salt.utils.platform, + is_linux=MagicMock(return_value=False), + is_sunos=MagicMock(return_value=False), + is_darwin=MagicMock(return_value=False), + is_freebsd=MagicMock(return_value=False), + is_openbsd=MagicMock(return_value=False), + is_netbsd=MagicMock(return_value=False), + ): + exc_mock = MagicMock(side_effect=CommandExecutionError) + with pytest.raises(CommandExecutionError): + with patch.dict(status.__salt__, {"cmd.run": exc_mock}): + status.uptime() + + +def _set_up_test_cpustats_openbsd(): + """ + Define mock data for status.cpustats on OpenBSD + """ + + class MockData: + """ + Store mock data + """ + + m = MockData() + m.ret = { + "0": { + "User": "0.0%", + "Nice": "0.0%", + "System": "4.5%", + "Interrupt": "0.5%", + "Idle": "95.0%", + } + } + + return m + + +def test_cpustats_openbsd(): + """ + Test modules.status.cpustats function for OpenBSD + """ + m = _set_up_test_cpustats_openbsd() + + systat = ( + "\n\n 1 users Load 0.20 0.07 0.05 salt.localdomain" + " 09:42:42\nCPU User Nice System " + " Interrupt Idle\n0 0.0% 0.0% " + " 4.5% 0.5% 95.0%\n" + ) + + with patch.multiple( + salt.utils.platform, + is_linux=MagicMock(return_value=False), + is_sunos=MagicMock(return_value=False), + is_darwin=MagicMock(return_value=False), + is_freebsd=MagicMock(return_value=False), + is_openbsd=MagicMock(return_value=True), + is_netbsd=MagicMock(return_value=False), + ), 
patch("salt.utils.path.which", MagicMock(return_value=True)), patch.dict( + status.__grains__, {"kernel": "OpenBSD"} + ), patch.dict( + status.__salt__, {"cmd.run": MagicMock(return_value=systat)} + ): + ret = status.cpustats() + assert ret == m.ret + + +def _set_up_test_cpuinfo_bsd(): + class MockData: + """ + Store mock data + """ + + m = MockData() + m.ret = { + "hw.model": "Intel(R) Core(TM) i5-7287U CPU @ 3.30GHz", + "hw.ncpu": "4", + } + + return m + + +def test_cpuinfo_freebsd(): + m = _set_up_test_cpuinfo_bsd() + sysctl = "hw.model:Intel(R) Core(TM) i5-7287U CPU @ 3.30GHz\nhw.ncpu:4" + + with patch.dict(status.__grains__, {"kernel": "FreeBSD"}): + with patch.dict(status.__salt__, {"cmd.run": MagicMock(return_value=sysctl)}): + ret = status.cpuinfo() + assert ret == m.ret + + +def test_cpuinfo_openbsd(): + m = _set_up_test_cpuinfo_bsd() + sysctl = "hw.model=Intel(R) Core(TM) i5-7287U CPU @ 3.30GHz\nhw.ncpu=4" + + for bsd in ["NetBSD", "OpenBSD"]: + with patch.dict(status.__grains__, {"kernel": bsd}): + with patch.dict( + status.__salt__, {"cmd.run": MagicMock(return_value=sysctl)} + ): + ret = status.cpuinfo() + assert ret == m.ret + + +def _set_up_test_meminfo_openbsd(): + class MockData: + """ + Store mock data + """ + + m = MockData() + m.ret = { + "active virtual pages": "355M", + "free list size": "305M", + "page faults": "845", + "pages reclaimed": "1", + "pages paged in": "2", + "pages paged out": "3", + "pages freed": "4", + "pages scanned": "5", + } + + return m + + +def test_meminfo_openbsd(): + m = _set_up_test_meminfo_openbsd() + vmstat = ( + " procs memory page disks traps " + " cpu\n r s avm fre flt re pi po fr sr cd0 sd0 int sys " + " cs us sy id\n 2 103 355M 305M 845 1 2 3 4 5 0 1 21 " + " 682 86 1 1 98" + ) + + with patch.dict(status.__grains__, {"kernel": "OpenBSD"}): + with patch.dict(status.__salt__, {"cmd.run": MagicMock(return_value=vmstat)}): + ret = status.meminfo() + assert ret == m.ret + + +def _set_up_test_w_linux(): + """ + Define 
mock data for status.w on Linux + """ + + class MockData: + """ + Store mock data + """ + + m = MockData() + m.ret = [ + { + "idle": "0s", + "jcpu": "0.24s", + "login": "13:42", + "pcpu": "0.16s", + "tty": "pts/1", + "user": "root", + "what": "nmap -sV 10.2.2.2", + } + ] + + return m + + +def _set_up_test_w_bsd(): + """ + Define mock data for status.w on Linux + """ + + class MockData: + """ + Store mock data + """ + + m = MockData() + m.ret = [ + { + "idle": "0", + "from": "10.2.2.1", + "login": "1:42PM", + "tty": "p1", + "user": "root", + "what": "nmap -sV 10.2.2.2", + } + ] + + return m + + +def test_w_linux(): + m = _set_up_test_w_linux() + w_output = "root pts/1 13:42 0s 0.24s 0.16s nmap -sV 10.2.2.2" + + with patch.dict(status.__grains__, {"kernel": "Linux"}): + with patch.dict(status.__salt__, {"cmd.run": MagicMock(return_value=w_output)}): + ret = status.w() + assert ret == m.ret + + +def test_w_bsd(): + m = _set_up_test_w_bsd() + w_output = "root p1 10.2.2.1 1:42PM 0 nmap -sV 10.2.2.2" + + for bsd in ["Darwin", "FreeBSD", "OpenBSD"]: + with patch.dict(status.__grains__, {"kernel": bsd}): + with patch.dict( + status.__salt__, {"cmd.run": MagicMock(return_value=w_output)} + ): + ret = status.w() + assert ret == m.ret + + +def _set_up_test_status_pid_linux(): + class MockData: + """ + Store mock data + """ + + m = MockData() + m.ret = "2701\n7539\n7540\n7542\n7623" + return m + + +def test_status_pid_linux(): + m = _set_up_test_status_pid_linux() + ps = ( + "UID PID PPID C STIME TTY TIME CMD\nroot 360 2 0 Jun08 ?" + " 00:00:00 [jbd2/dm-0-8]\nroot 947 2 0 Jun08 ? 00:00:00 " + " [jbd2/dm-1-8]\nroot 949 2 0 Jun08 ? 00:00:09 " + " [jbd2/dm-3-8]\nroot 951 2 0 Jun08 ? 00:00:00 " + " [jbd2/dm-4-8]\nroot 2701 1 0 Jun08 ? 00:00:28 /usr/sbin/httpd" + " -k start\napache 7539 2701 0 04:40 ? 00:00:04 /usr/sbin/httpd -k" + " start\napache 7540 2701 0 04:40 ? 00:00:02 /usr/sbin/httpd -k" + " start\napache 7542 2701 0 04:40 ? 
00:01:46 /usr/sbin/httpd -k" + " start\napache 7623 2701 0 04:40 ? 00:02:41 /usr/sbin/httpd -k" + " start\nroot 1564 1 0 Jun11 ? 00:07:19 /usr/bin/python3" + " /usr/bin/salt-minion -d\nroot 6674 1564 0 19:53 ? 00:00:00 " + " /usr/bin/python3 /usr/bin/salt-call status.pid httpd -l debug" + ) + + with patch.dict(status.__grains__, {"ps": "ps -efHww"}): + with patch.dict( + status.__salt__, {"cmd.run_stdout": MagicMock(return_value=ps)} + ): + with patch.object(os, "getpid", return_value="6674"): + ret = status.pid("httpd") + assert ret == m.ret diff --git a/tests/pytests/unit/modules/test_swift.py b/tests/pytests/unit/modules/test_swift.py new file mode 100644 index 000000000000..f8c0f3133875 --- /dev/null +++ b/tests/pytests/unit/modules/test_swift.py @@ -0,0 +1,55 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.modules.swift +""" + + +import pytest + +import salt.modules.swift as swift +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {swift: {}} + + +def test_delete(): + """ + Test for delete a container, or delete an object from a container. + """ + with patch.object(swift, "_auth", MagicMock()): + assert swift.delete("mycontainer") + + assert swift.delete("mycontainer", path="myfile.png") + + +def test_get(): + """ + Test for list the contents of a container, + or return an object from a container. + """ + with patch.object(swift, "_auth", MagicMock()): + assert swift.get() + + assert swift.get("mycontainer") + + assert swift.get("mycontainer", path="myfile.png", return_bin=True) + + assert swift.get("mycontainer", path="myfile.png", local_file="/tmp/myfile.png") + + assert not swift.get("mycontainer", path="myfile.png") + + +def test_put(): + """ + Test for create a new container, or upload an object to a container. 
+ """ + with patch.object(swift, "_auth", MagicMock()): + assert swift.put("mycontainer") + + assert swift.put("mycontainer", path="myfile.png", local_file="/tmp/myfile.png") + + assert not swift.put("mycontainer", path="myfile.png") diff --git a/tests/pytests/unit/modules/test_syslog_ng.py b/tests/pytests/unit/modules/test_syslog_ng.py new file mode 100644 index 000000000000..5a4b2f57942f --- /dev/null +++ b/tests/pytests/unit/modules/test_syslog_ng.py @@ -0,0 +1,357 @@ +""" + Test cases for salt.modules.syslog_ng +""" + + +import os +from textwrap import dedent + +import pytest + +import salt.modules.syslog_ng as syslog_ng +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def _version(): + return "3.6.0alpha0" + + +@pytest.fixture +def _modules(): + return ( + "syslogformat,json-plugin,basicfuncs,afstomp,afsocket,cryptofuncs," + "afmongodb,dbparser,system-source,affile,pseudofile,afamqp," + "afsocket-notls,csvparser,linux-kmsg-format,afuser,confgen,afprog" + ) + + +@pytest.fixture +def version_output(_version, _modules): + return """syslog-ng {0} +Installer-Version: {0} +Revision: +Compile-Date: Apr 4 2014 20:26:18 +Error opening plugin module; module='afsocket-tls', error='/home/tibi/install/syslog-ng/lib/syslog-ng/libafsocket-tls.so: undefined symbol: tls_context_setup_session' +Available-Modules: {1} +Enable-Debug: on +Enable-GProf: off +Enable-Memtrace: off +Enable-IPv6: on +Enable-Spoof-Source: off +Enable-TCP-Wrapper: off +Enable-Linux-Caps: off""".format( + _version, _modules + ) + + +@pytest.fixture +def stats_output(): + return """SourceName;SourceId;SourceInstance;State;Type;Number +center;;received;a;processed;0 +destination;#anon-destination0;;a;processed;0 +destination;#anon-destination1;;a;processed;0 +source;s_gsoc2014;;a;processed;0 +center;;queued;a;processed;0 +global;payload_reallocs;;a;processed;0 +global;sdata_updates;;a;processed;0 +global;msg_clones;;a;processed;0""" + + +@pytest.fixture +def orig_env(): + return 
{"PATH": "/foo:/bar"} + + +@pytest.fixture +def bin_dir(): + return "/baz" + + +@pytest.fixture +def mocked_env(): + return {"PATH": "/foo:/bar:/baz"} + + +@pytest.fixture +def configure_loader_modules(): + return {syslog_ng: {}} + + +def test_statement_without_options(): + s = syslog_ng.Statement("source", "s_local", options=[]) + b = s.build() + assert b == ( + dedent( + """\ + source s_local { + }; + """ + ) + ) + + +def test_non_empty_statement(): + o1 = syslog_ng.Option("file") + o2 = syslog_ng.Option("tcp") + s = syslog_ng.Statement("source", "s_local", options=[o1, o2]) + b = s.build() + assert b == ( + dedent( + """\ + source s_local { + file( + ); + tcp( + ); + }; + """ + ) + ) + + +def test_option_with_parameters(): + o1 = syslog_ng.Option("file") + p1 = syslog_ng.SimpleParameter('"/var/log/messages"') + p2 = syslog_ng.SimpleParameter() + p3 = syslog_ng.TypedParameter() + p3.type = "tls" + p2.value = '"/var/log/syslog"' + o1.add_parameter(p1) + o1.add_parameter(p2) + o1.add_parameter(p3) + b = o1.build() + assert b == ( + dedent( + """\ + file( + "/var/log/messages", + "/var/log/syslog", + tls( + ) + ); + """ + ) + ) + + +def test_parameter_with_values(): + p = syslog_ng.TypedParameter() + p.type = "tls" + v1 = syslog_ng.TypedParameterValue() + v1.type = "key_file" + + v2 = syslog_ng.TypedParameterValue() + v2.type = "cert_file" + + p.add_value(v1) + p.add_value(v2) + + b = p.build() + assert b == ( + dedent( + """\ + tls( + key_file( + ), + cert_file( + ) + )""" + ) + ) + + +def test_value_with_arguments(): + t = syslog_ng.TypedParameterValue() + t.type = "key_file" + + a1 = syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"') + a2 = syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"') + + t.add_argument(a1) + t.add_argument(a2) + + b = t.build() + assert b == ( + dedent( + """\ + key_file( + "/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key" + "/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key" + )""" + ) + ) + + 
+def test_end_to_end_statement_generation(): + s = syslog_ng.Statement("source", "s_tls") + + o = syslog_ng.Option("tcp") + + ip = syslog_ng.TypedParameter("ip") + ip.add_value(syslog_ng.SimpleParameterValue("'192.168.42.2'")) + o.add_parameter(ip) + + port = syslog_ng.TypedParameter("port") + port.add_value(syslog_ng.SimpleParameterValue(514)) + o.add_parameter(port) + + tls = syslog_ng.TypedParameter("tls") + key_file = syslog_ng.TypedParameterValue("key_file") + key_file.add_argument( + syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"') + ) + cert_file = syslog_ng.TypedParameterValue("cert_file") + cert_file.add_argument( + syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/cert.d/syslog-ng.cert"') + ) + peer_verify = syslog_ng.TypedParameterValue("peer_verify") + peer_verify.add_argument(syslog_ng.Argument("optional-untrusted")) + tls.add_value(key_file) + tls.add_value(cert_file) + tls.add_value(peer_verify) + o.add_parameter(tls) + + s.add_child(o) + b = s.build() + assert b == ( + dedent( + """\ + source s_tls { + tcp( + ip( + '192.168.42.2' + ), + port( + 514 + ), + tls( + key_file( + "/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key" + ), + cert_file( + "/opt/syslog-ng/etc/syslog-ng/cert.d/syslog-ng.cert" + ), + peer_verify( + optional-untrusted + ) + ) + ); + }; + """ + ) + ) + + +@pytest.mark.skip_on_windows(reason="Module not available on Windows") +def test_version(_version, version_output, orig_env, bin_dir, mocked_env): + cmd_ret = {"retcode": 0, "stdout": version_output} + expected_output = {"retcode": 0, "stdout": _version} + cmd_args = ["syslog-ng", "-V"] + + cmd_mock = MagicMock(return_value=cmd_ret) + with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( + os.environ, orig_env + ): + result = syslog_ng.version() + assert result == expected_output + cmd_mock.assert_called_once_with(cmd_args, env=None, python_shell=False) + + cmd_mock = MagicMock(return_value=cmd_ret) + with patch.dict(syslog_ng.__salt__, 
{"cmd.run_all": cmd_mock}), patch.dict( + os.environ, orig_env + ): + result = syslog_ng.version(syslog_ng_sbin_dir=bin_dir) + assert result == expected_output + cmd_mock.assert_called_once_with(cmd_args, env=mocked_env, python_shell=False) + + +@pytest.mark.skip_on_windows(reason="Module not available on Windows") +def test_stats(stats_output, orig_env, bin_dir, mocked_env): + cmd_ret = {"retcode": 0, "stdout": stats_output} + cmd_args = ["syslog-ng-ctl", "stats"] + + cmd_mock = MagicMock(return_value=cmd_ret) + with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( + os.environ, orig_env + ): + result = syslog_ng.stats() + assert result == cmd_ret + cmd_mock.assert_called_once_with(cmd_args, env=None, python_shell=False) + + cmd_mock = MagicMock(return_value=cmd_ret) + with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( + os.environ, orig_env + ): + result = syslog_ng.stats(syslog_ng_sbin_dir=bin_dir) + assert result == cmd_ret + cmd_mock.assert_called_once_with(cmd_args, env=mocked_env, python_shell=False) + + +@pytest.mark.skip_on_windows(reason="Module not available on Windows") +def test_modules(_modules, version_output, orig_env, bin_dir, mocked_env): + cmd_ret = {"retcode": 0, "stdout": version_output} + expected_output = {"retcode": 0, "stdout": _modules} + cmd_args = ["syslog-ng", "-V"] + + cmd_mock = MagicMock(return_value=cmd_ret) + with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( + os.environ, orig_env + ): + result = syslog_ng.modules() + assert result == expected_output + cmd_mock.assert_called_once_with(cmd_args, env=None, python_shell=False) + + cmd_mock = MagicMock(return_value=cmd_ret) + with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( + os.environ, orig_env + ): + result = syslog_ng.modules(syslog_ng_sbin_dir=bin_dir) + assert result == expected_output + cmd_mock.assert_called_once_with(cmd_args, env=mocked_env, python_shell=False) + + 
+@pytest.mark.skip_on_windows(reason="Module not available on Windows") +def test_config_test(orig_env, bin_dir, mocked_env): + cmd_ret = {"retcode": 0, "stderr": "", "stdout": "Foo"} + cmd_args = ["syslog-ng", "--syntax-only"] + + cmd_mock = MagicMock(return_value=cmd_ret) + with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( + os.environ, orig_env + ): + result = syslog_ng.config_test() + assert result == cmd_ret + cmd_mock.assert_called_once_with(cmd_args, env=None, python_shell=False) + + cmd_mock = MagicMock(return_value=cmd_ret) + with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( + os.environ, orig_env + ): + result = syslog_ng.config_test(syslog_ng_sbin_dir=bin_dir) + assert result == cmd_ret + cmd_mock.assert_called_once_with(cmd_args, env=mocked_env, python_shell=False) + + +@pytest.mark.skip_on_windows(reason="Module not available on Windows") +def test_config_test_cfgfile(orig_env, bin_dir, mocked_env): + cfgfile = "/path/to/syslog-ng.conf" + cmd_ret = {"retcode": 1, "stderr": "Syntax error...", "stdout": ""} + cmd_args = ["syslog-ng", "--syntax-only", "--cfgfile={}".format(cfgfile)] + + cmd_mock = MagicMock(return_value=cmd_ret) + with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( + os.environ, orig_env + ): + assert syslog_ng.config_test(cfgfile=cfgfile) == cmd_ret + cmd_mock.assert_called_once_with(cmd_args, env=None, python_shell=False) + + cmd_mock = MagicMock(return_value=cmd_ret) + with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( + os.environ, orig_env + ): + assert ( + syslog_ng.config_test(syslog_ng_sbin_dir=bin_dir, cfgfile=cfgfile) + == cmd_ret + ) + cmd_mock.assert_called_once_with(cmd_args, env=mocked_env, python_shell=False) diff --git a/tests/pytests/unit/modules/test_telegram.py b/tests/pytests/unit/modules/test_telegram.py new file mode 100644 index 000000000000..568c01002b26 --- /dev/null +++ 
b/tests/pytests/unit/modules/test_telegram.py @@ -0,0 +1,79 @@ +""" + :codeauthor: :email:`Roald Nefs (info@roaldnefs.com)` + + Test cases for salt.modules.telegram. +""" + + +import pytest + +import salt.modules.telegram as telegram +from tests.support.mock import MagicMock, Mock + + +class RequestMock(Mock): + """ + Request Mock + """ + + def get(self, *args, **kwargs): + return RequestResponseMock() + + def put(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + return RequestPutResponseMock() + + def delete(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + return RequestResponseMock() + + +class RequestResponseMock(Mock): + """ + Request Response Mock + """ + + def json(self): + return [ + {"url": "http://example.org", "_id": 1234}, + ] + + +class RequestPutResponseMock(Mock): + """ + Request Put Response Mock + """ + + ok = True + + def json(self): + return {"_id": 4321} + + +@pytest.fixture +def configure_loader_modules(): + module_globals = { + "__salt__": { + "config.get": MagicMock( + return_value={ + "telegram": { + "chat_id": "123456789", + "token": "000000000:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", + } + } + ), + "requests.put": Mock(), + }, + "requests": RequestMock(), + } + return {telegram: module_globals} + + +def test_post_message(): + """ + Test the post_message function. + """ + message = "Hello World!" 
+ assert telegram.post_message(message) diff --git a/tests/pytests/unit/modules/test_uptime.py b/tests/pytests/unit/modules/test_uptime.py new file mode 100644 index 000000000000..2e129a7c9bbd --- /dev/null +++ b/tests/pytests/unit/modules/test_uptime.py @@ -0,0 +1,83 @@ +""" + Test cases for salt.modules.uptime +""" + + +import pytest + +import salt.modules.uptime as uptime +from salt.exceptions import CommandExecutionError +from tests.support.mock import Mock + + +class RequestMock(Mock): + """ + Request Mock + """ + + def get(self, *args, **kwargs): + return RequestResponseMock() + + def put(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + return RequestPutResponseMock() + + def delete(self, *args, **kwargs): + self.args = args + self.kwargs = kwargs + return RequestResponseMock() + + +class RequestResponseMock(Mock): + def json(self): + return [ + {"url": "http://example.org", "_id": 1234}, + ] + + +class RequestPutResponseMock(Mock): + + ok = True + + def json(self): + return {"_id": 4321} + + +@pytest.fixture +def request_mock(): + return RequestMock() + + +@pytest.fixture +def configure_loader_modules(request_mock): + return { + uptime: { + "__salt__": { + "pillar.get": Mock(return_value="http://localhost:5000"), + "requests.put": Mock(), + }, + "requests": request_mock, + } + } + + +def test_checks_list(): + ret = uptime.checks_list() + assert ret == ["http://example.org"] + + +def test_checks_exists(): + assert uptime.check_exists("http://example.org") is True + + +def test_checks_create(request_mock): + pytest.raises(CommandExecutionError, uptime.create, "http://example.org") + assert uptime.create("http://example.com") == 4321 + assert request_mock.args == ("http://localhost:5000/api/checks",) + + +def test_checks_delete(request_mock): + pytest.raises(CommandExecutionError, uptime.delete, "http://example.com") + assert uptime.delete("http://example.org") is True + assert request_mock.args == ("http://localhost:5000/api/checks/1234",) 
diff --git a/tests/pytests/unit/modules/test_uwsgi.py b/tests/pytests/unit/modules/test_uwsgi.py new file mode 100644 index 000000000000..5c97c191ce88 --- /dev/null +++ b/tests/pytests/unit/modules/test_uwsgi.py @@ -0,0 +1,27 @@ +""" + Test cases for salt.modules.uswgi +""" + + +import pytest + +import salt.modules.uwsgi as uwsgi +from tests.support.mock import MagicMock, Mock, patch + + +@pytest.fixture +def configure_loader_modules(): + with patch("salt.utils.path.which", Mock(return_value="/usr/bin/uwsgi")): + return {uwsgi: {}} + + +def test_uwsgi_stats(): + socket = "127.0.0.1:5050" + mock = MagicMock(return_value='{"a": 1, "b": 2}') + with patch.dict(uwsgi.__salt__, {"cmd.run": mock}): + result = uwsgi.stats(socket) + mock.assert_called_once_with( + ["uwsgi", "--connect-and-read", "{}".format(socket)], + python_shell=False, + ) + assert result == {"a": 1, "b": 2} diff --git a/tests/pytests/unit/modules/test_vagrant.py b/tests/pytests/unit/modules/test_vagrant.py new file mode 100644 index 000000000000..2a4b5de0b565 --- /dev/null +++ b/tests/pytests/unit/modules/test_vagrant.py @@ -0,0 +1,167 @@ +""" + TestCase for the salt.modules.vagrant module. 
+""" + + +import pytest + +import salt.exceptions +import salt.modules.vagrant as vagrant +import salt.utils.platform +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def local_opts(tmp_path): + return { + "extension_modules": "", + "vagrant_sdb_data": { + "driver": "sqlite3", + "database": str(tmp_path / "test_vagrant.sqlite"), + "table": "sdb", + "create_table": True, + }, + } + + +@pytest.fixture +def configure_loader_modules(local_opts): + return {vagrant: {"__opts__": local_opts}} + + +def test_vagrant_get_vm_info_not_found(): + mock_sdb = MagicMock(return_value=None) + with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb}): + with pytest.raises(salt.exceptions.SaltInvocationError): + vagrant.get_vm_info("thisNameDoesNotExist") + + +def test_vagrant_init_positional(local_opts, tmp_path): + path_nowhere = str(tmp_path / "tmp" / "nowhere") + mock_sdb = MagicMock(return_value=None) + with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}): + resp = vagrant.init( + "test1", + path_nowhere, + "onetest", + "nobody", + False, + "french", + {"different": "very"}, + ) + assert resp.startswith("Name test1 defined") + expected = dict( + name="test1", + cwd=path_nowhere, + machine="onetest", + runas="nobody", + vagrant_provider="french", + different="very", + ) + mock_sdb.assert_called_with( + "sdb://vagrant_sdb_data/onetest?{}".format(path_nowhere), + "test1", + local_opts, + ) + mock_sdb.assert_any_call("sdb://vagrant_sdb_data/test1", expected, local_opts) + + +def test_vagrant_get_vm_info(): + testdict = {"testone": "one", "machine": "two"} + mock_sdb = MagicMock(return_value=testdict) + with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb}): + resp = vagrant.get_vm_info("test1") + assert resp == testdict + + +def test_vagrant_init_dict(local_opts): + testdict = dict( + cwd="/tmp/anywhere", + machine="twotest", + runas="somebody", + vagrant_provider="english", + ) + expected = testdict.copy() + expected["name"] = "test2" + 
mock_sdb = MagicMock(return_value=None) + with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}): + vagrant.init("test2", vm=testdict) + mock_sdb.assert_any_call("sdb://vagrant_sdb_data/test2", expected, local_opts) + + +def test_vagrant_init_arg_override(local_opts): + testdict = dict( + cwd="/tmp/there", + machine="treetest", + runas="anybody", + vagrant_provider="spansh", + ) + mock_sdb = MagicMock(return_value=None) + with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}): + vagrant.init( + "test3", + cwd="/tmp", + machine="threetest", + runas="him", + vagrant_provider="polish", + vm=testdict, + ) + expected = dict( + name="test3", + cwd="/tmp", + machine="threetest", + runas="him", + vagrant_provider="polish", + ) + mock_sdb.assert_any_call("sdb://vagrant_sdb_data/test3", expected, local_opts) + + +def test_vagrant_get_ssh_config_fails(): + mock_sdb = MagicMock(return_value=None) + with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}): + mock_sdb = MagicMock(return_value={}) + with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb}): + vagrant.init("test3", cwd="/tmp") + with pytest.raises(salt.exceptions.SaltInvocationError): + vagrant.get_ssh_config("test3") # has not been started + + +def test_vagrant_destroy(local_opts, tmp_path): + path_mydir = str(tmp_path / "my" / "dir") + mock_cmd = MagicMock(return_value={"retcode": 0}) + with patch.dict(vagrant.__salt__, {"cmd.run_all": mock_cmd}): + mock_sdb = MagicMock(return_value=None) + with patch.dict(vagrant.__utils__, {"sdb.sdb_delete": mock_sdb}): + mock_sdb_get = MagicMock( + return_value={"machine": "macfour", "cwd": path_mydir} + ) + with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb_get}): + assert vagrant.destroy("test4") + mock_sdb.assert_any_call( + "sdb://vagrant_sdb_data/macfour?{}".format(path_mydir), + local_opts, + ) + mock_sdb.assert_any_call("sdb://vagrant_sdb_data/test4", local_opts) + cmd = "vagrant destroy -f macfour" + mock_cmd.assert_called_with( + cmd, 
runas=None, cwd=path_mydir, output_loglevel="info" + ) + + +def test_vagrant_start(): + mock_cmd = MagicMock(return_value={"retcode": 0}) + with patch.dict(vagrant.__salt__, {"cmd.run_all": mock_cmd}): + mock_sdb_get = MagicMock( + return_value={ + "machine": "five", + "cwd": "/the/dir", + "runas": "me", + "vagrant_provider": "him", + } + ) + with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb_get}): + assert vagrant.start("test5") + cmd = "vagrant up five --provider=him" + mock_cmd.assert_called_with( + cmd, runas="me", cwd="/the/dir", output_loglevel="info" + ) diff --git a/tests/pytests/unit/modules/test_vault.py b/tests/pytests/unit/modules/test_vault.py new file mode 100644 index 000000000000..c72c958f4f23 --- /dev/null +++ b/tests/pytests/unit/modules/test_vault.py @@ -0,0 +1,161 @@ +""" +Test case for the vault execution module +""" + + +import pytest + +import salt.modules.vault as vault +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return { + vault: { + "__grains__": {"id": "foo"}, + "__utils__": { + "vault.is_v2": MagicMock( + return_value={ + "v2": True, + "data": "secrets/data/mysecret", + "metadata": "secrets/metadata/mysecret", + "type": "kv", + } + ), + }, + }, + } + + +@pytest.fixture +def path(): + return "foo/bar/" + + +def test_read_secret_v1(): + """ + Test salt.modules.vault.read_secret function + """ + version = {"v2": False, "data": None, "metadata": None, "type": None} + mock_version = MagicMock(return_value=version) + mock_vault = MagicMock() + mock_vault.return_value.status_code = 200 + mock_vault.return_value.json.return_value = {"data": {"key": "test"}} + with patch.dict(vault.__utils__, {"vault.make_request": mock_vault}), patch.dict( + vault.__utils__, {"vault.is_v2": mock_version} + ): + vault_return = vault.read_secret("/secret/my/secret") + + assert vault_return == {"key": "test"} + + +def test_read_secret_v1_key(): 
+ """ + Test salt.modules.vault.read_secret function specifying key + """ + version = {"v2": False, "data": None, "metadata": None, "type": None} + mock_version = MagicMock(return_value=version) + mock_vault = MagicMock() + mock_vault.return_value.status_code = 200 + mock_vault.return_value.json.return_value = {"data": {"key": "somevalue"}} + with patch.dict(vault.__utils__, {"vault.make_request": mock_vault}), patch.dict( + vault.__utils__, {"vault.is_v2": mock_version} + ): + vault_return = vault.read_secret("/secret/my/secret", "key") + + assert vault_return == "somevalue" + + +def test_read_secret_v2(): + """ + Test salt.modules.vault.read_secret function for v2 of kv secret backend + """ + # given path secrets/mysecret generate v2 output + version = { + "v2": True, + "data": "secrets/data/mysecret", + "metadata": "secrets/metadata/mysecret", + "type": "kv", + } + mock_version = MagicMock(return_value=version) + mock_vault = MagicMock() + mock_vault.return_value.status_code = 200 + v2_return = { + "data": { + "data": {"akey": "avalue"}, + "metadata": { + "created_time": "2018-10-23T20:21:55.042755098Z", + "destroyed": False, + "version": 13, + "deletion_time": "", + }, + } + } + + mock_vault.return_value.json.return_value = v2_return + with patch.dict(vault.__utils__, {"vault.make_request": mock_vault}), patch.dict( + vault.__utils__, {"vault.is_v2": mock_version} + ): + # Validate metadata returned + vault_return = vault.read_secret("/secret/my/secret", metadata=True) + assert "data" in vault_return + assert "metadata" in vault_return + # Validate just data returned + vault_return = vault.read_secret("/secret/my/secret") + assert "akey" in vault_return + + +def test_read_secret_v2_key(): + """ + Test salt.modules.vault.read_secret function for v2 of kv secret backend + with specified key + """ + # given path secrets/mysecret generate v2 output + version = { + "v2": True, + "data": "secrets/data/mysecret", + "metadata": "secrets/metadata/mysecret", + "type": 
"kv", + } + mock_version = MagicMock(return_value=version) + mock_vault = MagicMock() + mock_vault.return_value.status_code = 200 + v2_return = { + "data": { + "data": {"akey": "avalue"}, + "metadata": { + "created_time": "2018-10-23T20:21:55.042755098Z", + "destroyed": False, + "version": 13, + "deletion_time": "", + }, + } + } + + mock_vault.return_value.json.return_value = v2_return + with patch.dict(vault.__utils__, {"vault.make_request": mock_vault}), patch.dict( + vault.__utils__, {"vault.is_v2": mock_version} + ): + vault_return = vault.read_secret("/secret/my/secret", "akey") + + assert vault_return == "avalue" + + +def test_read_secret_with_default(path): + assert vault.read_secret(path, default="baz") == "baz" + + +def test_read_secret_no_default(path): + with pytest.raises(CommandExecutionError): + vault.read_secret(path) + + +def test_list_secrets_with_default(path): + assert vault.list_secrets(path, default=["baz"]) == ["baz"] + + +def test_list_secrets_no_default(path): + with pytest.raises(CommandExecutionError): + vault.list_secrets(path) diff --git a/tests/pytests/unit/modules/test_win_file.py b/tests/pytests/unit/modules/test_win_file.py index efcdb31a550b..83667bb63771 100644 --- a/tests/pytests/unit/modules/test_win_file.py +++ b/tests/pytests/unit/modules/test_win_file.py @@ -1,13 +1,43 @@ +import os import re import pytest import salt.modules.win_file as win_file +import salt.utils.user +import salt.utils.win_dacl from salt.exceptions import CommandExecutionError +from tests.support.mock import patch pytestmark = [pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows] +@pytest.fixture +def configure_loader_modules(): + return { + win_file: {}, + salt.utils.win_dacl: {}, + } + + +def test__virtual__not_windows(): + with patch("salt.utils.platform.is_windows", autospec=True, return_value=False): + expected = (False, "Module win_file: Missing Win32 modules") + result = win_file.__virtual__() + assert result == expected + with 
patch("salt.modules.win_file.HAS_WINDOWS_MODULES", False): + expected = (False, "Module win_file: Missing Win32 modules") + result = win_file.__virtual__() + assert result == expected + + +def test__virtual__no_dacl(): + with patch("salt.modules.win_file.HAS_WIN_DACL", False): + expected = (False, "Module win_file: Unable to load salt.utils.win_dacl") + result = win_file.__virtual__() + assert result == expected + + def test__get_version_os(): expected = ["32-bit Windows", "Windows NT"] result = win_file._get_version_os(0x00040004) @@ -56,6 +86,187 @@ def test__get_version_sys(): assert regex.search(result) +def test_get_pgid_error(): + with pytest.raises(CommandExecutionError): + win_file.get_pgid("C:\\Path\\That\\Does\\Not\\Exist.txt") + + +def test_get_pgid(): + """ + We can't know what this value is, so we're just making sure it found + something + """ + result = win_file.get_pgid(os.getenv("COMSPEC")) + assert result != "" + + +def test_group_to_gid(): + with patch.dict(win_file.__opts__, {}): + result = win_file.group_to_gid("Administrators") + expected = "S-1-5-32-544" + assert result == expected + + +def test_group_to_gid_empty(): + with patch.dict(win_file.__opts__, {}): + result = win_file.group_to_gid("") + expected = "S-1-5-32" + assert result == expected + + +def test_uid_to_user(): + result = win_file.uid_to_user("S-1-5-32-544") + expected = "Administrators" + assert result == expected + + +def test_uid_to_user_empty(): + result = win_file.uid_to_user("") + expected = "" + assert result == expected + + +def test_user_to_uid(): + result = win_file.user_to_uid("Administrator") + expected = salt.utils.win_dacl.get_sid_string("Administrator") + assert result == expected + + +def test_user_to_uid_none(): + result = win_file.user_to_uid(None) + expected = salt.utils.win_dacl.get_sid_string(salt.utils.user.get_user()) + assert result == expected + + +def test_get_uid(): + """ + We can't know what this value is, so we're just making sure it found + something 
+ """ + result = win_file.get_uid(os.getenv("WINDIR")) + assert result != "" + + +def test_get_uid_error(): + with pytest.raises(CommandExecutionError): + win_file.get_uid("C:\\fake\\path") + + +def test_chown(tmp_path): + test_file = tmp_path / "test_file.txt" + test_file.touch() + win_file.chown(path=str(test_file), user="Administrators", pgroup="Guests") + assert win_file.get_user(str(test_file)) == "Administrators" + assert win_file.get_pgroup(str(test_file)) == "Guests" + + +def test_chpgrp(tmp_path): + test_file = tmp_path / "test_file.txt" + test_file.touch() + win_file.chown(path=str(test_file), user="Administrators", pgroup="Guests") + win_file.chpgrp(path=str(test_file), group="Administrators") + assert win_file.get_pgroup(str(test_file)) == "Administrators" + + +def test_stats_mode(tmp_path): + test_file = tmp_path / "test_file.txt" + test_file.touch() + results = win_file.stats(str(test_file)) + assert results["mode"] == "0666" + + +def test_is_link_true(tmp_path): + test_source = tmp_path / "test_source.txt" + test_link = tmp_path / "test_link.txt" + test_source.touch() + test_link.symlink_to(test_source) + results = win_file.is_link(str(test_link)) + expected = True + assert results == expected + + +def test_is_link_false(tmp_path): + test_file = tmp_path / "test_not_link.txt" + test_file.touch() + results = win_file.is_link(str(test_file)) + expected = False + assert results == expected + + +def test_mkdir(tmp_path): + test_dir = tmp_path / "test_dir" + grant_perms = {"Guests": {"perms": "full_control"}} + win_file.mkdir( + path=str(test_dir), + owner="Administrators", + grant_perms=grant_perms, + ) + owner = win_file.get_user(str(test_dir)) + assert owner == "Administrators" + perms = salt.utils.win_dacl.get_permissions(str(test_dir)) + assert perms["Not Inherited"]["Guests"]["grant"]["permissions"] == "Full control" + + +def test_check_perms(tmp_path): + test_dir = tmp_path / "test_dir" + test_dir.mkdir() + grant_perms = {"Guests": {"perms": 
"full_control"}} + ret = {} + with patch.dict(salt.utils.win_dacl.__opts__, {"test": False}): + result = win_file.check_perms( + path=str(test_dir), + ret=ret, + owner="Guests", + grant_perms=grant_perms, + ) + + expected = { + "changes": { + "grant_perms": { + "Guests": { + "permissions": "full_control", + }, + }, + "owner": "Guests", + }, + "comment": "", + "name": str(test_dir), + "result": True, + } + + assert result == expected + owner = win_file.get_user(str(test_dir)) + assert owner == "Guests" + perms = salt.utils.win_dacl.get_permissions(str(test_dir)) + assert perms["Not Inherited"]["Guests"]["grant"]["permissions"] == "Full control" + + +def test_set_perms(tmp_path): + test_dir = tmp_path / "test_dir" + test_dir.mkdir() + grant_perms = {"Guests": {"perms": "full_control"}} + win_file.set_perms( + path=str(test_dir), + grant_perms=grant_perms, + ) + perms = salt.utils.win_dacl.get_permissions(str(test_dir)) + assert perms["Not Inherited"]["Guests"]["grant"]["permissions"] == "Full control" + + +def test_get_user(): + """ + We can't know what this value is, so we're just making sure it found + something + """ + result = win_file.get_user(os.getenv("WINDIR")) + assert result != "" + + +def test_get_user_error(): + with pytest.raises(CommandExecutionError): + win_file.get_user("C:\\fake\\path") + + def test_version_missing_file(): with pytest.raises(CommandExecutionError): win_file.version("C:\\Windows\\bogus.exe") diff --git a/tests/pytests/unit/modules/test_win_iis.py b/tests/pytests/unit/modules/test_win_iis.py new file mode 100644 index 000000000000..f5e37724d243 --- /dev/null +++ b/tests/pytests/unit/modules/test_win_iis.py @@ -0,0 +1,928 @@ +""" + Test cases for salt.modules.win_iis +""" + + +import pytest + +import salt.modules.win_iis as win_iis +import salt.utils.json +from salt.exceptions import SaltInvocationError +from tests.support.mock import MagicMock, call, patch + + +@pytest.fixture +def configure_loader_modules(): + return {win_iis: {}} + + 
@pytest.fixture
def app_list():
    """One IIS application as win_iis.list_apps would report it."""
    return {
        "testApp": {
            "apppool": "MyTestPool",
            "path": "/testApp",
            "preload": False,
            "protocols": ["http"],
            "sourcepath": r"C:\inetpub\apps\testApp",
        }
    }


@pytest.fixture
def apppol_list():
    # NOTE(review): name looks like a typo for "apppool_list", but tests
    # request the fixture by this exact name, so it is kept as-is.
    return {"MyTestPool": {"applications": ["MyTestSite"], "state": "Started"}}


@pytest.fixture
def binding_list():
    """Two sample site bindings: plain HTTP and HTTPS with a certificate."""
    return {
        "*:80:": {
            "certificatehash": None,
            "certificatestorename": None,
            "hostheader": None,
            "ipaddress": "*",
            "port": 80,
            "protocol": "http",
            "sslflags": 0,
        },
        "*:443:mytestsite.local": {
            "certificatehash": "9988776655443322111000AAABBBCCCDDDEEEFFF",
            "certificatestorename": "My",
            "hostheader": "mytestsite.local",
            "ipaddress": "*",
            "port": 443,
            "protocol": "https",
            "sslflags": 0,
        },
    }


@pytest.fixture
def site_list(binding_list):
    """One IIS site wired to the binding_list fixture."""
    return {
        "MyTestSite": {
            "apppool": "MyTestPool",
            "bindings": binding_list,
            "id": 1,
            "sourcepath": r"C:\inetpub\wwwroot",
            "state": "Started",
        }
    }


@pytest.fixture
def vdir_list():
    return {"TestVdir": {"sourcepath": r"C:\inetpub\vdirs\TestVdir"}}


@pytest.fixture
def nested_vdir_list():
    return {"Test/Nested/Vdir": {"sourcepath": r"C:\inetpub\vdirs\NestedTestVdir"}}


@pytest.fixture
def list_apps_srvmgr():
    # Mimics the JSON payload _srvmgr returns for Get-WebApplication.
    return {
        "retcode": 0,
        "stdout": salt.utils.json.dumps(
            [
                {
                    "applicationPool": "MyTestPool",
                    "name": "testApp",
                    "path": "/testApp",
                    "PhysicalPath": r"C:\inetpub\apps\testApp",
                    "preloadEnabled": False,
                    "protocols": "http",
                }
            ]
        ),
    }


@pytest.fixture
def list_apppools_srvmgr():
    # Mimics the JSON payload _srvmgr returns for Get-ChildItem AppPools.
    return {
        "retcode": 0,
        "stdout": salt.utils.json.dumps(
            [
                {
                    "name": "MyTestPool",
                    "state": "Started",
                    "Applications": {"value": ["MyTestSite"], "Count": 1},
                }
            ]
        ),
    }


@pytest.fixture
def list_vdirs_srvmgr():
    # Mimics the JSON payload _srvmgr returns for Get-WebVirtualDirectory.
    return {
        "retcode": 0,
        "stdout": salt.utils.json.dumps(
            [{"name": "TestVdir", "physicalPath": r"C:\inetpub\vdirs\TestVdir"}]
        ),
    }
@pytest.fixture
def list_more_vdirs_srvmgr():
    # Same shape as list_vdirs_srvmgr but with a nested vdir added.
    return {
        "retcode": 0,
        "stdout": salt.utils.json.dumps(
            [
                {"name": "TestVdir", "physicalPath": r"C:\inetpub\vdirs\TestVdir"},
                {
                    "name": "Test/Nested/Vdir",
                    "physicalPath": r"C:\inetpub\vdirs\NestedTestVdir",
                },
            ]
        ),
    }


@pytest.fixture
def container_setting():
    return {
        "retcode": 0,
        "stdout": salt.utils.json.dumps([{"managedPipelineMode": "Integrated"}]),
    }


@pytest.fixture
def cert_binding_info():
    return "*:443:mytestsite.local"


def test_create_apppool():
    """
    Test - Create an IIS application pool.
    """
    with patch(
        "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0})
    ), patch(
        "salt.modules.win_iis.list_apppools", MagicMock(return_value=dict())
    ), patch.dict(
        win_iis.__salt__
    ):
        assert win_iis.create_apppool("MyTestPool")


def test_list_apppools(apppol_list, list_apppools_srvmgr):
    """
    Test - List all configured IIS application pools.
    """
    with patch.dict(win_iis.__salt__), patch(
        "salt.modules.win_iis._srvmgr", MagicMock(return_value=list_apppools_srvmgr)
    ):
        assert win_iis.list_apppools() == apppol_list


def test_remove_apppool():
    """
    Test - Remove an IIS application pool.
    """
    with patch.dict(win_iis.__salt__), patch(
        "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0})
    ), patch(
        "salt.modules.win_iis.list_apppools",
        MagicMock(
            return_value={"MyTestPool": {"applications": list(), "state": "Started"}}
        ),
    ):
        assert win_iis.remove_apppool("MyTestPool")


def test_restart_apppool():
    """
    Test - Restart an IIS application pool.
    """
    with patch.dict(win_iis.__salt__), patch(
        "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0})
    ):
        assert win_iis.restart_apppool("MyTestPool")


def test_create_site():
    """
    Test - Create a basic website in IIS.
    """
    kwargs = {
        "name": "MyTestSite",
        "sourcepath": r"C:\inetpub\wwwroot",
        "apppool": "MyTestPool",
        "hostheader": "mytestsite.local",
        "ipaddress": "*",
        "port": 80,
        "protocol": "http",
    }
    with patch.dict(win_iis.__salt__), patch(
        "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0})
    ), patch("salt.modules.win_iis.list_sites", MagicMock(return_value=dict())), patch(
        "salt.modules.win_iis.list_apppools", MagicMock(return_value=dict())
    ):
        assert win_iis.create_site(**kwargs)
+ """ + kwargs = { + "name": "MyTestSite", + "sourcepath": r"C:\inetpub\wwwroot", + "apppool": "MyTestPool", + "hostheader": "mytestsite.local", + "ipaddress": "*", + "port": 80, + "protocol": "http", + } + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch("salt.modules.win_iis.list_sites", MagicMock(return_value=dict())), patch( + "salt.modules.win_iis.list_apppools", MagicMock(return_value=dict()) + ): + assert win_iis.create_site(**kwargs) + + +def test_create_site_failed(): + """ + Test - Create a basic website in IIS using invalid data. + """ + kwargs = { + "name": "MyTestSite", + "sourcepath": r"C:\inetpub\wwwroot", + "apppool": "MyTestPool", + "hostheader": "mytestsite.local", + "ipaddress": "*", + "port": 80, + "protocol": "invalid-protocol-name", + } + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch("salt.modules.win_iis.list_sites", MagicMock(return_value=dict())), patch( + "salt.modules.win_iis.list_apppools", MagicMock(return_value=dict()) + ): + pytest.raises(SaltInvocationError, win_iis.create_site, **kwargs) + + +def test_remove_site(site_list): + """ + Test - Delete a website from IIS. + """ + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch("salt.modules.win_iis.list_sites", MagicMock(return_value=site_list)): + assert win_iis.remove_site("MyTestSite") + + +def test_create_app(app_list): + """ + Test - Create an IIS application. 
+ """ + kwargs = { + "name": "testApp", + "site": "MyTestSite", + "sourcepath": r"C:\inetpub\apps\testApp", + "apppool": "MyTestPool", + } + with patch.dict(win_iis.__salt__), patch( + "os.path.isdir", MagicMock(return_value=True) + ), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch( + "salt.modules.win_iis.list_apps", MagicMock(return_value=app_list) + ): + assert win_iis.create_app(**kwargs) + + +def test_list_apps(app_list, list_apps_srvmgr): + """ + Test - Get all configured IIS applications for the specified site. + """ + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value=list_apps_srvmgr) + ): + assert win_iis.list_apps("MyTestSite") == app_list + + +def test_remove_app(app_list): + """ + Test - Remove an IIS application. + """ + kwargs = {"name": "otherApp", "site": "MyTestSite"} + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch("salt.modules.win_iis.list_apps", MagicMock(return_value=app_list)): + assert win_iis.remove_app(**kwargs) + + +def test_create_binding(binding_list): + """ + Test - Create an IIS binding. + """ + kwargs = { + "site": "MyTestSite", + "hostheader": "", + "ipaddress": "*", + "port": 80, + "protocol": "http", + "sslflags": 0, + } + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch( + "salt.modules.win_iis.list_bindings", MagicMock(return_value=binding_list) + ): + assert win_iis.create_binding(**kwargs) + + +def test_create_binding_failed(binding_list): + """ + Test - Create an IIS binding using invalid data. 
+ """ + kwargs = { + "site": "MyTestSite", + "hostheader": "", + "ipaddress": "*", + "port": 80, + "protocol": "invalid-protocol-name", + "sslflags": 999, + } + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch( + "salt.modules.win_iis.list_bindings", MagicMock(return_value=binding_list) + ): + pytest.raises(SaltInvocationError, win_iis.create_binding, **kwargs) + + +def test_list_bindings(binding_list, site_list): + """ + Test - Get all configured IIS bindings for the specified site. + """ + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis.list_sites", MagicMock(return_value=site_list) + ): + assert win_iis.list_bindings("MyTestSite") == binding_list + + +def test_remove_binding(binding_list): + """ + Test - Remove an IIS binding. + """ + kwargs = { + "site": "MyTestSite", + "hostheader": "myothertestsite.local", + "ipaddress": "*", + "port": 443, + } + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch( + "salt.modules.win_iis.list_bindings", MagicMock(return_value=binding_list) + ): + assert win_iis.remove_binding(**kwargs) + + +def test_create_vdir(vdir_list): + """ + Test - Create an IIS virtual directory. + """ + kwargs = { + "name": "TestVdir", + "site": "MyTestSite", + "sourcepath": r"C:\inetpub\vdirs\TestVdir", + } + with patch.dict(win_iis.__salt__), patch( + "os.path.isdir", MagicMock(return_value=True) + ), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch( + "salt.modules.win_iis.list_vdirs", MagicMock(return_value=vdir_list) + ): + assert win_iis.create_vdir(**kwargs) + + +def test_list_vdirs(list_vdirs_srvmgr): + """ + Test - Get configured IIS virtual directories. 
+ """ + vdirs = {"TestVdir": {"sourcepath": r"C:\inetpub\vdirs\TestVdir"}} + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value=list_vdirs_srvmgr) + ): + assert win_iis.list_vdirs("MyTestSite") == vdirs + + +def test_remove_vdir(vdir_list): + """ + Test - Remove an IIS virtual directory. + """ + kwargs = {"name": "TestOtherVdir", "site": "MyTestSite"} + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch("salt.modules.win_iis.list_vdirs", MagicMock(return_value=vdir_list)): + assert win_iis.remove_vdir(**kwargs) + + +def test_create_nested_vdir(nested_vdir_list): + """ + Test - Create a nested IIS virtual directory. + """ + kwargs = { + "name": "Test/Nested/Vdir", + "site": "MyTestSite", + "sourcepath": r"C:\inetpub\vdirs\NestedTestVdir", + } + with patch.dict(win_iis.__salt__), patch( + "os.path.isdir", MagicMock(return_value=True) + ), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch( + "salt.modules.win_iis.list_vdirs", MagicMock(return_value=nested_vdir_list) + ): + assert win_iis.create_vdir(**kwargs) + + +def test_list_nested_vdirs(list_more_vdirs_srvmgr): + """ + Test - Get configured IIS virtual directories. + """ + vdirs = { + "TestVdir": {"sourcepath": r"C:\inetpub\vdirs\TestVdir"}, + "Test/Nested/Vdir": {"sourcepath": r"C:\inetpub\vdirs\NestedTestVdir"}, + } + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", + MagicMock(return_value=list_more_vdirs_srvmgr), + ): + assert win_iis.list_vdirs("MyTestSite") == vdirs + + +def test_create_cert_binding(binding_list, cert_binding_info): + """ + Test - Assign a certificate to an IIS binding. 
+ """ + kwargs = { + "name": "9988776655443322111000AAABBBCCCDDDEEEFFF", + "site": "MyTestSite", + "hostheader": "mytestsite.local", + "ipaddress": "*", + "port": 443, + } + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._list_certs", + MagicMock(return_value={"9988776655443322111000AAABBBCCCDDDEEEFFF": None}), + ), patch( + "salt.modules.win_iis._srvmgr", + MagicMock(return_value={"retcode": 0, "stdout": 10}), + ), patch( + "salt.utils.json.loads", + MagicMock(return_value=[{"MajorVersion": 10, "MinorVersion": 0}]), + ), patch( + "salt.modules.win_iis.list_bindings", MagicMock(return_value=binding_list) + ), patch( + "salt.modules.win_iis.list_cert_bindings", + MagicMock(return_value={cert_binding_info: binding_list[cert_binding_info]}), + ): + assert win_iis.create_cert_binding(**kwargs) + + +def test_list_cert_bindings(binding_list, site_list): + """ + Test - List certificate bindings for an IIS site. + """ + key = "*:443:mytestsite.local" + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis.list_sites", MagicMock(return_value=site_list) + ): + assert win_iis.list_cert_bindings("MyTestSite") == {key: binding_list[key]} + + +def test_remove_cert_binding(binding_list, cert_binding_info): + """ + Test - Remove a certificate from an IIS binding. + """ + kwargs = { + "name": "FFFEEEDDDCCCBBBAAA0001112233445566778899", + "site": "MyOtherTestSite", + "hostheader": "myothertestsite.local", + "ipaddress": "*", + "port": 443, + } + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch( + "salt.modules.win_iis.list_cert_bindings", + MagicMock(return_value={cert_binding_info: binding_list[cert_binding_info]}), + ): + assert win_iis.remove_cert_binding(**kwargs) + + +def test_get_container_setting(container_setting): + """ + Test - Get the value of the setting for the IIS container. 
+ """ + kwargs = { + "name": "MyTestSite", + "container": "AppPools", + "settings": ["managedPipelineMode"], + } + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value=container_setting) + ): + assert win_iis.get_container_setting(**kwargs) == { + "managedPipelineMode": "Integrated" + } + + +def test_set_container_setting(): + """ + Test - Set the value of the setting for an IIS container. + """ + kwargs = { + "name": "MyTestSite", + "container": "AppPools", + "settings": {"managedPipelineMode": "Integrated"}, + } + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) + ), patch( + "salt.modules.win_iis.get_container_setting", + MagicMock(return_value={"managedPipelineMode": "Integrated"}), + ): + assert win_iis.set_container_setting(**kwargs) + + +def test__collection_match_to_index(): + bad_match = {"key_0": "value"} + first_match = {"key_1": "value"} + second_match = {"key_2": "value"} + collection = [first_match, second_match] + settings = [{"name": "enabled", "value": collection}] + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis.get_webconfiguration_settings", + MagicMock(return_value=settings), + ): + ret = win_iis._collection_match_to_index( + "pspath", "colfilter", "name", bad_match + ) + assert ret == -1 + ret = win_iis._collection_match_to_index( + "pspath", "colfilter", "name", first_match + ) + assert ret == 0 + ret = win_iis._collection_match_to_index( + "pspath", "colfilter", "name", second_match + ) + assert ret == 1 + + +def test__prepare_settings(): + simple_setting = {"name": "value", "filter": "value"} + collection_setting = {"name": "Collection[{yaml:\n\tdata}]", "filter": "value"} + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._collection_match_to_index", MagicMock(return_value=0) + ): + ret = win_iis._prepare_settings( + "pspath", + [ + simple_setting, + collection_setting, + {"invalid": 
"setting"}, + {"name": "filter-less_setting"}, + ], + ) + assert ret == [simple_setting, collection_setting] + + +@patch("salt.modules.win_iis.log") +def test_get_webconfiguration_settings_empty(mock_log): + ret = win_iis.get_webconfiguration_settings("name", settings=[]) + mock_log.warning.assert_called_once_with("No settings provided") + assert ret == {} + + +def test_get_webconfiguration_settings(): + # Setup + name = "IIS" + collection_setting = {"name": "Collection[{yaml:\n\tdata}]", "filter": "value"} + filter_setting = { + "name": "enabled", + "filter": ( + "system.webServer / security / authentication / anonymousAuthentication" + ), + } + settings = [collection_setting, filter_setting] + + ps_cmd = [ + "$Settings = New-Object System.Collections.ArrayList;", + ] + for setting in settings: + ps_cmd.extend( + [ + "$Property = Get-WebConfigurationProperty -PSPath '{}'".format(name), + "-Name '{name}' -Filter '{filter}' -ErrorAction Stop;".format( + filter=setting["filter"], name=setting["name"] + ), + "if (([String]::IsNullOrEmpty($Property) -eq $False) -and", + "($Property.GetType()).Name -eq 'ConfigurationAttribute') {", + "$Property = $Property | Select-Object", + "-ExpandProperty Value };", + "$Settings.add(@{{filter='{filter}';name='{name}';value=[String]" + " $Property}})| Out-Null;".format( + filter=setting["filter"], name=setting["name"] + ), + "$Property = $Null;", + ] + ) + ps_cmd.append("$Settings") + + # Execute + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._prepare_settings", MagicMock(return_value=settings) + ), patch( + "salt.modules.win_iis._srvmgr", + MagicMock(return_value={"retcode": 0, "stdout": "{}"}), + ): + ret = win_iis.get_webconfiguration_settings(name, settings=settings) + + # Verify + win_iis._srvmgr.assert_called_with(cmd=ps_cmd, return_json=True) + assert ret == {} + + +@patch("salt.modules.win_iis.log") +def test_set_webconfiguration_settings_empty(mock_log): + ret = 
@patch("salt.modules.win_iis.log")
def test_set_webconfiguration_settings_no_changes(mock_log):
    # Setup
    name = "IIS"
    setting = {
        "name": "Collection[{yaml:\n\tdata}]",
        "filter": (
            "system.webServer / security / authentication / anonymousAuthentication"
        ),
        "value": [],
    }
    settings = [setting]

    # Execute — current settings already match, so nothing is changed.
    with patch.dict(win_iis.__salt__), patch(
        "salt.modules.win_iis._prepare_settings", MagicMock(return_value=settings)
    ), patch(
        "salt.modules.win_iis._srvmgr",
        MagicMock(return_value={"retcode": 0, "stdout": "{}"}),
    ), patch(
        "salt.modules.win_iis.get_webconfiguration_settings",
        MagicMock(return_value=settings),
    ):
        ret = win_iis.set_webconfiguration_settings(name, settings=settings)

    # Verify
    mock_log.debug.assert_called_with("Settings already contain the provided values.")
    assert ret is True


@patch("salt.modules.win_iis.log")
def test_set_webconfiguration_settings_failed(mock_log):
    # Setup
    name = "IIS"
    setting = {
        "name": "Collection[{yaml:\n\tdata}]",
        "filter": (
            "system.webServer / security / authentication / anonymousAuthentication"
        ),
        "value": [],
    }
    settings = [setting]

    # Execute — the post-change read-back does not match, so it fails.
    with patch.dict(win_iis.__salt__), patch(
        "salt.modules.win_iis._prepare_settings", MagicMock(return_value=settings)
    ), patch(
        "salt.modules.win_iis._srvmgr",
        MagicMock(return_value={"retcode": 0, "stdout": "{}"}),
    ), patch(
        "salt.modules.win_iis.get_webconfiguration_settings",
        MagicMock(side_effect=[[], [{"value": "unexpected_change!"}]]),
    ):
        ret = win_iis.set_webconfiguration_settings(name, settings=settings)

    # Verify
    assert ret is False
    mock_log.error.assert_called_with("Failed to change settings: %s", settings)


@patch("salt.modules.win_iis.log")
def test_set_webconfiguration_settings(mock_log):
    # Setup
    name = "IIS"
    setting = {
        "name": "Collection[{yaml:\n\tdata}]",
        "filter": (
            "system.webServer / security / authentication / anonymousAuthentication"
        ),
        "value": [],
    }
    settings = [setting]

    # Execute — the post-change read-back matches, so it succeeds.
    with patch.dict(win_iis.__salt__), patch(
        "salt.modules.win_iis._prepare_settings", MagicMock(return_value=settings)
    ), patch(
        "salt.modules.win_iis._srvmgr",
        MagicMock(return_value={"retcode": 0, "stdout": "{}"}),
    ), patch(
        "salt.modules.win_iis.get_webconfiguration_settings",
        MagicMock(side_effect=[[], settings]),
    ):
        ret = win_iis.set_webconfiguration_settings(name, settings=settings)

    # Verify
    assert ret is True
    mock_log.debug.assert_called_with("Settings configured successfully: %s", settings)
"IIS" + setting = { + "name": "Collection[{yaml:\n\tdata}]", + "filter": ( + "system.webServer / security / authentication / anonymousAuthentication" + ), + "value": [], + } + settings = [setting] + + # Execute + with patch.dict(win_iis.__salt__), patch( + "salt.modules.win_iis._prepare_settings", MagicMock(return_value=settings) + ), patch( + "salt.modules.win_iis._srvmgr", + MagicMock(return_value={"retcode": 0, "stdout": "{}"}), + ), patch( + "salt.modules.win_iis.get_webconfiguration_settings", + MagicMock(side_effect=[[], settings]), + ): + ret = win_iis.set_webconfiguration_settings(name, settings=settings) + + # Verify + assert ret is True + mock_log.debug.assert_called_with( + "Settings configured successfully: %s", settings + ) + + +def test_get_webconfiguration_settings_no_settings(): + assert win_iis.get_webconfiguration_settings("salt", {}) == {} + + +def test_get_webconfiguration_settings_pass(): + settings = [ + { + "name": "enabled", + "filter": ( + "system.webServer/security/authentication/anonymousAuthentication" + ), + } + ] + + ps_cmd_validate = [ + "Get-WebConfigurationProperty", + "-PSPath", + "'salt'", + "-Filter", + "'system.webServer/security/authentication/anonymousAuthentication'", + "-Name", + "'enabled'", + "-ErrorAction", + "Stop", + "|", + "Out-Null;", + ] + + ps_cmd = [ + "$Settings = New-Object System.Collections.ArrayList;", + "$Property = Get-WebConfigurationProperty -PSPath 'salt'", + "-Name 'enabled' -Filter" + " 'system.webServer/security/authentication/anonymousAuthentication'" + " -ErrorAction Stop;", + "if (([String]::IsNullOrEmpty($Property) -eq $False) -and", + "($Property.GetType()).Name -eq 'ConfigurationAttribute') {", + "$Property = $Property | Select-Object", + "-ExpandProperty Value };", + "$Settings.add(@{filter='system.webServer/security/authentication/anonymousAuthentication';name='enabled';value=[String]" + " $Property})| Out-Null;", + "$Property = $Null;", + "$Settings", + ] + + func_ret = {"name": "enabled", 
"value": True} + with patch.object( + win_iis, "_srvmgr", return_value={"retcode": 0, "stdout": "json data"} + ) as _srvmgr: + with patch.object( + win_iis.salt.utils.json, "loads", return_value=func_ret + ) as loads: + ret = win_iis.get_webconfiguration_settings("salt", settings) + + assert _srvmgr.call_count == 2 + assert _srvmgr.mock_calls[0] == call(cmd=ps_cmd_validate, return_json=True) + assert _srvmgr.mock_calls[1] == call(cmd=ps_cmd, return_json=True) + + loads.assert_called_once_with("json data", strict=False) + assert func_ret == ret + + +def test_set_webconfiguration_settings_no_settings(): + assert win_iis.set_webconfiguration_settings("salt", {}) is False + + +def test_set_webconfiguration_settings_pass(): + settings = [ + { + "name": "enabled", + "filter": ( + "system.webServer/security/authentication/anonymousAuthentication" + ), + "value": False, + } + ] + + current_settings = [ + { + "name": "enabled", + "filter": ( + "system.webServer/security/authentication/anonymousAuthentication" + ), + "value": True, + } + ] + + new_settings = [ + { + "name": "enabled", + "filter": ( + "system.webServer/security/authentication/anonymousAuthentication" + ), + "value": False, + } + ] + + ps_cmd = [ + "Set-WebConfigurationProperty", + "-PSPath", + "'salt'", + "-Filter", + "'system.webServer/security/authentication/anonymousAuthentication'", + "-Name", + "'enabled'", + "-Value", + "'False';", + ] + + with patch.object( + win_iis, + "get_webconfiguration_settings", + side_effect=[current_settings, new_settings], + ) as get_webconfiguration_settings: + with patch.object(win_iis, "_srvmgr", return_value={"retcode": 0}) as _srvmgr: + ret = win_iis.set_webconfiguration_settings("salt", settings) + + assert get_webconfiguration_settings.call_count == 2 + assert get_webconfiguration_settings.mock_calls[0] == call( + name="salt", settings=settings + ) + assert get_webconfiguration_settings.mock_calls[1] == call( + name="salt", settings=settings + ) + + 
_srvmgr.assert_called_once_with(ps_cmd) + + assert ret + + +def test_set_webconfiguration_settings_fail(): + settings = [ + { + "name": "enabled", + "filter": ( + "system.webServer/security/authentication/anonymousAuthentication" + ), + "value": False, + } + ] + + current_settings = [ + { + "name": "enabled", + "filter": ( + "system.webServer/security/authentication/anonymousAuthentication" + ), + "value": True, + } + ] + + new_settings = [ + { + "name": "enabled", + "filter": ( + "system.webServer/security/authentication/anonymousAuthentication" + ), + "value": True, + } + ] + + ps_cmd = [ + "Set-WebConfigurationProperty", + "-PSPath", + "'salt'", + "-Filter", + "'system.webServer/security/authentication/anonymousAuthentication'", + "-Name", + "'enabled'", + "-Value", + "'False';", + ] + + with patch.object( + win_iis, + "get_webconfiguration_settings", + side_effect=[current_settings, new_settings], + ) as get_webconfiguration_settings: + with patch.object(win_iis, "_srvmgr", return_value={"retcode": 0}) as _srvmgr: + ret = win_iis.set_webconfiguration_settings("salt", settings) + + assert get_webconfiguration_settings.call_count == 2 + assert get_webconfiguration_settings.mock_calls[0] == call( + name="salt", settings=settings + ) + assert get_webconfiguration_settings.mock_calls[1] == call( + name="salt", settings=settings + ) + + _srvmgr.assert_called_once_with(ps_cmd) + + assert not ret diff --git a/tests/pytests/unit/modules/test_win_ip.py b/tests/pytests/unit/modules/test_win_ip.py new file mode 100644 index 000000000000..94a3fe7ca938 --- /dev/null +++ b/tests/pytests/unit/modules/test_win_ip.py @@ -0,0 +1,396 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.modules.win_ip +""" + +import pytest + +import salt.modules.win_ip as win_ip +from salt.exceptions import CommandExecutionError, SaltInvocationError +from tests.support.mock import MagicMock, call, patch + + +@pytest.fixture +def configure_loader_modules(): + return {win_ip: {}} + + 
@pytest.fixture
def ethernet_config():
    # Sample `netsh` interface configuration output for one adapter.
    return (
        'Configuration for interface "Ethernet"\n'
        "DHCP enabled: Yes\n"
        "IP Address: 1.2.3.74\n"
        "Subnet Prefix: 1.2.3.0/24 (mask 255.255.255.0)\n"
        "Default Gateway: 1.2.3.1\n"
        "Gateway Metric: 0\n"
        "InterfaceMetric: 20\n"
        "DNS servers configured through DHCP: 1.2.3.4\n"
        "Register with which suffix: Primary only\n"
        "WINS servers configured through DHCP: None\n"
    )


@pytest.fixture
def ethernet_enable():
    # Sample `netsh` interface state output for an enabled adapter.
    return "Ethernet\nType: Dedicated\nAdministrative state: Enabled\nConnect state: Connected"


# 'raw_interface_configs' function tests: 1


def test_raw_interface_configs(ethernet_config):
    """
    Test if it return raw configs for all interfaces.
    """
    mock_cmd = MagicMock(return_value=ethernet_config)
    with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}):
        assert win_ip.raw_interface_configs() == ethernet_config


# 'get_all_interfaces' function tests: 1


def test_get_all_interfaces(ethernet_config):
    """
    Test if it return configs for all interfaces.
    """
    ret = {
        "Ethernet": {
            "DHCP enabled": "Yes",
            "DNS servers configured through DHCP": ["1.2.3.4"],
            "Default Gateway": "1.2.3.1",
            "Gateway Metric": "0",
            "InterfaceMetric": "20",
            "Register with which suffix": "Primary only",
            "WINS servers configured through DHCP": ["None"],
            "ip_addrs": [
                {
                    "IP Address": "1.2.3.74",
                    "Netmask": "255.255.255.0",
                    "Subnet": "1.2.3.0/24",
                }
            ],
        }
    }

    mock_cmd = MagicMock(return_value=ethernet_config)
    with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}):
        assert win_ip.get_all_interfaces() == ret


# 'get_interface' function tests: 1


def test_get_interface(ethernet_config):
    """
    Test if it return the configuration of a network interface.
    """
    ret = {
        "DHCP enabled": "Yes",
        "DNS servers configured through DHCP": ["1.2.3.4"],
        "Default Gateway": "1.2.3.1",
        "Gateway Metric": "0",
        "InterfaceMetric": "20",
        "Register with which suffix": "Primary only",
        "WINS servers configured through DHCP": ["None"],
        "ip_addrs": [
            {
                "IP Address": "1.2.3.74",
                "Netmask": "255.255.255.0",
                "Subnet": "1.2.3.0/24",
            }
        ],
    }

    mock_cmd = MagicMock(return_value=ethernet_config)
    with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}):
        assert win_ip.get_interface("Ethernet") == ret
+ """ + ret = { + "DHCP enabled": "Yes", + "DNS servers configured through DHCP": ["1.2.3.4"], + "Default Gateway": "1.2.3.1", + "Gateway Metric": "0", + "InterfaceMetric": "20", + "Register with which suffix": "Primary only", + "WINS servers configured through DHCP": ["None"], + "ip_addrs": [ + { + "IP Address": "1.2.3.74", + "Netmask": "255.255.255.0", + "Subnet": "1.2.3.0/24", + } + ], + } + + mock_cmd = MagicMock(return_value=ethernet_config) + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): + assert win_ip.get_interface("Ethernet") == ret + + +# 'is_enabled' function tests: 1 + + +def test_is_enabled(ethernet_enable): + """ + Test if it returns `True` if interface is enabled, otherwise `False`. + """ + mock_cmd = MagicMock(side_effect=[ethernet_enable, ""]) + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): + assert win_ip.is_enabled("Ethernet") + pytest.raises(CommandExecutionError, win_ip.is_enabled, "Ethernet") + + +# 'is_disabled' function tests: 1 + + +def test_is_disabled(ethernet_enable): + """ + Test if it returns `True` if interface is disabled, otherwise `False`. + """ + mock_cmd = MagicMock(return_value=ethernet_enable) + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): + assert not win_ip.is_disabled("Ethernet") + + +# 'enable' function tests: 1 + + +def test_enable(): + """ + Test if it enable an interface. + """ + # Test with enabled interface + with patch.object(win_ip, "is_enabled", return_value=True): + assert win_ip.enable("Ethernet") + + mock_cmd = MagicMock() + with patch.object(win_ip, "is_enabled", side_effect=[False, True]), patch.dict( + win_ip.__salt__, {"cmd.run": mock_cmd} + ): + assert win_ip.enable("Ethernet") + + mock_cmd.assert_called_once_with( + [ + "netsh", + "interface", + "set", + "interface", + "name=Ethernet", + "admin=ENABLED", + ], + python_shell=False, + ) + + +# 'disable' function tests: 1 + + +def test_disable(): + """ + Test if it disable an interface. 
+ """ + with patch.object(win_ip, "is_disabled", return_value=True): + assert win_ip.disable("Ethernet") + + mock_cmd = MagicMock() + with patch.object(win_ip, "is_disabled", side_effect=[False, True]), patch.dict( + win_ip.__salt__, {"cmd.run": mock_cmd} + ): + assert win_ip.disable("Ethernet") + + mock_cmd.assert_called_once_with( + [ + "netsh", + "interface", + "set", + "interface", + "name=Ethernet", + "admin=DISABLED", + ], + python_shell=False, + ) + + +# 'get_subnet_length' function tests: 1 + + +def test_get_subnet_length(): + """ + Test if it disable an interface. + """ + assert win_ip.get_subnet_length("255.255.255.0") == 24 + pytest.raises(SaltInvocationError, win_ip.get_subnet_length, "255.255.0") + + +# 'set_static_ip' function tests: 1 + + +@pytest.mark.slow_test +def test_set_static_ip(ethernet_config): + """ + Test if it set static IP configuration on a Windows NIC. + """ + pytest.raises( + SaltInvocationError, + win_ip.set_static_ip, + "Local Area Connection", + "10.1.2/24", + ) + + mock_cmd = MagicMock(return_value=ethernet_config) + mock_all = MagicMock(return_value={"retcode": 1, "stderr": "Error"}) + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd, "cmd.run_all": mock_all}): + pytest.raises( + CommandExecutionError, + win_ip.set_static_ip, + "Ethernet", + "1.2.3.74/24", + append=True, + ) + pytest.raises( + CommandExecutionError, win_ip.set_static_ip, "Ethernet", "1.2.3.74/24" + ) + + mock_all = MagicMock(return_value={"retcode": 0}) + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd, "cmd.run_all": mock_all}): + assert win_ip.set_static_ip("Local Area Connection", "1.2.3.74/24") == {} + assert win_ip.set_static_ip("Ethernet", "1.2.3.74/24") == { + "Address Info": { + "IP Address": "1.2.3.74", + "Netmask": "255.255.255.0", + "Subnet": "1.2.3.0/24", + } + } + + +# 'set_dhcp_ip' function tests: 1 + + +def test_set_dhcp_ip(ethernet_config): + """ + Test if it set Windows NIC to get IP from DHCP. 
+ """ + mock_cmd = MagicMock(return_value=ethernet_config) + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): + assert win_ip.set_dhcp_ip("Ethernet") == { + "DHCP enabled": "Yes", + "Interface": "Ethernet", + } + + +# 'set_static_dns' function tests: 1 + + +def test_set_static_dns(): + """ + Test if it set static DNS configuration on a Windows NIC. + """ + mock_cmd = MagicMock() + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): + assert win_ip.set_static_dns("Ethernet", "192.168.1.252", "192.168.1.253") == { + "DNS Server": ("192.168.1.252", "192.168.1.253"), + "Interface": "Ethernet", + } + mock_cmd.assert_has_calls( + [ + call( + [ + "netsh", + "interface", + "ip", + "set", + "dns", + "name=Ethernet", + "source=static", + "address=192.168.1.252", + "register=primary", + ], + python_shell=False, + ), + call( + [ + "netsh", + "interface", + "ip", + "add", + "dns", + "name=Ethernet", + "address=192.168.1.253", + "index=2", + ], + python_shell=False, + ), + ] + ) + + +def test_set_static_dns_clear(): + """ + Test if it set static DNS configuration on a Windows NIC. + """ + mock_cmd = MagicMock() + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): + assert win_ip.set_static_dns("Ethernet", []) == { + "DNS Server": [], + "Interface": "Ethernet", + } + mock_cmd.assert_called_once_with( + [ + "netsh", + "interface", + "ip", + "set", + "dns", + "name=Ethernet", + "source=static", + "address=none", + ], + python_shell=False, + ) + + +def test_set_static_dns_no_action(): + """ + Test if it set static DNS configuration on a Windows NIC. 
+ """ + # Test passing nothing + assert win_ip.set_static_dns("Ethernet") == { + "DNS Server": "No Changes", + "Interface": "Ethernet", + } + # Test passing None + assert win_ip.set_static_dns("Ethernet", None) == { + "DNS Server": "No Changes", + "Interface": "Ethernet", + } + + # Test passing string None + assert win_ip.set_static_dns("Ethernet", "None") == { + "DNS Server": "No Changes", + "Interface": "Ethernet", + } + + +# 'set_dhcp_dns' function tests: 1 + + +def test_set_dhcp_dns(ethernet_config): + """ + Test if it set DNS source to DHCP on Windows. + """ + mock_cmd = MagicMock(return_value=ethernet_config) + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): + assert win_ip.set_dhcp_dns("Ethernet") == { + "DNS Server": "DHCP", + "Interface": "Ethernet", + } + + +# 'set_dhcp_all' function tests: 1 + + +def test_set_dhcp_all(ethernet_config): + """ + Test if it set both IP Address and DNS to DHCP. + """ + mock_cmd = MagicMock(return_value=ethernet_config) + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): + assert win_ip.set_dhcp_all("Ethernet") == { + "Interface": "Ethernet", + "DNS Server": "DHCP", + "DHCP enabled": "Yes", + } + + +# 'get_default_gateway' function tests: 1 + + +def test_get_default_gateway(ethernet_config): + """ + Test if it set DNS source to DHCP on Windows. 
+ """ + mock_cmd = MagicMock(return_value=ethernet_config) + with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): + assert win_ip.get_default_gateway() == "1.2.3.1" diff --git a/tests/pytests/unit/modules/test_win_lgpo_reg.py b/tests/pytests/unit/modules/test_win_lgpo_reg.py index df0b4624eae8..04284ee2727e 100644 --- a/tests/pytests/unit/modules/test_win_lgpo_reg.py +++ b/tests/pytests/unit/modules/test_win_lgpo_reg.py @@ -30,7 +30,7 @@ def configure_loader_modules(): @pytest.fixture -def empty_reg_pol(): +def empty_reg_pol_mach(): class_info = salt.utils.win_lgpo_reg.CLASS_INFO reg_pol_file = pathlib.Path(class_info["Machine"]["policy_path"]) if not reg_pol_file.parent.exists(): @@ -47,7 +47,24 @@ def empty_reg_pol(): @pytest.fixture -def reg_pol(): +def empty_reg_pol_user(): + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = pathlib.Path(class_info["User"]["policy_path"]) + if not reg_pol_file.parent.exists(): + reg_pol_file.parent.mkdir(parents=True) + with salt.utils.files.fopen(str(reg_pol_file), "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + yield + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + with salt.utils.files.fopen(str(reg_pol_file), "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + + +@pytest.fixture +def reg_pol_mach(): data_to_write = { "SOFTWARE\\MyKey1": { "MyValue1": { @@ -90,17 +107,151 @@ def reg_pol(): f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) -def test_read_reg_pol(empty_reg_pol): - expected = {} - result = lgpo_reg.read_reg_pol() - assert result == expected +@pytest.fixture +def reg_pol_user(): + data_to_write = { + "SOFTWARE\\MyKey1": { + "MyValue1": { + 
"data": "squidward", + "type": "REG_SZ", + }, + "**del.MyValue2": { + "data": " ", + "type": "REG_SZ", + }, + }, + "SOFTWARE\\MyKey2": { + "MyValue3": { + "data": ["spongebob", "squarepants"], + "type": "REG_MULTI_SZ", + }, + }, + } + lgpo_reg.write_reg_pol(data_to_write, policy_class="User") + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey1", + vname="MyValue1", + vdata="squidward", + vtype="REG_SZ", + ) + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey2", + vname="MyValue3", + vdata=["spongebob", "squarepants"], + vtype="REG_MULTI_SZ", + ) + yield + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = class_info["User"]["policy_path"] + with salt.utils.files.fopen(reg_pol_file, "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + + +def test_invalid_policy_class_delete_value(): + pytest.raises( + SaltInvocationError, + lgpo_reg.delete_value, + key="", + v_name="", + policy_class="Invalid", + ) + + +def test_invalid_policy_class_disable_value(): + pytest.raises( + SaltInvocationError, + lgpo_reg.disable_value, + key="", + v_name="", + policy_class="Invalid", + ) + + +def test_invalid_policy_class_get_key(): + pytest.raises(SaltInvocationError, lgpo_reg.get_key, key="", policy_class="Invalid") -def test_read_reg_pol_invalid_policy_class(): +def test_invalid_policy_class_get_value(): + pytest.raises( + SaltInvocationError, + lgpo_reg.get_value, + key="", + v_name="", + policy_class="Invalid", + ) + + +def test_invalid_policy_class_read_reg_pol(): pytest.raises(SaltInvocationError, lgpo_reg.read_reg_pol, policy_class="Invalid") -def test_write_reg_pol(empty_reg_pol): +def test_invalid_policy_class_set_value(): + pytest.raises( + SaltInvocationError, + lgpo_reg.set_value, + key="", + v_name="", + v_data="", + 
policy_class="Invalid", + ) + + +def test_invalid_policy_class_write_reg_pol(): + pytest.raises( + SaltInvocationError, lgpo_reg.write_reg_pol, data={}, policy_class="Invalid" + ) + + +def test_set_value_invalid_reg_type(): + pytest.raises( + SaltInvocationError, + lgpo_reg.set_value, + key="", + v_name="", + v_data="", + v_type="REG_INVALID", + ) + + +def test_set_value_invalid_reg_sz(): + pytest.raises( + SaltInvocationError, + lgpo_reg.set_value, + key="", + v_name="", + v_data=[], + v_type="REG_SZ", + ) + + +def test_set_value_invalid_reg_multi_sz(): + pytest.raises( + SaltInvocationError, + lgpo_reg.set_value, + key="", + v_name="", + v_data=1, + v_type="REG_MULTI_SZ", + ) + + +def test_set_value_invalid_reg_dword(): + pytest.raises( + SaltInvocationError, lgpo_reg.set_value, key="", v_name="", v_data="string" + ) + + +def test_mach_read_reg_pol(empty_reg_pol_mach): + expected = {} + result = lgpo_reg.read_reg_pol() + assert result == expected + + +def test_mach_write_reg_pol(empty_reg_pol_mach): data_to_write = { r"SOFTWARE\MyKey": { "MyValue": { @@ -114,29 +265,13 @@ def test_write_reg_pol(empty_reg_pol): assert result == data_to_write -def test_write_reg_pol_invalid_policy_class(): - pytest.raises( - SaltInvocationError, lgpo_reg.write_reg_pol, data={}, policy_class="Invalid" - ) - - -def test_get_value(reg_pol): +def test_mach_get_value(reg_pol_mach): expected = {"data": "squidward", "type": "REG_SZ"} result = lgpo_reg.get_value(key="SOFTWARE\\MyKey1", v_name="MyValue1") assert result == expected -def test_get_value_invalid_policy_class(): - pytest.raises( - SaltInvocationError, - lgpo_reg.get_value, - key="", - v_name="", - policy_class="Invalid", - ) - - -def test_get_key(reg_pol): +def test_mach_get_key(reg_pol_mach): expected = { "MyValue3": { "data": ["spongebob", "squarepants"], @@ -147,17 +282,17 @@ def test_get_key(reg_pol): assert result == expected -def test_get_key_invalid_policy_class(): - pytest.raises(SaltInvocationError, lgpo_reg.get_key, 
key="", policy_class="Invalid") - - -def test_set_value(empty_reg_pol): - expected = {"data": 1, "type": "REG_DWORD"} +def test_mach_set_value(empty_reg_pol_mach): key = "SOFTWARE\\MyKey" v_name = "MyValue" - lgpo_reg.set_value(key=key, v_name=v_name, v_data="1") + # Test command return + result = lgpo_reg.set_value(key=key, v_name=v_name, v_data="1") + assert result is True + # Test value actually set in Registry.pol + expected = {"data": 1, "type": "REG_DWORD"} result = lgpo_reg.get_value(key=key, v_name=v_name) assert result == expected + # Test that the registry value has been set expected = { "hive": "HKLM", "key": key, @@ -170,7 +305,7 @@ def test_set_value(empty_reg_pol): assert result == expected -def test_set_value_existing_change(reg_pol): +def test_mach_set_value_existing_change(reg_pol_mach): expected = {"data": 1, "type": "REG_DWORD"} key = "SOFTWARE\\MyKey" v_name = "MyValue1" @@ -189,7 +324,7 @@ def test_set_value_existing_change(reg_pol): assert result == expected -def test_set_value_existing_no_change(reg_pol): +def test_mach_set_value_existing_no_change(reg_pol_mach): expected = {"data": "squidward", "type": "REG_SZ"} key = "SOFTWARE\\MyKey" v_name = "MyValue1" @@ -198,118 +333,214 @@ def test_set_value_existing_no_change(reg_pol): assert result == expected -def test_set_value_invalid_policy_class(): - pytest.raises( - SaltInvocationError, - lgpo_reg.set_value, - key="", - v_name="", - v_data="", - policy_class="Invalid", - ) +def test_mach_disable_value(reg_pol_mach): + key = "SOFTWARE\\MyKey1" + # Test that the command completed successfully + result = lgpo_reg.disable_value(key=key, v_name="MyValue1") + assert result is True + # Test that the value was actually set in Registry.pol + expected = { + "**del.MyValue1": {"data": " ", "type": "REG_SZ"}, + "**del.MyValue2": {"data": " ", "type": "REG_SZ"}, + } + result = lgpo_reg.get_key(key=key) + assert result == expected + # Test that the registry value has been removed + result = 
salt.utils.win_reg.value_exists(hive="HKLM", key=key, vname="MyValue1") + assert result is False -def test_set_value_invalid_reg_type(): - pytest.raises( - SaltInvocationError, - lgpo_reg.set_value, - key="", - v_name="", - v_data="", - v_type="REG_INVALID", - ) +def test_mach_disable_value_no_change(reg_pol_mach): + expected = { + "MyValue1": {"data": "squidward", "type": "REG_SZ"}, + "**del.MyValue2": {"data": " ", "type": "REG_SZ"}, + } + key = "SOFTWARE\\MyKey1" + lgpo_reg.disable_value(key=key, v_name="MyValue2") + result = lgpo_reg.get_key(key=key) + assert result == expected -def test_set_value_invalid_reg_sz(): - pytest.raises( - SaltInvocationError, - lgpo_reg.set_value, - key="", - v_name="", - v_data=[], - v_type="REG_SZ", +def test_mach_delete_value_existing(reg_pol_mach): + key = "SOFTWARE\\MyKey1" + # Test that the command completes successfully + result = lgpo_reg.delete_value(key=key, v_name="MyValue1") + assert result is True + # Test that the value is actually removed from Registry.pol + expected = { + "**del.MyValue2": { + "data": " ", + "type": "REG_SZ", + }, + } + result = lgpo_reg.get_key(key=key) + assert result == expected + # Test that the registry entry has been removed + result = salt.utils.win_reg.value_exists(hive="HKLM", key=key, vname="MyValue2") + assert result is False + + +def test_mach_delete_value_no_change(empty_reg_pol_mach): + expected = {} + key = "SOFTWARE\\MyKey1" + lgpo_reg.delete_value(key=key, v_name="MyValue2") + result = lgpo_reg.get_key(key=key) + assert result == expected + + +def test_user_read_reg_pol(empty_reg_pol_user): + expected = {} + result = lgpo_reg.read_reg_pol(policy_class="User") + assert result == expected + + +def test_user_write_reg_pol(empty_reg_pol_user): + data_to_write = { + r"SOFTWARE\MyKey": { + "MyValue": { + "data": "string", + "type": "REG_SZ", + }, + }, + } + lgpo_reg.write_reg_pol(data_to_write, policy_class="User") + result = lgpo_reg.read_reg_pol(policy_class="User") + assert result == 
data_to_write + + +def test_user_get_value(reg_pol_user): + expected = {"data": "squidward", "type": "REG_SZ"} + result = lgpo_reg.get_value( + key="SOFTWARE\\MyKey1", + v_name="MyValue1", + policy_class="User", ) + assert result == expected -def test_set_value_invalid_reg_multi_sz(): - pytest.raises( - SaltInvocationError, - lgpo_reg.set_value, - key="", - v_name="", - v_data=1, - v_type="REG_MULTI_SZ", +def test_user_get_key(reg_pol_user): + expected = { + "MyValue3": { + "data": ["spongebob", "squarepants"], + "type": "REG_MULTI_SZ", + }, + } + result = lgpo_reg.get_key(key="SOFTWARE\\MyKey2", policy_class="User") + assert result == expected + + +def test_user_set_value(empty_reg_pol_user): + key = "SOFTWARE\\MyKey" + v_name = "MyValue" + # Test command return + result = lgpo_reg.set_value( + key=key, + v_name=v_name, + v_data="1", + policy_class="User", ) + assert result is True + # Test value actually set in Registry.pol + expected = {"data": 1, "type": "REG_DWORD"} + result = lgpo_reg.get_value(key=key, v_name=v_name, policy_class="User") + assert result == expected + # Test that the registry value has been set + expected = { + "hive": "HKCU", + "key": key, + "vname": v_name, + "vdata": 1, + "vtype": "REG_DWORD", + "success": True, + } + result = salt.utils.win_reg.read_value(hive="HKCU", key=key, vname=v_name) + assert result == expected -def test_set_value_invalid_reg_dword(): - pytest.raises( - SaltInvocationError, lgpo_reg.set_value, key="", v_name="", v_data="string" +def test_user_set_value_existing_change(reg_pol_user): + expected = {"data": 1, "type": "REG_DWORD"} + key = "SOFTWARE\\MyKey" + v_name = "MyValue1" + lgpo_reg.set_value(key=key, v_name=v_name, v_data="1", policy_class="User") + result = lgpo_reg.get_value(key=key, v_name=v_name, policy_class="User") + assert result == expected + expected = { + "hive": "HKCU", + "key": key, + "vname": v_name, + "vdata": 1, + "vtype": "REG_DWORD", + "success": True, + } + result = 
salt.utils.win_reg.read_value(hive="HKCU", key=key, vname=v_name) + assert result == expected + + +def test_user_set_value_existing_no_change(reg_pol_user): + expected = {"data": "squidward", "type": "REG_SZ"} + key = "SOFTWARE\\MyKey" + v_name = "MyValue1" + lgpo_reg.set_value( + key=key, + v_name=v_name, + v_data="squidward", + v_type="REG_SZ", + policy_class="User", ) + result = lgpo_reg.get_value(key=key, v_name=v_name, policy_class="User") + assert result == expected -def test_disable_value(reg_pol): +def test_user_disable_value(reg_pol_user): + key = "SOFTWARE\\MyKey1" + # Test that the command completed successfully + result = lgpo_reg.disable_value(key=key, v_name="MyValue1", policy_class="User") + assert result is True + # Test that the value was actually set in Registry.pol expected = { "**del.MyValue1": {"data": " ", "type": "REG_SZ"}, "**del.MyValue2": {"data": " ", "type": "REG_SZ"}, } - key = "SOFTWARE\\MyKey1" - lgpo_reg.disable_value(key=key, v_name="MyValue1") - result = lgpo_reg.get_key(key=key) + result = lgpo_reg.get_key(key=key, policy_class="User") assert result == expected - result = salt.utils.win_reg.value_exists(hive="HKLM", key=key, vname="MyValue1") + # Test that the registry value has been removed + result = salt.utils.win_reg.value_exists(hive="HKCU", key=key, vname="MyValue1") assert result is False -def test_disable_value_no_change(reg_pol): +def test_user_disable_value_no_change(reg_pol_user): expected = { "MyValue1": {"data": "squidward", "type": "REG_SZ"}, "**del.MyValue2": {"data": " ", "type": "REG_SZ"}, } key = "SOFTWARE\\MyKey1" - lgpo_reg.disable_value(key=key, v_name="MyValue2") - result = lgpo_reg.get_key(key=key) + lgpo_reg.disable_value(key=key, v_name="MyValue2", policy_class="User") + result = lgpo_reg.get_key(key=key, policy_class="User") assert result == expected -def test_disable_value_invalid_policy_class(): - pytest.raises( - SaltInvocationError, - lgpo_reg.disable_value, - key="", - v_name="", - 
policy_class="Invalid", - ) - - -def test_delete_value_existing(reg_pol): +def test_user_delete_value_existing(reg_pol_user): + key = "SOFTWARE\\MyKey1" + # Test that the command completes successfully + result = lgpo_reg.delete_value(key=key, v_name="MyValue1", policy_class="User") + assert result is True + # Test that the value is actually removed from Registry.pol expected = { "**del.MyValue2": { "data": " ", "type": "REG_SZ", }, } - key = "SOFTWARE\\MyKey1" - lgpo_reg.delete_value(key=key, v_name="MyValue1") - result = lgpo_reg.get_key(key=key) + result = lgpo_reg.get_key(key=key, policy_class="User") assert result == expected - result = salt.utils.win_reg.value_exists(hive="HKLM", key=key, vname="MyValue2") + # Test that the registry entry has been removed + result = salt.utils.win_reg.value_exists(hive="HKCU", key=key, vname="MyValue2") assert result is False -def test_delete_value_no_change(empty_reg_pol): +def test_user_delete_value_no_change(empty_reg_pol_user): expected = {} key = "SOFTWARE\\MyKey1" - lgpo_reg.delete_value(key=key, v_name="MyValue2") - result = lgpo_reg.get_key(key=key) + lgpo_reg.delete_value(key=key, v_name="MyValue2", policy_class="User") + result = lgpo_reg.get_key(key=key, policy_class="User") assert result == expected - - -def test_delete_value_invalid_policy_class(): - pytest.raises( - SaltInvocationError, - lgpo_reg.delete_value, - key="", - v_name="", - policy_class="Invalid", - ) diff --git a/tests/pytests/unit/modules/test_win_licence.py b/tests/pytests/unit/modules/test_win_licence.py new file mode 100644 index 000000000000..2948be088f08 --- /dev/null +++ b/tests/pytests/unit/modules/test_win_licence.py @@ -0,0 +1,102 @@ +""" + Test cases for salt.modules.win_licence +""" + + +import pytest + +import salt.modules.win_license as win_license +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {win_license: {}} + + +def test_installed(): + """ + Test to see if the given 
license key is installed + """ + mock = MagicMock(return_value="Partial Product Key: ABCDE") + with patch.dict(win_license.__salt__, {"cmd.run": mock}): + out = win_license.installed("AAAAA-AAAAA-AAAAA-AAAA-AAAAA-ABCDE") + mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /dli") + assert out + + +def test_installed_diff(): + """ + Test to see if the given license key is installed when the key is different + """ + mock = MagicMock(return_value="Partial Product Key: 12345") + with patch.dict(win_license.__salt__, {"cmd.run": mock}): + out = win_license.installed("AAAAA-AAAAA-AAAAA-AAAA-AAAAA-ABCDE") + mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /dli") + assert not out + + +def test_install(): + """ + Test installing the given product key + """ + mock = MagicMock() + with patch.dict(win_license.__salt__, {"cmd.run": mock}): + win_license.install("AAAAA-AAAAA-AAAAA-AAAA-AAAAA-ABCDE") + mock.assert_called_once_with( + r"cscript C:\Windows\System32\slmgr.vbs /ipk " + "AAAAA-AAAAA-AAAAA-AAAA-AAAAA-ABCDE" + ) + + +def test_uninstall(): + """ + Test uninstalling the given product key + """ + mock = MagicMock() + with patch.dict(win_license.__salt__, {"cmd.run": mock}): + win_license.uninstall() + mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /upk") + + +def test_activate(): + """ + Test activating the current product key + """ + mock = MagicMock() + with patch.dict(win_license.__salt__, {"cmd.run": mock}): + win_license.activate() + mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /ato") + + +def test_licensed(): + """ + Test checking if the minion is licensed + """ + mock = MagicMock(return_value="License Status: Licensed") + with patch.dict(win_license.__salt__, {"cmd.run": mock}): + win_license.licensed() + mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /dli") + + +def test_info(): + """ + Test getting the info about the current license key + """ + expected = { + 
"description": "Prof", + "licensed": True, + "name": "Win7", + "partial_key": "12345", + } + + mock = MagicMock( + return_value=( + "Name: Win7\r\nDescription: Prof\r\nPartial Product Key: 12345\r\n" + "License Status: Licensed" + ) + ) + with patch.dict(win_license.__salt__, {"cmd.run": mock}): + out = win_license.info() + mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /dli") + assert out == expected diff --git a/tests/pytests/unit/modules/test_win_pkg.py b/tests/pytests/unit/modules/test_win_pkg.py index 168c24aff146..9ef693a21f7c 100644 --- a/tests/pytests/unit/modules/test_win_pkg.py +++ b/tests/pytests/unit/modules/test_win_pkg.py @@ -6,6 +6,7 @@ import pytest import salt.modules.config as config +import salt.modules.cp as cp import salt.modules.pkg_resource as pkg_resource import salt.modules.win_pkg as win_pkg import salt.utils.data @@ -21,8 +22,17 @@ @pytest.fixture -def configure_loader_modules(): +def configure_loader_modules(minion_opts): pkg_info = { + "latest": { + "full_name": "Nullsoft Install System", + "installer": "http://download.sourceforge.net/project/nsis/nsis-setup.exe", + "install_flags": "/S", + "uninstaller": "%PROGRAMFILES(x86)%\\NSIS\\uninst-nsis.exe", + "uninstall_flags": "/S", + "msiexec": False, + "reboot": False, + }, "3.03": { "full_name": "Nullsoft Install System", "installer": "http://download.sourceforge.net/project/nsis/NSIS%203/3.03/nsis-3.03-setup.exe", @@ -43,16 +53,20 @@ def configure_loader_modules(): }, } + opts = minion_opts + opts["master_uri"] = "localhost" return { + cp: {"__opts__": opts}, win_pkg: { "_get_latest_package_version": MagicMock(return_value="3.03"), "_get_package_info": MagicMock(return_value=pkg_info), "__salt__": { + "config.valid_fileproto": config.valid_fileproto, + "cp.hash_file": cp.hash_file, "pkg_resource.add_pkg": pkg_resource.add_pkg, "pkg_resource.parse_targets": pkg_resource.parse_targets, "pkg_resource.sort_pkglist": pkg_resource.sort_pkglist, "pkg_resource.stringify": 
pkg_resource.stringify, - "config.valid_fileproto": config.valid_fileproto, }, "__utils__": { "reg.key_exists": win_reg.key_exists, @@ -164,19 +178,81 @@ def test_pkg_install_existing(): se_list_pkgs = {"nsis": ["3.03"]} with patch.object(win_pkg, "list_pkgs", return_value=se_list_pkgs), patch.object( win_pkg, "_get_reg_software", return_value=ret_reg - ), patch.dict( - win_pkg.__salt__, {"cp.is_cached": MagicMock(return_value=False)} ), patch.dict( win_pkg.__salt__, - {"cp.cache_file": MagicMock(return_value="C:\\fake\\path.exe")}, - ), patch.dict( - win_pkg.__salt__, {"cmd.run_all": MagicMock(return_value={"retcode": 0})} + { + "cmd.run_all": MagicMock(return_value={"retcode": 0}), + "cp.cache_file": MagicMock(return_value="C:\\fake\\path.exe"), + "cp.is_cached": MagicMock(return_value=True), + }, ): expected = {} result = win_pkg.install(name="nsis") assert expected == result +def test_pkg_install_latest(): + """ + test pkg.install when the package is already installed + no version passed + """ + ret_reg = {"Nullsoft Install System": "3.03"} + # The 2nd time it's run, pkg.list_pkgs uses with stringify + se_list_pkgs = [{"nsis": ["3.03"]}, {"nsis": "3.04"}] + mock_cache_file = MagicMock(return_value="C:\\fake\\path.exe") + with patch.object(win_pkg, "list_pkgs", side_effect=se_list_pkgs), patch.object( + win_pkg, "_get_reg_software", return_value=ret_reg + ), patch.dict( + win_pkg.__salt__, + { + "cmd.run_all": MagicMock(return_value={"retcode": 0}), + "cp.cache_file": mock_cache_file, + "cp.is_cached": MagicMock(return_value=False), + }, + ): + expected = {"nsis": {"new": "3.04", "old": "3.03"}} + result = win_pkg.install(name="nsis", version="latest") + assert expected == result + mock_cache_file.assert_called_once_with( + "http://download.sourceforge.net/project/nsis/nsis-setup.exe", + saltenv="base", + source_hash=None, + verify_ssl=True, + use_etag=True, + ) + + +def test_pkg_install_latest_is_cached(): + """ + test pkg.install when the package is already 
installed + no version passed + """ + ret_reg = {"Nullsoft Install System": "3.03"} + # The 2nd time it's run, pkg.list_pkgs uses with stringify + se_list_pkgs = [{"nsis": ["3.03"]}, {"nsis": "3.04"}] + mock_cache_file = MagicMock(return_value="C:\\fake\\path.exe") + with patch.object(win_pkg, "list_pkgs", side_effect=se_list_pkgs), patch.object( + win_pkg, "_get_reg_software", return_value=ret_reg + ), patch.dict( + win_pkg.__salt__, + { + "cmd.run_all": MagicMock(return_value={"retcode": 0}), + "cp.cache_file": mock_cache_file, + "cp.is_cached": MagicMock(return_value=True), + }, + ): + expected = {"nsis": {"new": "3.04", "old": "3.03"}} + result = win_pkg.install(name="nsis", version="latest") + assert expected == result + mock_cache_file.assert_called_once_with( + "http://download.sourceforge.net/project/nsis/nsis-setup.exe", + saltenv="base", + source_hash=None, + verify_ssl=True, + use_etag=True, + ) + + def test_pkg_install_existing_with_version(): """ test pkg.install when the package is already installed @@ -187,13 +263,13 @@ def test_pkg_install_existing_with_version(): se_list_pkgs = {"nsis": ["3.03"]} with patch.object(win_pkg, "list_pkgs", return_value=se_list_pkgs), patch.object( win_pkg, "_get_reg_software", return_value=ret_reg - ), patch.dict( - win_pkg.__salt__, {"cp.is_cached": MagicMock(return_value=False)} ), patch.dict( win_pkg.__salt__, - {"cp.cache_file": MagicMock(return_value="C:\\fake\\path.exe")}, - ), patch.dict( - win_pkg.__salt__, {"cmd.run_all": MagicMock(return_value={"retcode": 0})} + { + "cmd.run_all": MagicMock(return_value={"retcode": 0}), + "cp.cache_file": MagicMock(return_value="C:\\fake\\path.exe"), + "cp.is_cached": MagicMock(return_value=False), + }, ): expected = {} result = win_pkg.install(name="nsis", version="3.03") @@ -233,7 +309,7 @@ def test_pkg_install_name(): "cmd.run_all": mock_cmd_run_all, }, ): - ret = win_pkg.install( + win_pkg.install( name="firebox", version="3.03", extra_install_flags="-e True -test_flag 
True", @@ -241,6 +317,37 @@ def test_pkg_install_name(): assert "-e True -test_flag True" in str(mock_cmd_run_all.call_args[0]) +def test_pkg_install_verify_ssl_false(): + """ + test pkg.install using verify_ssl=False + """ + ret_reg = {"Nullsoft Install System": "3.03"} + # The 2nd time it's run, pkg.list_pkgs uses with stringify + se_list_pkgs = [{"nsis": ["3.03"]}, {"nsis": "3.02"}] + mock_cache_file = MagicMock(return_value="C:\\fake\\path.exe") + with patch.object(win_pkg, "list_pkgs", side_effect=se_list_pkgs), patch.object( + win_pkg, "_get_reg_software", return_value=ret_reg + ), patch.dict( + win_pkg.__salt__, + { + "cmd.run_all": MagicMock(return_value={"retcode": 0}), + "cp.cache_file": mock_cache_file, + "cp.is_cached": MagicMock(return_value=False), + "cp.hash_file": MagicMock(return_value={"hsum": "abc123"}), + }, + ): + expected = {"nsis": {"new": "3.02", "old": "3.03"}} + result = win_pkg.install(name="nsis", version="3.02", verify_ssl=False) + mock_cache_file.assert_called_once_with( + "http://download.sourceforge.net/project/nsis/NSIS%203/3.02/nsis-3.02-setup.exe", + saltenv="base", + source_hash="abc123", + verify_ssl=False, + use_etag=True, + ) + assert expected == result + + def test_pkg_install_single_pkg(): """ test pkg.install pkg with extra_install_flags @@ -273,7 +380,7 @@ def test_pkg_install_single_pkg(): "cmd.run_all": mock_cmd_run_all, }, ): - ret = win_pkg.install( + win_pkg.install( pkgs=["firebox"], version="3.03", extra_install_flags="-e True -test_flag True", @@ -321,7 +428,7 @@ def test_pkg_install_log_message(caplog): extra_install_flags="-e True -test_flag True", ) assert ( - 'PKG : cmd: C:\\WINDOWS\\system32\\cmd.exe /s /c "runme.exe" /s -e ' + 'PKG : cmd: C:\\WINDOWS\\system32\\cmd.exe /c "runme.exe" /s -e ' "True -test_flag True" ).lower() in [x.lower() for x in caplog.messages] assert "PKG : pwd: ".lower() in [x.lower() for x in caplog.messages] @@ -360,7 +467,7 @@ def test_pkg_install_multiple_pkgs(): "cmd.run_all": 
mock_cmd_run_all, }, ): - ret = win_pkg.install( + win_pkg.install( pkgs=["firebox", "got"], extra_install_flags="-e True -test_flag True" ) assert "-e True -test_flag True" not in str(mock_cmd_run_all.call_args[0]) @@ -402,7 +509,7 @@ def test_pkg_install_minion_error_https(): "cp.cache_file": mock_minion_error, }, ): - ret = win_pkg.install( + result = win_pkg.install( name="firebox", version="3.03", ) @@ -411,7 +518,7 @@ def test_pkg_install_minion_error_https(): " getaddrinfo failed reading https://repo.test.com/runme.exe" ) - assert ret == expected + assert result == expected def test_pkg_install_minion_error_salt(): @@ -442,12 +549,13 @@ def test_pkg_install_minion_error_salt(): ), patch.dict( win_pkg.__salt__, { - "pkg_resource.parse_targets": mock_parse, - "cp.is_cached": mock_none, "cp.cache_file": mock_minion_error, + "cp.is_cached": mock_none, + "cp.hash_file": MagicMock(return_value={"hsum": "abc123"}), + "pkg_resource.parse_targets": mock_parse, }, ): - ret = win_pkg.install( + result = win_pkg.install( name="firebox", version="3.03", ) @@ -456,7 +564,7 @@ def test_pkg_install_minion_error_salt(): "Error: [Errno 1] failed reading salt://software/runme.exe" ) - assert ret == expected + assert result == expected def test_pkg_install_minion_error_salt_cache_dir(): @@ -478,18 +586,19 @@ def test_pkg_install_minion_error_salt_cache_dir(): } err_msg = "Error: [Errno 1] failed reading salt://software" - mock_none = MagicMock(return_value=None) mock_minion_error = MagicMock(side_effect=MinionError(err_msg)) - mock_parse = MagicMock(return_value=[{"firebox": "3.03"}, None]) with patch.object( salt.utils.data, "is_true", MagicMock(return_value=True) ), patch.object( win_pkg, "_get_package_info", MagicMock(return_value=ret__get_package_info) ), patch.dict( win_pkg.__salt__, - {"cp.cache_dir": mock_minion_error}, + { + "cp.cache_dir": mock_minion_error, + "cp.hash_file": MagicMock(return_value={"hsum": "abc123"}), + }, ): - ret = win_pkg.install( + result = 
win_pkg.install( name="firebox", version="3.03", ) @@ -498,7 +607,52 @@ def test_pkg_install_minion_error_salt_cache_dir(): "Error: [Errno 1] failed reading salt://software" ) - assert ret == expected + assert result == expected + + +def test_pkg_remove_log_message(caplog): + """ + test pkg.remove pkg logging + """ + ret__get_package_info = { + "3.03": { + "uninstaller": "%program.exe", + "reboot": False, + "msiexec": False, + "installer": "runme.exe", + "uninstall_flags": "/S", + "locale": "en_US", + "install_flags": "/s", + "full_name": "Firebox 3.03 (x86 en-US)", + } + } + + mock_cmd_run_all = MagicMock(return_value={"retcode": 0}) + se_list_pkgs = {"firebox": ["3.03"]} + with patch.object(win_pkg, "list_pkgs", return_value=se_list_pkgs), patch.object( + salt.utils.data, "is_true", MagicMock(return_value=True) + ), patch.object( + win_pkg, "_get_package_info", MagicMock(return_value=ret__get_package_info) + ), patch.dict( + win_pkg.__salt__, + { + "pkg_resource.parse_targets": MagicMock( + return_value=[{"firebox": "3.03"}, None] + ), + "cp.is_cached": MagicMock(return_value="C:\\fake\\path.exe"), + "cmd.run_all": mock_cmd_run_all, + }, + ), caplog.at_level( + logging.DEBUG + ): + win_pkg.remove( + pkgs=["firebox"], + ) + assert ( + 'PKG : cmd: C:\\WINDOWS\\system32\\cmd.exe /c "%program.exe" /S' + ).lower() in [x.lower() for x in caplog.messages] + assert "PKG : pwd: ".lower() in [x.lower() for x in caplog.messages] + assert "PKG : retcode: 0" in caplog.messages def test_pkg_remove_minion_error_salt_cache_dir(): @@ -530,17 +684,18 @@ def test_pkg_remove_minion_error_salt_cache_dir(): ), patch.dict( win_pkg.__salt__, { - "pkg_resource.parse_targets": mock_parse, "cp.cache_dir": mock_minion_error, + "cp.hash_file": MagicMock(return_value={"hsum": "abc123"}), + "pkg_resource.parse_targets": mock_parse, }, ): - ret = win_pkg.remove(name="firebox") + result = win_pkg.remove(name="firebox") expected = ( "Failed to cache salt://software\n" "Error: [Errno 1] failed 
reading salt://software" ) - assert ret == expected + assert result == expected def test_pkg_remove_minion_error_salt(): @@ -572,15 +727,30 @@ def test_pkg_remove_minion_error_salt(): ), patch.dict( win_pkg.__salt__, { - "pkg_resource.parse_targets": mock_parse, - "cp.is_cached": mock_none, "cp.cache_file": mock_minion_error, + "cp.hash_file": MagicMock(return_value={"hsum": "abc123"}), + "cp.is_cached": mock_none, + "pkg_resource.parse_targets": mock_parse, }, ): - ret = win_pkg.remove(name="firebox") + result = win_pkg.remove(name="firebox") expected = ( "Failed to cache salt://software/runme.exe\n" "Error: [Errno 1] failed reading salt://software/runme.exe" ) - assert ret == expected + assert result == expected + + +@pytest.mark.parametrize( + "v1,v2,expected", + ( + ("2.24.0", "2.23.0.windows.1", 1), + ("2.23.0.windows.2", "2.23.0.windows.1", 1), + ), +) +def test__reverse_cmp_pkg_versions(v1, v2, expected): + result = win_pkg._reverse_cmp_pkg_versions(v1, v2) + assert result == expected, "cmp({}, {}) should be {}, got {}".format( + v1, v2, expected, result + ) diff --git a/tests/pytests/unit/modules/test_win_pki.py b/tests/pytests/unit/modules/test_win_pki.py new file mode 100644 index 000000000000..128fb14f820d --- /dev/null +++ b/tests/pytests/unit/modules/test_win_pki.py @@ -0,0 +1,184 @@ +""" + Test cases for salt.modules.win_pki +""" + + +import pytest + +import salt.modules.win_pki as win_pki +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {win_pki: {}} + + +@pytest.fixture +def cert_path(): + return r"C:\certs\testdomain.local.cer" + + +@pytest.fixture +def thumbprint(): + return "9988776655443322111000AAABBBCCCDDDEEEFFF" + + +@pytest.fixture +def certs(thumbprint): + return { + thumbprint: { + "dnsnames": ["testdomain.local"], + "serialnumber": "0123456789AABBCCDD", + "subject": "CN=testdomain.local, OU=testou, O=testorg, S=California, C=US", + "thumbprint": thumbprint, + "version": 3, + } 
+ } + + +@pytest.fixture +def stores(): + return { + "CurrentUser": [ + "AuthRoot", + "CA", + "ClientAuthIssuer", + "Disallowed", + "MSIEHistoryJournal", + "My", + "Root", + "SmartCardRoot", + "Trust", + "TrustedPeople", + "TrustedPublisher", + "UserDS", + ], + "LocalMachine": [ + "AuthRoot", + "CA", + "ClientAuthIssuer", + "Disallowed", + "My", + "Remote Desktop", + "Root", + "SmartCardRoot", + "Trust", + "TrustedDevices", + "TrustedPeople", + "TrustedPublisher", + "WebHosting", + ], + } + + +@pytest.fixture +def json_certs(): + return [ + { + "DnsNameList": [ + {"Punycode": "testdomain.local", "Unicode": "testdomain.local"} + ], + "SerialNumber": "0123456789AABBCCDD", + "Subject": "CN=testdomain.local, OU=testou, O=testorg, S=California, C=US", + "Thumbprint": "9988776655443322111000AAABBBCCCDDDEEEFFF", + "Version": 3, + } + ] + + +@pytest.fixture +def json_stores(stores): + return [ + {"LocationName": "CurrentUser", "StoreNames": stores["CurrentUser"]}, + {"LocationName": "LocalMachine", "StoreNames": stores["LocalMachine"]}, + ] + + +def test_get_stores(stores, json_stores): + """ + Test - Get the certificate location contexts and their corresponding stores. + """ + with patch.dict(win_pki.__salt__), patch( + "salt.modules.win_pki._cmd_run", MagicMock(return_value=json_stores) + ): + assert win_pki.get_stores() == stores + + +def test_get_certs(certs, json_certs): + """ + Test - Get the available certificates in the given store. + """ + with patch.dict(win_pki.__salt__), patch( + "salt.modules.win_pki._cmd_run", MagicMock(return_value=json_certs) + ), patch("salt.modules.win_pki._validate_cert_path", MagicMock(return_value=None)): + assert win_pki.get_certs() == certs + + +def test_get_cert_file(cert_path, thumbprint, certs, json_certs): + """ + Test - Get the details of the certificate file. 
+ """ + kwargs = {"name": cert_path} + with patch.dict(win_pki.__salt__), patch( + "os.path.isfile", MagicMock(return_value=True) + ), patch("salt.modules.win_pki._cmd_run", MagicMock(return_value=json_certs)): + assert win_pki.get_cert_file(**kwargs) == certs[thumbprint] + + +def test_import_cert(cert_path, thumbprint, certs, json_certs): + """ + Test - Import the certificate file into the given certificate store. + """ + kwargs = {"name": cert_path} + mock_value = MagicMock(return_value=cert_path) + with patch.dict(win_pki.__salt__, {"cp.cache_file": mock_value}), patch( + "salt.modules.win_pki._cmd_run", MagicMock(return_value=json_certs) + ), patch( + "salt.modules.win_pki._validate_cert_path", MagicMock(return_value=None) + ), patch( + "salt.modules.win_pki.get_cert_file", + MagicMock(return_value=certs[thumbprint]), + ), patch( + "salt.modules.win_pki.get_certs", MagicMock(return_value=certs) + ): + assert win_pki.import_cert(**kwargs) + + +def test_export_cert(cert_path, thumbprint): + """ + Test - Export the certificate to a file from the given certificate store. + """ + kwargs = {"name": cert_path, "thumbprint": thumbprint} + with patch.dict(win_pki.__salt__), patch( + "salt.modules.win_pki._cmd_run", MagicMock(return_value="True") + ), patch( + "salt.modules.win_pki._validate_cert_format", MagicMock(return_value=None) + ), patch( + "salt.modules.win_pki._validate_cert_path", MagicMock(return_value=None) + ): + assert win_pki.export_cert(**kwargs) + + +def test_test_cert(thumbprint): + """ + Test - Check the certificate for validity. + """ + with patch.dict(win_pki.__salt__), patch( + "salt.modules.win_pki._cmd_run", MagicMock(return_value="True") + ), patch("salt.modules.win_pki._validate_cert_path", MagicMock(return_value=None)): + assert win_pki.test_cert(thumbprint=thumbprint) + + +def test_remove_cert(thumbprint, certs): + """ + Test - Remove the certificate from the given certificate store. 
+ """ + with patch.dict(win_pki.__salt__), patch( + "salt.modules.win_pki._cmd_run", MagicMock(return_value=None) + ), patch( + "salt.modules.win_pki._validate_cert_path", MagicMock(return_value=None) + ), patch( + "salt.modules.win_pki.get_certs", MagicMock(return_value=certs) + ): + assert win_pki.remove_cert(thumbprint=thumbprint[::-1]) diff --git a/tests/pytests/unit/modules/test_win_powercfg.py b/tests/pytests/unit/modules/test_win_powercfg.py new file mode 100644 index 000000000000..e1cd9426a4c9 --- /dev/null +++ b/tests/pytests/unit/modules/test_win_powercfg.py @@ -0,0 +1,284 @@ +""" + Test cases for salt.modules.win_powercfg +""" + + +import pytest + +import salt.modules.win_powercfg as powercfg +from tests.support.mock import MagicMock, call, patch + + +@pytest.fixture +def configure_loader_modules(): + return {powercfg: {"__grains__": {"osrelease": 8}}} + + +@pytest.fixture +def query_output(): + return """Subgroup GUID: 238c9fa8-0aad-41ed-83f4-97be242c8f20 (Hibernate) + GUID Alias: SUB_SLEEP + Power Setting GUID: 29f6c1db-86da-48c5-9fdb-f2b67b1f44da (Hibernate after) + GUID Alias: HIBERNATEIDLE + Minimum Possible Setting: 0x00000000 + Maximum Possible Setting: 0xffffffff + Possible Settings increment: 0x00000001 + Possible Settings units: Seconds + Current AC Power Setting Index: 0x00000708 + Current DC Power Setting Index: 0x00000384""" + + +def test_set_monitor_timeout(query_output): + """ + Test to make sure we can set the monitor timeout value + """ + mock = MagicMock(return_value=0) + mock.side_effect = [ + "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", + query_output, + ] + + mock_retcode = MagicMock(return_value=0) + + with patch.dict(powercfg.__salt__, {"cmd.run": mock}): + with patch.dict(powercfg.__salt__, {"cmd.retcode": mock_retcode}): + powercfg.set_monitor_timeout(0, "dc") + mock.assert_called_once_with( + "powercfg /getactivescheme", python_shell=False + ) + mock_retcode.assert_called_once_with( + "powercfg 
/setdcvalueindex 381b4222-f694-41f0-9685-ff5bb260df2e" + " SUB_VIDEO VIDEOIDLE 0", + python_shell=False, + ) + + +def test_set_disk_timeout(query_output): + """ + Test to make sure we can set the disk timeout value + """ + mock = MagicMock() + mock.side_effect = [ + "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", + query_output, + ] + + mock_retcode = MagicMock(return_value=0) + + with patch.dict(powercfg.__salt__, {"cmd.run": mock}): + with patch.dict(powercfg.__salt__, {"cmd.retcode": mock_retcode}): + powercfg.set_disk_timeout(0, "dc") + mock.assert_called_once_with( + "powercfg /getactivescheme", python_shell=False + ) + mock_retcode.assert_called_once_with( + "powercfg /setdcvalueindex 381b4222-f694-41f0-9685-ff5bb260df2e" + " SUB_DISK DISKIDLE 0", + python_shell=False, + ) + + +def test_set_standby_timeout(query_output): + """ + Test to make sure we can set the standby timeout value + """ + mock = MagicMock(return_value=0) + mock.side_effect = [ + "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", + query_output, + ] + + mock_retcode = MagicMock(return_value=0) + + with patch.dict(powercfg.__salt__, {"cmd.run": mock}): + with patch.dict(powercfg.__salt__, {"cmd.retcode": mock_retcode}): + powercfg.set_standby_timeout(0, "dc") + mock.assert_called_once_with( + "powercfg /getactivescheme", python_shell=False + ) + mock_retcode.assert_called_once_with( + "powercfg /setdcvalueindex 381b4222-f694-41f0-9685-ff5bb260df2e" + " SUB_SLEEP STANDBYIDLE 0", + python_shell=False, + ) + + +def test_set_hibernate_timeout(query_output): + """ + Test to make sure we can set the hibernate timeout value + """ + mock = MagicMock(return_value=0) + mock.side_effect = [ + "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", + query_output, + ] + + mock_retcode = MagicMock(return_value=0) + + with patch.dict(powercfg.__salt__, {"cmd.run": mock}): + with patch.dict(powercfg.__salt__, {"cmd.retcode": mock_retcode}): + 
powercfg.set_hibernate_timeout(0, "dc") + mock.assert_called_once_with( + "powercfg /getactivescheme", python_shell=False + ) + mock_retcode.assert_called_once_with( + "powercfg /setdcvalueindex 381b4222-f694-41f0-9685-ff5bb260df2e" + " SUB_SLEEP HIBERNATEIDLE 0", + python_shell=False, + ) + + +def test_get_monitor_timeout(query_output): + """ + Test to make sure we can get the monitor timeout value + """ + mock = MagicMock() + mock.side_effect = [ + "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", + query_output, + ] + + with patch.dict(powercfg.__salt__, {"cmd.run": mock}): + ret = powercfg.get_monitor_timeout() + calls = [ + call("powercfg /getactivescheme", python_shell=False), + call( + "powercfg /q 381b4222-f694-41f0-9685-ff5bb260df2e SUB_VIDEO" + " VIDEOIDLE", + python_shell=False, + ), + ] + mock.assert_has_calls(calls) + + assert {"ac": 30, "dc": 15} == ret + + +def test_get_disk_timeout(query_output): + """ + Test to make sure we can get the disk timeout value + """ + mock = MagicMock() + mock.side_effect = [ + "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", + query_output, + ] + + with patch.dict(powercfg.__salt__, {"cmd.run": mock}): + ret = powercfg.get_disk_timeout() + calls = [ + call("powercfg /getactivescheme", python_shell=False), + call( + "powercfg /q 381b4222-f694-41f0-9685-ff5bb260df2e SUB_DISK" " DISKIDLE", + python_shell=False, + ), + ] + mock.assert_has_calls(calls) + + assert {"ac": 30, "dc": 15} == ret + + +def test_get_standby_timeout(query_output): + """ + Test to make sure we can get the standby timeout value + """ + mock = MagicMock() + mock.side_effect = [ + "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", + query_output, + ] + + with patch.dict(powercfg.__salt__, {"cmd.run": mock}): + ret = powercfg.get_standby_timeout() + calls = [ + call("powercfg /getactivescheme", python_shell=False), + call( + "powercfg /q 381b4222-f694-41f0-9685-ff5bb260df2e SUB_SLEEP" + " 
STANDBYIDLE", + python_shell=False, + ), + ] + mock.assert_has_calls(calls) + + assert {"ac": 30, "dc": 15} == ret + + +def test_get_hibernate_timeout(query_output): + """ + Test to make sure we can get the hibernate timeout value + """ + mock = MagicMock() + mock.side_effect = [ + "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", + query_output, + ] + + with patch.dict(powercfg.__salt__, {"cmd.run": mock}): + ret = powercfg.get_hibernate_timeout() + calls = [ + call("powercfg /getactivescheme", python_shell=False), + call( + "powercfg /q 381b4222-f694-41f0-9685-ff5bb260df2e SUB_SLEEP" + " HIBERNATEIDLE", + python_shell=False, + ), + ] + mock.assert_has_calls(calls) + + assert {"ac": 30, "dc": 15} == ret + + +def test_windows_7(query_output): + """ + Test to make sure we can get the hibernate timeout value on windows 7 + """ + mock = MagicMock() + mock.side_effect = [ + "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", + query_output, + ] + + with patch.dict(powercfg.__salt__, {"cmd.run": mock}): + with patch.dict(powercfg.__grains__, {"osrelease": "7"}): + ret = powercfg.get_hibernate_timeout() + calls = [ + call("powercfg /getactivescheme", python_shell=False), + call( + "powercfg /q 381b4222-f694-41f0-9685-ff5bb260df2e SUB_SLEEP", + python_shell=False, + ), + ] + mock.assert_has_calls(calls) + + assert {"ac": 30, "dc": 15} == ret + + +def test_set_hibernate_timeout_scheme(query_output): + """ + Test to make sure we can set the hibernate timeout value + """ + mock = MagicMock(return_value=0) + mock.side_effect = [query_output] + + with patch.dict(powercfg.__salt__, {"cmd.retcode": mock}): + powercfg.set_hibernate_timeout(0, "dc", scheme="SCHEME_MIN") + mock.assert_called_once_with( + "powercfg /setdcvalueindex SCHEME_MIN SUB_SLEEP HIBERNATEIDLE 0", + python_shell=False, + ) + + +def test_get_hibernate_timeout_scheme(query_output): + """ + Test to make sure we can get the hibernate timeout value with a + specified scheme + 
""" + mock = MagicMock() + mock.side_effect = [query_output] + + with patch.dict(powercfg.__salt__, {"cmd.run": mock}): + ret = powercfg.get_hibernate_timeout(scheme="SCHEME_MIN") + mock.assert_called_once_with( + "powercfg /q SCHEME_MIN SUB_SLEEP HIBERNATEIDLE", python_shell=False + ) + + assert {"ac": 30, "dc": 15} == ret diff --git a/tests/unit/modules/test_win_psget.py b/tests/pytests/unit/modules/test_win_psget.py similarity index 61% rename from tests/unit/modules/test_win_psget.py rename to tests/pytests/unit/modules/test_win_psget.py index 49ce2550c659..af7737b121fe 100644 --- a/tests/unit/modules/test_win_psget.py +++ b/tests/pytests/unit/modules/test_win_psget.py @@ -1,9 +1,22 @@ +""" + Test cases for salt.modules.win_psget +""" + + +import pytest + import salt.modules.win_psget as win_psget -from tests.support.mixins import LoaderModuleMockMixin from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase -BOOTSTRAP_PS_STR = """ + +@pytest.fixture +def configure_loader_modules(): + return {win_psget: {}} + + +@pytest.fixture +def bootstrap_ps_str(): + return """ NuGet @@ -18,7 +31,10 @@ """ -AVAIL_MODULES_PS_STR = """ + +@pytest.fixture +def avail_modules_ps_str(): + return """ ActOnCmdlets @@ -39,37 +55,30 @@ """ -class WinPsgetCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.win_psget - """ - - def setup_loader_modules(self): - return {win_psget: {}} +def test_bootstrap(bootstrap_ps_str): + mock_read_ok = MagicMock( + return_value={ + "pid": 78, + "retcode": 0, + "stderr": "", + "stdout": bootstrap_ps_str, + } + ) - def test_bootstrap(self): - mock_read_ok = MagicMock( - return_value={ - "pid": 78, - "retcode": 0, - "stderr": "", - "stdout": BOOTSTRAP_PS_STR, - } - ) + with patch.dict(win_psget.__salt__, {"cmd.run_all": mock_read_ok}): + assert "NuGet" in win_psget.bootstrap() - with patch.dict(win_psget.__salt__, {"cmd.run_all": mock_read_ok}): - self.assertTrue("NuGet" in win_psget.bootstrap()) - 
def test_avail_modules(self): - mock_read_ok = MagicMock( - return_value={ - "pid": 78, - "retcode": 0, - "stderr": "", - "stdout": AVAIL_MODULES_PS_STR, - } - ) +def test_avail_modules(avail_modules_ps_str): + mock_read_ok = MagicMock( + return_value={ + "pid": 78, + "retcode": 0, + "stderr": "", + "stdout": avail_modules_ps_str, + } + ) - with patch.dict(win_psget.__salt__, {"cmd.run_all": mock_read_ok}): - self.assertTrue("DHCPMigration" in win_psget.avail_modules(False)) - self.assertTrue("DHCPMigration" in win_psget.avail_modules(True)) + with patch.dict(win_psget.__salt__, {"cmd.run_all": mock_read_ok}): + assert "DHCPMigration" in win_psget.avail_modules(False) + assert "DHCPMigration" in win_psget.avail_modules(True) diff --git a/tests/pytests/unit/modules/test_win_service.py b/tests/pytests/unit/modules/test_win_service.py new file mode 100644 index 000000000000..833ab0e776c5 --- /dev/null +++ b/tests/pytests/unit/modules/test_win_service.py @@ -0,0 +1,395 @@ +""" +:codeauthor: Rahul Handay +""" + +import pytest + +import salt.modules.win_service as win_service +import salt.utils.path +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + +try: + import pywintypes + import win32serviceutil + + WINAPI = True +except ImportError: + WINAPI = False + +pytestmark = [ + pytest.mark.skip_unless_on_windows, +] + + +@pytest.fixture +def configure_loader_modules(): + return {win_service: {}} + + +def test_get_enabled(): + """ + Test to return the enabled services + """ + mock = MagicMock( + return_value=[ + {"ServiceName": "spongebob"}, + {"ServiceName": "squarepants"}, + {"ServiceName": "patrick"}, + ] + ) + with patch.object(win_service, "_get_services", mock): + mock_info = MagicMock( + side_effect=[ + {"StartType": "Auto"}, + {"StartType": "Manual"}, + {"StartType": "Disabled"}, + ] + ) + with patch.object(win_service, "info", mock_info): + assert win_service.get_enabled() == ["spongebob"] + + +def 
test_get_disabled(): + """ + Test to return the disabled services + """ + mock = MagicMock( + return_value=[ + {"ServiceName": "spongebob"}, + {"ServiceName": "squarepants"}, + {"ServiceName": "patrick"}, + ] + ) + with patch.object(win_service, "_get_services", mock): + mock_info = MagicMock( + side_effect=[ + {"StartType": "Auto"}, + {"StartType": "Manual"}, + {"StartType": "Disabled"}, + ] + ) + with patch.object(win_service, "info", mock_info): + result = win_service.get_disabled() + expected = ["patrick", "squarepants"] + assert result == expected + + +def test_available(): + """ + Test to Returns ``True`` if the specified service + is available, otherwise returns ``False`` + """ + mock = MagicMock(return_value=["c", "a", "b"]) + with patch.object(win_service, "get_all", mock): + assert win_service.available("a") is True + + +def test_missing(): + """ + Test to the inverse of service.available + """ + mock = MagicMock(return_value=["c", "a", "b"]) + with patch.object(win_service, "get_all", mock): + assert win_service.missing("d") is True + + +def test_get_all(): + """ + Test to return all installed services + """ + mock = MagicMock( + return_value=[ + {"ServiceName": "spongebob"}, + {"ServiceName": "squarepants"}, + {"ServiceName": "patrick"}, + ] + ) + with patch.object(win_service, "_get_services", mock): + expected = ["patrick", "spongebob", "squarepants"] + result = win_service.get_all() + assert result == expected + + +def test_get_service_name(): + """ + Test to the Display Name is what is displayed + in Windows when services.msc is executed. 
+ """ + mock = MagicMock( + return_value=[ + {"ServiceName": "spongebob", "DisplayName": "Sponge Bob"}, + {"ServiceName": "squarepants", "DisplayName": "Square Pants"}, + {"ServiceName": "patrick", "DisplayName": "Patrick the Starfish"}, + ] + ) + with patch.object(win_service, "_get_services", mock): + expected = { + "Patrick the Starfish": "patrick", + "Sponge Bob": "spongebob", + "Square Pants": "squarepants", + } + result = win_service.get_service_name() + assert result == expected + + expected = {"Patrick the Starfish": "patrick"} + result = win_service.get_service_name("patrick") + assert result == expected + + +@pytest.mark.skipif(not WINAPI, reason="win32serviceutil not available") +@pytest.mark.slow_test +def test_start(): + """ + Test to start the specified service + """ + mock_true = MagicMock(return_value=True) + mock_false = MagicMock(return_value=False) + mock_info = MagicMock(side_effect=[{"Status": "Running"}]) + + with patch.object(win32serviceutil, "StartService", mock_true), patch.object( + win_service, "disabled", mock_false + ), patch.object(win_service, "info", mock_info): + assert win_service.start("spongebob") is True + + mock_info = MagicMock( + side_effect=[ + {"Status": "Stopped", "Status_WaitHint": 0}, + {"Status": "Start Pending", "Status_WaitHint": 0}, + {"Status": "Running"}, + ] + ) + + with patch.object(win32serviceutil, "StartService", mock_true), patch.object( + win_service, "disabled", mock_false + ), patch.object(win_service, "info", mock_info), patch.object( + win_service, "status", mock_true + ): + assert win_service.start("spongebob") is True + + +@pytest.mark.skipif(not WINAPI, reason="pywintypes not available") +def test_start_already_running(): + """ + Test starting a service that is already running + """ + mock_false = MagicMock(return_value=False) + mock_error = MagicMock( + side_effect=pywintypes.error(1056, "StartService", "Service is running") + ) + mock_info = MagicMock(side_effect=[{"Status": "Running"}]) + with 
patch.object(win32serviceutil, "StartService", mock_error), patch.object( + win_service, "disabled", mock_false + ), patch.object(win_service, "_status_wait", mock_info): + assert win_service.start("spongebob") is True + + +@pytest.mark.skipif(not WINAPI, reason="win32serviceutil not available") +@pytest.mark.slow_test +def test_stop(): + """ + Test to stop the specified service + """ + mock_true = MagicMock(return_value=True) + mock_false = MagicMock(return_value=False) + mock_info = MagicMock(side_effect=[{"Status": "Stopped"}]) + + with patch.object(win32serviceutil, "StopService", mock_true), patch.object( + win_service, "_status_wait", mock_info + ): + assert win_service.stop("spongebob") is True + + mock_info = MagicMock( + side_effect=[ + {"Status": "Running", "Status_WaitHint": 0}, + {"Status": "Stop Pending", "Status_WaitHint": 0}, + {"Status": "Stopped"}, + ] + ) + + with patch.object(win32serviceutil, "StopService", mock_true), patch.object( + win_service, "info", mock_info + ), patch.object(win_service, "status", mock_false): + assert win_service.stop("spongebob") is True + + +@pytest.mark.skipif(not WINAPI, reason="pywintypes not available") +def test_stop_not_running(): + """ + Test stopping a service that is already stopped + """ + mock_error = MagicMock( + side_effect=pywintypes.error(1062, "StopService", "Service is not running") + ) + mock_info = MagicMock(side_effect=[{"Status": "Stopped"}]) + with patch.object(win32serviceutil, "StopService", mock_error), patch.object( + win_service, "_status_wait", mock_info + ): + assert win_service.stop("spongebob") is True + + +def test_restart(): + """ + Test to restart the named service + """ + mock_true = MagicMock(return_value=True) + with patch.object(win_service, "create_win_salt_restart_task", mock_true): + with patch.object(win_service, "execute_salt_restart_task", mock_true): + assert win_service.restart("salt-minion") is True + + with patch.object(win_service, "stop", mock_true): + with 
patch.object(win_service, "start", mock_true): + assert win_service.restart("salt") is True + + +def test_createwin_saltrestart_task(): + """ + Test to create a task in Windows task + scheduler to enable restarting the salt-minion + """ + cmd = salt.utils.path.which("cmd") + mock = MagicMock() + with patch.dict(win_service.__salt__, {"task.create_task": mock}): + win_service.create_win_salt_restart_task() + mock.assert_called_once_with( + action_type="Execute", + arguments=( + "/c ping -n 3 127.0.0.1 && net stop salt-minion && " + "net start salt-minion" + ), + cmd=cmd, + force=True, + name="restart-salt-minion", + start_date="1975-01-01", + start_time="01:00", + trigger_type="Once", + user_name="System", + ) + + +def test_execute_salt_restart_task(): + """ + Test to run the Windows Salt restart task + """ + mock_true = MagicMock(return_value=True) + with patch.dict(win_service.__salt__, {"task.run": mock_true}): + assert win_service.execute_salt_restart_task() is True + + +@pytest.mark.skipif(not WINAPI, reason="win32serviceutil not available") +def test_status(): + """ + Test to return the status for a service + """ + mock_info = MagicMock( + side_effect=[ + {"Status": "Running"}, + {"Status": "Stop Pending"}, + {"Status": "Stopped"}, + CommandExecutionError, + ] + ) + + with patch.object(win_service, "info", mock_info): + assert win_service.status("spongebob") is True + assert win_service.status("patrick") is True + assert win_service.status("squidward") is False + assert win_service.status("non_existing") == "Not Found" + + +def test_getsid(): + """ + Test to return the sid for this windows service + """ + mock_info = MagicMock( + side_effect=[{"sid": "S-1-5-80-1956725871..."}, {"sid": None}] + ) + with patch.object(win_service, "info", mock_info): + expected = "S-1-5-80-1956725871..." 
+ result = win_service.getsid("spongebob") + assert result == expected + assert win_service.getsid("plankton") is None + + +def test_enable(): + """ + Test to enable the named service to start at boot + """ + mock_modify = MagicMock(return_value=True) + mock_info = MagicMock(return_value={"StartType": "Auto", "StartTypeDelayed": False}) + with patch.object(win_service, "modify", mock_modify): + with patch.object(win_service, "info", mock_info): + assert win_service.enable("spongebob") is True + + +def test_disable(): + """ + Test to disable the named service to start at boot + """ + mock_modify = MagicMock(return_value=True) + mock_info = MagicMock(return_value={"StartType": "Disabled"}) + with patch.object(win_service, "modify", mock_modify): + with patch.object(win_service, "info", mock_info): + assert win_service.disable("spongebob") is True + + +def test_enabled(): + """ + Test to check to see if the named + service is enabled to start on boot + """ + mock = MagicMock(side_effect=[{"StartType": "Auto"}, {"StartType": "Disabled"}]) + with patch.object(win_service, "info", mock): + assert win_service.enabled("spongebob") is True + assert win_service.enabled("squarepants") is False + + +def test_enabled_with_space_in_name(): + """ + Test to check to see if the named + service is enabled to start on boot + when have space in service name + """ + mock = MagicMock(side_effect=[{"StartType": "Auto"}, {"StartType": "Disabled"}]) + with patch.object(win_service, "info", mock): + assert win_service.enabled("spongebob test") is True + assert win_service.enabled("squarepants test") is False + + +def test_disabled(): + """ + Test to check to see if the named + service is disabled to start on boot + """ + mock = MagicMock(side_effect=[False, True]) + with patch.object(win_service, "enabled", mock): + assert win_service.disabled("spongebob") is True + assert win_service.disabled("squarepants") is False + + +def test_cmd_quote(): + """ + Make sure the command gets quoted 
correctly + """ + # Should always return command wrapped in double quotes + expected = r'"C:\Program Files\salt\test.exe"' + + # test no quotes + bin_path = r"C:\Program Files\salt\test.exe" + result = win_service._cmd_quote(bin_path) + assert result == expected + + # test single quotes + bin_path = r"'C:\Program Files\salt\test.exe'" + result = win_service._cmd_quote(bin_path) + assert result == expected + + # test double quoted single quotes + bin_path = "\"'C:\\Program Files\\salt\\test.exe'\"" + result = win_service._cmd_quote(bin_path) + assert result == expected + + # test single quoted, double quoted, single quotes + bin_path = "'\"'C:\\Program Files\\salt\\test.exe'\"'" + result = win_service._cmd_quote(bin_path) + assert result == expected diff --git a/tests/pytests/unit/modules/test_win_shadow.py b/tests/pytests/unit/modules/test_win_shadow.py new file mode 100644 index 000000000000..bccb67fe9da4 --- /dev/null +++ b/tests/pytests/unit/modules/test_win_shadow.py @@ -0,0 +1,50 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.modules.win_shadow +""" + + +import pytest + +import salt.modules.win_shadow as win_shadow +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {win_shadow: {"__salt__": {"user.update": MagicMock(return_value=True)}}} + + +def test_info(): + """ + Test if it return information for the specified user + """ + mock_user_info = MagicMock( + return_value={"name": "SALT", "password_changed": "", "expiration_date": ""} + ) + with patch.dict(win_shadow.__salt__, {"user.info": mock_user_info}): + assert win_shadow.info("SALT") == { + "name": "SALT", + "passwd": "Unavailable", + "lstchg": "", + "min": "", + "max": "", + "warn": "", + "inact": "", + "expire": "", + } + + +def test_set_password(): + """ + Test if it set the password for a named user. 
+ """ + mock_cmd = MagicMock(return_value={"retcode": False}) + mock_user_info = MagicMock( + return_value={"name": "SALT", "password_changed": "", "expiration_date": ""} + ) + with patch.dict( + win_shadow.__salt__, {"cmd.run_all": mock_cmd, "user.info": mock_user_info} + ): + assert win_shadow.set_password("root", "mysecretpassword") diff --git a/tests/pytests/unit/modules/test_win_snmp.py b/tests/pytests/unit/modules/test_win_snmp.py new file mode 100644 index 000000000000..339f834e0d83 --- /dev/null +++ b/tests/pytests/unit/modules/test_win_snmp.py @@ -0,0 +1,114 @@ +""" + Test cases for salt.modules.win_snmp +""" + + +import pytest + +import salt.modules.win_snmp as win_snmp +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def community_names(): + return {"TestCommunity": "Read Create"} + + +@pytest.fixture +def configure_loader_modules(): + return {win_snmp: {}} + + +def test_get_agent_service_types(): + """ + Test - Get the sysServices types that can be configured. + """ + assert isinstance(win_snmp.get_agent_service_types(), list) + + +def test_get_permission_types(): + """ + Test - Get the permission types that can be configured for communities. + """ + assert isinstance(win_snmp.get_permission_types(), list) + + +def test_get_auth_traps_enabled(): + """ + Test - Determine whether the host is configured to send authentication traps. + """ + mock_value = MagicMock(return_value={"vdata": 1}) + with patch.dict(win_snmp.__utils__, {"reg.read_value": mock_value}): + assert win_snmp.get_auth_traps_enabled() + + +def test_set_auth_traps_enabled(): + """ + Test - Manage the sending of authentication traps. 
+ """ + mock_value = MagicMock(return_value=True) + kwargs = {"status": True} + with patch.dict(win_snmp.__utils__, {"reg.set_value": mock_value}), patch( + "salt.modules.win_snmp.get_auth_traps_enabled", MagicMock(return_value=True) + ): + assert win_snmp.set_auth_traps_enabled(**kwargs) + + +def test_get_community_names(community_names): + """ + Test - Get the current accepted SNMP community names and their permissions. + """ + mock_ret = MagicMock(return_value=[{"vdata": 16, "vname": "TestCommunity"}]) + mock_false = MagicMock(return_value=False) + with patch.dict( + win_snmp.__utils__, + {"reg.list_values": mock_ret, "reg.key_exists": mock_false}, + ): + assert win_snmp.get_community_names() == community_names + + +def test_get_community_names_gpo(): + """ + Test - Get the current accepted SNMP community names and their permissions. + """ + mock_ret = MagicMock(return_value=[{"vdata": "TestCommunity", "vname": 1}]) + mock_false = MagicMock(return_value=True) + with patch.dict( + win_snmp.__utils__, + {"reg.list_values": mock_ret, "reg.key_exists": mock_false}, + ): + assert win_snmp.get_community_names() == {"TestCommunity": "Managed by GPO"} + + +def test_set_community_names(community_names): + """ + Test - Manage the SNMP accepted community names and their permissions. + """ + mock_true = MagicMock(return_value=True) + kwargs = {"communities": community_names} + mock_false = MagicMock(return_value=False) + with patch.dict( + win_snmp.__utils__, + {"reg.set_value": mock_true, "reg.key_exists": mock_false}, + ), patch( + "salt.modules.win_snmp.get_community_names", + MagicMock(return_value=community_names), + ): + assert win_snmp.set_community_names(**kwargs) + + +def test_set_community_names_gpo(community_names): + """ + Test - Manage the SNMP accepted community names and their permissions. 
+ """ + mock_true = MagicMock(return_value=True) + kwargs = {"communities": community_names} + with patch.dict( + win_snmp.__utils__, + {"reg.set_value": mock_true, "reg.key_exists": mock_true}, + ), patch( + "salt.modules.win_snmp.get_community_names", + MagicMock(return_value=community_names), + ): + pytest.raises(CommandExecutionError, win_snmp.set_community_names, **kwargs) diff --git a/tests/pytests/unit/modules/test_win_task.py b/tests/pytests/unit/modules/test_win_task.py new file mode 100644 index 000000000000..6e71612264a9 --- /dev/null +++ b/tests/pytests/unit/modules/test_win_task.py @@ -0,0 +1,86 @@ +""" +Test the win_task execution module +""" +from datetime import datetime + +import pytest + +import salt.modules.win_task as win_task + +pytestmark = [ + pytest.mark.skip_unless_on_windows, + pytest.mark.destructive_test, +] + + +@pytest.fixture(scope="function") +def base_task(): + task_name = "SaltTest" + result = win_task.create_task( + task_name, + user_name="System", + force=True, + action_type="Execute", + cmd="c:\\salt\\salt-call.bat", + ) + assert result is True + yield task_name + result = win_task.delete_task(task_name) + assert result is True + + +def test_repeat_interval(base_task): + result = win_task.add_trigger( + base_task, + trigger_type="Daily", + trigger_enabled=True, + repeat_duration="30 minutes", + repeat_interval="30 minutes", + ) + assert result is True + + result = win_task.info(base_task) + assert result["triggers"][0]["enabled"] is True + assert result["triggers"][0]["trigger_type"] == "Daily" + assert result["triggers"][0]["repeat_duration"] == "30 minutes" + assert result["triggers"][0]["repeat_interval"] == "30 minutes" + + +def test_repeat_interval_and_indefinitely(base_task): + result = win_task.add_trigger( + base_task, + trigger_type="Daily", + trigger_enabled=True, + repeat_duration="Indefinitely", + repeat_interval="30 minutes", + ) + assert result is True + + result = win_task.info(base_task) + assert 
result["triggers"][0]["enabled"] is True + assert result["triggers"][0]["trigger_type"] == "Daily" + assert result["triggers"][0]["repeat_duration"] == "Indefinitely" + assert result["triggers"][0]["repeat_interval"] == "30 minutes" + + +def test_edit_task_delete_after(base_task): + result = win_task.add_trigger( + base_task, + trigger_type="Daily", + trigger_enabled=True, + end_date=datetime.today().strftime("%Y-%m-%d"), + end_time="23:59:59", + ) + assert result is True + + result = win_task.edit_task(base_task, delete_after="30 days") + assert result is True + + result = win_task.info(base_task) + assert result["settings"]["delete_after"] == "30 days" + + result = win_task.edit_task(base_task, delete_after=False) + assert result is True + + result = win_task.info(base_task) + assert result["settings"]["delete_after"] is False diff --git a/tests/pytests/unit/modules/test_xapi_virt.py b/tests/pytests/unit/modules/test_xapi_virt.py new file mode 100644 index 000000000000..47cdc4b6376e --- /dev/null +++ b/tests/pytests/unit/modules/test_xapi_virt.py @@ -0,0 +1,387 @@ +""" + :codeauthor: Rahul Handay + + Test cases for salt.modules.xapi +""" + + +import pytest + +import salt.modules.xapi_virt as xapi +from tests.support.mock import MagicMock, mock_open, patch + + +class Mockxapi: + """ + Mock xapi class + """ + + def __init__(self): + pass + + class Session: + """ + Mock Session class + """ + + def __init__(self, xapi_uri): + pass + + class xenapi: + """ + Mock xenapi class + """ + + def __init__(self): + pass + + @staticmethod + def login_with_password(xapi_login, xapi_password): + """ + Mock login_with_password method + """ + return xapi_login, xapi_password + + class session: + """ + Mock session class + """ + + def __init__(self): + pass + + @staticmethod + def logout(): + """ + Mock logout method + """ + return Mockxapi() + + +@pytest.fixture +def configure_loader_modules(): + return {xapi: {}} + + +def test_list_domains(): + """ + Test to return a list of 
domain names on the minion + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + assert xapi.list_domains() == [] + + +def test_vm_info(): + """ + Test to return detailed information about the vms + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(return_value=False) + with patch.object(xapi, "_get_record_by_label", mock): + assert xapi.vm_info(True) == {True: False} + + +def test_vm_state(): + """ + Test to return list of all the vms and their state. + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(return_value={"power_state": "1"}) + with patch.object(xapi, "_get_record_by_label", mock): + assert xapi.vm_state("salt") == {"salt": "1"} + assert xapi.vm_state() == {} + + +def test_get_nics(): + """ + Test to return info about the network interfaces of a named vm + """ + ret = {"Stack": {"device": "ETH0", "mac": "Stack", "mtu": 1}} + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, {"VIFs": "salt"}]) + with patch.object(xapi, "_get_record_by_label", mock): + assert not xapi.get_nics("salt") + mock = MagicMock(return_value={"MAC": "Stack", "device": "ETH0", "MTU": 1}) + with patch.object(xapi, "_get_record", mock): + assert xapi.get_nics("salt") == ret + + +def test_get_macs(): + """ + Test to return a list off MAC addresses from the named vm + """ + mock = MagicMock(side_effect=[None, ["a", "b", "c"]]) + with patch.object(xapi, "get_nics", mock): + assert xapi.get_macs("salt") is None + assert xapi.get_macs("salt") == ["a", "b", "c"] + + +def test_get_disks(): + """ + Test to return the disks of a named vm + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.get_disks("salt") + assert xapi.get_disks("salt") == {} + + +def test_setmem(): + """ + Test to changes the amount of memory allocated to 
VM. + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.setmem("salt", "1") + assert xapi.setmem("salt", "1") + with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): + mock = MagicMock(return_value=True) + with patch.dict(xapi.__salt__, {"config.option": mock}): + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.setmem("salt", "1") + + +def test_setvcpus(): + """ + Test to changes the amount of vcpus allocated to VM. + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.setvcpus("salt", "1") + assert xapi.setvcpus("salt", "1") + with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): + mock = MagicMock(return_value=True) + with patch.dict(xapi.__salt__, {"config.option": mock}): + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.setvcpus("salt", "1") + + +def test_vcpu_pin(): + """ + Test to Set which CPUs a VCPU can use. 
+ """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.vcpu_pin("salt", "1", "2") + assert xapi.vcpu_pin("salt", "1", "2") + with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): + mock = MagicMock(return_value=True) + with patch.dict(xapi.__salt__, {"config.option": mock}): + with patch.object(xapi, "_get_label_uuid", mock): + with patch.dict(xapi.__salt__, {"cmd.run": mock}): + assert xapi.vcpu_pin("salt", "1", "2") + + +def test_freemem(): + """ + Test to return an int representing the amount of memory + that has not been given to virtual machines on this node + """ + mock = MagicMock(return_value={"free_memory": 1024}) + with patch.object(xapi, "node_info", mock): + assert xapi.freemem() == 1024 + + +def test_freecpu(): + """ + Test to return an int representing the number + of unallocated cpus on this hypervisor + """ + mock = MagicMock(return_value={"free_cpus": 1024}) + with patch.object(xapi, "node_info", mock): + assert xapi.freecpu() == 1024 + + +def test_full_info(): + """ + Test to return the node_info, vm_info and freemem + """ + mock = MagicMock(return_value="salt") + with patch.object(xapi, "node_info", mock): + mock = MagicMock(return_value="stack") + with patch.object(xapi, "vm_info", mock): + assert xapi.full_info() == {"node_info": "salt", "vm_info": "stack"} + + +def test_shutdown(): + """ + Test to send a soft shutdown signal to the named vm + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.shutdown("salt") + assert xapi.shutdown("salt") + with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): + mock = MagicMock(return_value=True) + with patch.dict(xapi.__salt__, {"config.option": mock}): + with patch.object(xapi, 
"_get_label_uuid", mock): + assert not xapi.shutdown("salt") + + +def test_pause(): + """ + Test to pause the named vm + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.pause("salt") + assert xapi.pause("salt") + with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): + mock = MagicMock(return_value=True) + with patch.dict(xapi.__salt__, {"config.option": mock}): + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.pause("salt") + + +def test_resume(): + """ + Test to resume the named vm + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.resume("salt") + assert xapi.resume("salt") + with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): + mock = MagicMock(return_value=True) + with patch.dict(xapi.__salt__, {"config.option": mock}): + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.resume("salt") + + +def test_start(): + """ + Test to reboot a domain via ACPI request + """ + mock = MagicMock(return_value=True) + with patch.object(xapi, "start", mock): + assert xapi.start("salt") + + +def test_reboot(): + """ + Test to reboot a domain via ACPI request + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.reboot("salt") + assert xapi.reboot("salt") + with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): + mock = MagicMock(return_value=True) + with patch.dict(xapi.__salt__, {"config.option": mock}): + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.reboot("salt") + + +def test_reset(): + """ + Test to reset a VM by emulating the + 
reset button on a physical machine + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.reset("salt") + assert xapi.reset("salt") + with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): + mock = MagicMock(return_value=True) + with patch.dict(xapi.__salt__, {"config.option": mock}): + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.reset("salt") + + +def test_migrate(): + """ + Test to migrates the virtual machine to another hypervisor + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.migrate("salt", "stack") + assert xapi.migrate("salt", "stack") + with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): + mock = MagicMock(return_value=True) + with patch.dict(xapi.__salt__, {"config.option": mock}): + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.migrate("salt", "stack") + + +def test_stop(): + """ + Test to Hard power down the virtual machine, + this is equivalent to pulling the power + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.stop("salt") + assert xapi.stop("salt") + + with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): + mock = MagicMock(return_value=True) + with patch.dict(xapi.__salt__, {"config.option": mock}): + with patch.object(xapi, "_get_label_uuid", mock): + assert not xapi.stop("salt") + + +def test_is_hyper(): + """ + Test to returns a bool whether or not + this node is a hypervisor of any kind + """ + with patch.dict(xapi.__grains__, {"virtual_subtype": "Dom0"}): + assert not xapi.is_hyper() + with 
patch.dict(xapi.__grains__, {"virtual": "Xen Dom0"}): + assert not xapi.is_hyper() + with patch.dict(xapi.__grains__, {"virtual_subtype": "Xen Dom0"}): + with patch("salt.utils.files.fopen", mock_open(read_data="salt")): + assert not xapi.is_hyper() + with patch("salt.utils.files.fopen", mock_open()) as mock_read: + mock_read.side_effect = IOError + assert not xapi.is_hyper() + with patch("salt.utils.files.fopen", mock_open(read_data="xen_")): + with patch.dict(xapi.__grains__, {"ps": "salt"}): + mock = MagicMock(return_value={"xenstore": "salt"}) + with patch.dict(xapi.__salt__, {"cmd.run": mock}): + assert xapi.is_hyper() + + +def test_vm_cputime(): + """ + Test to Return cputime used by the vms + """ + ret = {"1": {"cputime_percent": 0, "cputime": 1}} + with patch.object(xapi, "_get_xapi_session", MagicMock()): + mock = MagicMock(return_value={"host_CPUs": "1"}) + with patch.object(xapi, "_get_record_by_label", mock): + mock = MagicMock( + return_value={"VCPUs_number": "1", "VCPUs_utilisation": {"0": "1"}} + ) + with patch.object(xapi, "_get_metrics_record", mock): + assert xapi.vm_cputime("1") == ret + mock = MagicMock(return_value={}) + with patch.object(xapi, "list_domains", mock): + assert xapi.vm_cputime("") == {} + + +def test_vm_netstats(): + """ + Test to return combined network counters used by the vms + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + assert xapi.vm_netstats("") == {} + + +def test_vm_diskstats(): + """ + Test to return disk usage counters used by the vms + """ + with patch.object(xapi, "_get_xapi_session", MagicMock()): + assert xapi.vm_diskstats("") == {} diff --git a/tests/pytests/unit/modules/test_xfs.py b/tests/pytests/unit/modules/test_xfs.py new file mode 100644 index 000000000000..9d1ce9dff34f --- /dev/null +++ b/tests/pytests/unit/modules/test_xfs.py @@ -0,0 +1,113 @@ +""" + Test cases for salt.modules.xfs +""" + + +import textwrap + +import pytest + +import salt.modules.xfs as xfs + +pytestmark = [ + 
pytest.mark.skip_on_windows(reason="xfs not available on windows"), + pytest.mark.skip_on_darwin(reason="xfs not available on darwin."), +] + + +@pytest.fixture +def configure_loader_modules(): + return {xfs: {}} + + +def test_blkid_output(): + """ + Test xfs._blkid_output when there is data + """ + blkid_export = textwrap.dedent( + """ + DEVNAME=/dev/sda1 + UUID=XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + TYPE=xfs + PARTUUID=YYYYYYYY-YY + + DEVNAME=/dev/sdb1 + PARTUUID=ZZZZZZZZ-ZZZZ-ZZZZ-ZZZZ-ZZZZZZZZZZZZ + """ + ) + # We expect to find only data from /dev/sda1, nothig from + # /dev/sdb1 + assert xfs._blkid_output(blkid_export) == { + "/dev/sda1": { + "label": None, + "partuuid": "YYYYYYYY-YY", + "uuid": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", + } + } + + +def test_parse_xfs_info(): + """ + Test parsing output from mkfs.xfs. + """ + data = textwrap.dedent( + """ + meta-data=/dev/vg00/testvol isize=512 agcount=4, agsize=1310720 blks + = sectsz=4096 attr=2, projid32bit=1 + = crc=1 finobt=1, sparse=1, rmapbt=0 + = reflink=1 + data = bsize=4096 blocks=5242880, imaxpct=25 + = sunit=0 swidth=0 blks + naming =version 2 bsize=4096 ascii-ci=0, ftype=1 + log =internal log bsize=4096 blocks=2560, version=2 + = sectsz=4096 sunit=1 blks, lazy-count=1 + realtime =none extsz=4096 blocks=0, rtextents=0 + Discarding blocks...Done. 
+ """ + ) + + assert xfs._parse_xfs_info(data) == { + "meta-data": { + "section": "/dev/vg00/testvol", + "isize": "512", + "agcount": "4", + "agsize": "1310720 blks", + "sectsz": "4096", + "attr": "2", + "projid32bit": "1", + "crc": "1", + "finobt": "1", + "sparse": "1", + "rmapbt": "0", + "reflink": "1", + }, + "data": { + "section": "data", + "bsize": "4096", + "blocks": "5242880", + "imaxpct": "25", + "sunit": "0", + "swidth": "0 blks", + }, + "naming": { + "section": "version 2", + "bsize": "4096", + "ascii-ci": "0", + "ftype": "1", + }, + "log": { + "section": "internal log", + "bsize": "4096", + "blocks": "2560", + "version": "2", + "sectsz": "4096", + "sunit": "1 blks", + "lazy-count": "1", + }, + "realtime": { + "section": "none", + "extsz": "4096", + "blocks": "0", + "rtextents": "0", + }, + } diff --git a/tests/pytests/unit/modules/test_xml.py b/tests/pytests/unit/modules/test_xml.py new file mode 100644 index 000000000000..334d2f2b73e4 --- /dev/null +++ b/tests/pytests/unit/modules/test_xml.py @@ -0,0 +1,77 @@ +""" + Tests for xml module +""" + + +import pytest + +from salt.modules import xml + + +@pytest.fixture +def xml_string(): + return """ + + + Christian Bale + Liam Neeson + Michael Caine + + + Tom Waits + B.B. 
King + Ray Charles + + + """ + + +@pytest.fixture +def configure_loader_modules(): + return {xml: {}} + + +def test_get_value(xml_string, tmp_path): + """ + Verify xml.get_value + """ + xml_file = tmp_path / "test_xml.xml" + xml_file.write_text(xml_string) + xml_result = xml.get_value(str(xml_file), ".//actor[@id='2']") + assert xml_result == "Liam Neeson" + + +def test_set_value(xml_string, tmp_path): + """ + Verify xml.set_value + """ + xml_file = tmp_path / "test_xml.xml" + xml_file.write_text(xml_string) + xml_result = xml.set_value(str(xml_file), ".//actor[@id='2']", "Patrick Stewart") + assert xml_result is True + xml_result = xml.get_value(str(xml_file), ".//actor[@id='2']") + assert xml_result == "Patrick Stewart" + + +def test_get_attribute(xml_string, tmp_path): + """ + Verify xml.get_attribute + """ + xml_file = tmp_path / "test_xml.xml" + xml_file.write_text(xml_string) + xml_result = xml.get_attribute(str(xml_file), ".//actor[@id='3']") + assert xml_result == {"id": "3"} + + +def test_set_attribute(xml_string, tmp_path): + """ + Verify xml.set_value + """ + xml_file = tmp_path / "test_xml.xml" + xml_file.write_text(xml_string) + xml_result = xml.set_attribute( + str(xml_file), ".//actor[@id='3']", "edited", "uh-huh" + ) + assert xml_result is True + xml_result = xml.get_attribute(str(xml_file), ".//actor[@id='3']") + assert xml_result == {"edited": "uh-huh", "id": "3"} diff --git a/tests/pytests/unit/modules/test_yumpkg.py b/tests/pytests/unit/modules/test_yumpkg.py index 1354ee5d2d0d..188ed58ec7ed 100644 --- a/tests/pytests/unit/modules/test_yumpkg.py +++ b/tests/pytests/unit/modules/test_yumpkg.py @@ -1,3 +1,4 @@ +import configparser import logging import os @@ -8,11 +9,15 @@ import salt.modules.rpm_lowpkg as rpm import salt.modules.yumpkg as yumpkg import salt.utils.platform -from salt.exceptions import CommandExecutionError, SaltInvocationError +from salt.exceptions import CommandExecutionError, MinionError, SaltInvocationError from 
tests.support.mock import MagicMock, Mock, call, patch log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.skip_unless_on_linux, +] + @pytest.fixture def configure_loader_modules(): @@ -28,7 +33,9 @@ def _add_data(data, key, value): "os_family": "RedHat", "osmajorrelease": 7, }, - "__salt__": {"pkg_resource.add_pkg": _add_data}, + "__salt__": { + "pkg_resource.add_pkg": _add_data, + }, }, pkg_resource: {}, } @@ -36,7 +43,6 @@ def _add_data(data, key, value): @pytest.fixture(scope="module") def list_repos_var(): - return { "base": { "file": "/etc/yum.repos.d/CentOS-Base.repo", @@ -93,6 +99,71 @@ def yum_and_dnf(request): yield request.param["cmd"] +def test__virtual_normal(): + assert yumpkg.__virtual__() == "pkg" + + +def test__virtual_yumpkg_api(): + with patch.dict(yumpkg.__opts__, {"yum_provider": "yumpkg_api"}): + assert yumpkg.__virtual__() == ( + False, + "Module yumpkg: yumpkg_api provider not available", + ) + + +def test__virtual_exception(): + with patch.dict(yumpkg.__grains__, {"os": 1}): + assert yumpkg.__virtual__() == ( + False, + "Module yumpkg: no yum based system detected", + ) + + +def test__virtual_no_yum(): + with patch.object(yumpkg, "_yum", MagicMock(return_value=None)): + assert yumpkg.__virtual__() == (False, "DNF nor YUM found") + + +def test__virtual_non_yum_system(): + with patch.dict(yumpkg.__grains__, {"os_family": "ubuntu"}): + assert yumpkg.__virtual__() == ( + False, + "Module yumpkg: no yum based system detected", + ) + + +def test_strip_headers(): + output = os.linesep.join(["spongebob", "squarepants", "squidward"]) + args = ("spongebob", "squarepants") + assert yumpkg._strip_headers(output, *args) == "squidward\n" + + +def test_get_copr_repo(): + result = yumpkg._get_copr_repo("copr:spongebob/squarepants") + assert result == "copr:copr.fedorainfracloud.org:spongebob:squarepants" + + +def test_get_hold(): + line = "vim-enhanced-2:7.4.827-1.fc22" + with patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")): + 
assert yumpkg._get_hold(line) == "vim-enhanced-2:7.4.827-1.fc22" + + +def test_get_options(): + result = yumpkg._get_options( + repo="spongebob", + disableexcludes="squarepants", + __dunder_keyword="this is skipped", + stringvalue="string_value", + boolvalue=True, + get_extra_options=True, + ) + assert "--enablerepo=spongebob" in result + assert "--disableexcludes=squarepants" in result + assert "--stringvalue=string_value" in result + assert "--boolvalue" in result + + def test_list_pkgs(): """ Test packages listing. @@ -118,9 +189,10 @@ def _add_data(data, key, value): "openssh_|-(none)_|-6.6.1p1_|-33.el7_3_|-x86_64_|-(none)_|-1487838485", "virt-what_|-(none)_|-1.13_|-8.el7_|-x86_64_|-(none)_|-1487838486", ] + cmd_mod = MagicMock(return_value=os.linesep.join(rpm_out)) with patch.dict(yumpkg.__grains__, {"osarch": "x86_64"}), patch.dict( yumpkg.__salt__, - {"cmd.run": MagicMock(return_value=os.linesep.join(rpm_out))}, + {"cmd.run": cmd_mod}, ), patch.dict(yumpkg.__salt__, {"pkg_resource.add_pkg": _add_data}), patch.dict( yumpkg.__salt__, {"pkg_resource.format_pkg_list": pkg_resource.format_pkg_list}, @@ -147,6 +219,18 @@ def _add_data(data, key, value): }.items(): assert pkgs.get(pkg_name) is not None assert pkgs[pkg_name] == [pkg_version] + cmd_mod.assert_called_once_with( + [ + "rpm", + "-qa", + "--nodigest", + "--nosignature", + "--queryformat", + "%{NAME}_|-%{EPOCH}_|-%{VERSION}_|-%{RELEASE}_|-%{ARCH}_|-(none)_|-%{INSTALLTIME}\n", + ], + output_loglevel="trace", + python_shell=False, + ) def test_list_pkgs_no_context(): @@ -455,6 +539,16 @@ def test_list_patches(): assert _patch in patches["my-fake-patch-installed-1234"]["summary"] +def test_list_patches_refresh(): + expected = ["spongebob"] + mock_get_patches = MagicMock(return_value=expected) + patch_get_patches = patch.object(yumpkg, "_get_patches", mock_get_patches) + patch_refresh_db = patch.object(yumpkg, "refresh_db", MagicMock()) + with patch_refresh_db, patch_get_patches: + result = 
yumpkg.list_patches(refresh=True) + assert result == expected + + def test_latest_version_with_options(): with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})): @@ -546,6 +640,66 @@ def test_latest_version_with_options(): ) +def test_list_repo_pkgs_attribute_error(): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + mock_run = MagicMock(return_value="3.4.5") + patch_run = patch.dict(yumpkg.__salt__, {"cmd.run": mock_run}) + mock_yum = MagicMock(return_value={"retcode": 0, "stdout": ""}) + patch_yum = patch.object(yumpkg, "_call_yum", mock_yum) + with patch_get_options, patch_run, patch_yum: + assert yumpkg.list_repo_pkgs(fromrepo=1, disablerepo=2, enablerepo=3) == {} + + +def test_list_repo_pkgs_byrepo(list_repos_var): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + stdout_installed = """\ +Installed Packages +spongebob.x86_64 1.1.el9_1 @bikini-bottom-rpms +squarepants.x86_64 1.2.el9_1 @bikini-bottom-rpms +patrick.noarch 1.3.el9_1 @rock-bottom-rpms +squidward.x86_64 1.4.el9_1 @rock-bottom-rpms""" + stdout_available = """\ +Available Packages +plankton.noarch 2.1-1.el9_2 bikini-bottom-rpms +dennis.x86_64 2.2-2.el9 bikini-bottom-rpms +man-ray.x86_64 2.3-1.el9_2 bikini-bottom-rpms +doodlebob.x86_64 2.4-1.el9_2 bikini-bottom-rpms""" + run_all_side_effect = ( + {"retcode": 0, "stdout": stdout_installed}, + {"retcode": 0, "stdout": stdout_available}, + ) + patch_salt = patch.dict( + yumpkg.__salt__, + { + "cmd.run": MagicMock(return_value="3.4.5"), + "cmd.run_all": MagicMock(side_effect=run_all_side_effect), + "config.get": MagicMock(return_value=False), + }, + ) + patch_list_repos = patch.object( + yumpkg, + "list_repos", + MagicMock(return_value=list_repos_var), + ) + with patch_get_options, patch_salt, patch_list_repos: + expected = { + "bikini-bottom-rpms": { + "dennis": ["2.2-2.el9"], + "doodlebob": ["2.4-1.el9_2"], + "man-ray": ["2.3-1.el9_2"], + "plankton": ["2.1-1.el9_2"], + "spongebob": 
["1.1.el9_1"], + "squarepants": ["1.2.el9_1"], + }, + "rock-bottom-rpms": { + "patrick": ["1.3.el9_1"], + "squidward": ["1.4.el9_1"], + }, + } + result = yumpkg.list_repo_pkgs(byrepo=True) + assert result == expected + + def test_list_repo_pkgs_with_options(list_repos_var): """ Test list_repo_pkgs with and without fromrepo @@ -749,6 +903,87 @@ def test_list_upgrades_dnf(): ) +def test_list_upgrades_refresh(): + mock_call_yum = MagicMock(return_value={"retcode": 0, "stdout": ""}) + with patch.object(yumpkg, "refresh_db", MagicMock()): + with patch.object(yumpkg, "_call_yum", mock_call_yum): + assert yumpkg.list_upgrades(refresh=True) == {} + + +def test_list_upgrades_error(): + mock_return = {"retcode": 1, "Error:": "Error"} + mock_call_yum = MagicMock(return_value=mock_return) + with patch.object(yumpkg, "_call_yum", mock_call_yum): + assert yumpkg.list_upgrades(refresh=False) == {} + + +def test_list_downloaded(): + mock_walk = MagicMock( + return_value=[ + ( + "/var/cache/yum", + [], + ["pkg1-3.1-16.1.x86_64.rpm", "pkg2-1.2-13.2.x86_64.rpm"], + ) + ] + ) + mock_pkginfo = MagicMock( + side_effect=[ + { + "name": "pkg1", + "version": "3.1", + }, + { + "name": "pkg2", + "version": "1.2", + }, + ] + ) + mock_getctime = MagicMock(return_value=1696536082.861206) + mock_getsize = MagicMock(return_value=75701688) + with patch.dict(yumpkg.__salt__, {"lowpkg.bin_pkg_info": mock_pkginfo}), patch( + "salt.utils.path.os_walk", mock_walk + ), patch("os.path.getctime", mock_getctime), patch("os.path.getsize", mock_getsize): + result = yumpkg.list_downloaded() + expected = { + "pkg1": { + "3.1": { + "creation_date_time": "2023-10-05T14:01:22", + "creation_date_time_t": 1696536082, + "path": "/var/cache/yum/pkg1-3.1-16.1.x86_64.rpm", + "size": 75701688, + }, + }, + "pkg2": { + "1.2": { + "creation_date_time": "2023-10-05T14:01:22", + "creation_date_time_t": 1696536082, + "path": "/var/cache/yum/pkg2-1.2-13.2.x86_64.rpm", + "size": 75701688, + }, + }, + } + assert ( + 
result["pkg1"]["3.1"]["creation_date_time_t"] + == expected["pkg1"]["3.1"]["creation_date_time_t"] + ) + assert result["pkg1"]["3.1"]["path"] == expected["pkg1"]["3.1"]["path"] + assert result["pkg1"]["3.1"]["size"] == expected["pkg1"]["3.1"]["size"] + assert ( + result["pkg2"]["1.2"]["creation_date_time_t"] + == expected["pkg2"]["1.2"]["creation_date_time_t"] + ) + assert result["pkg2"]["1.2"]["path"] == expected["pkg2"]["1.2"]["path"] + assert result["pkg2"]["1.2"]["size"] == expected["pkg2"]["1.2"]["size"] + + +def test_list_installed_patches(): + mock_get_patches = MagicMock(return_value="spongebob") + with patch.object(yumpkg, "_get_patches", mock_get_patches): + result = yumpkg.list_installed_patches() + assert result == "spongebob" + + def test_list_upgrades_yum(): """ The subcommand should be "updates" with yum @@ -802,6 +1037,213 @@ def test_list_upgrades_yum(): ) +def test_modified(): + mock = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.modified": mock}): + yumpkg.modified("spongebob", "squarepants") + mock.assert_called_once_with("spongebob", "squarepants") + + +def test_clean_metadata_with_options(): + + with patch("salt.utils.pkg.clear_rtag", Mock()): + + # With check_update=True we will do a cmd.run to run the clean_cmd, and + # then a separate cmd.retcode to check for updates. 
+ + # with fromrepo + yum_call = MagicMock() + with patch.dict( + yumpkg.__salt__, + {"cmd.run_all": yum_call, "config.get": MagicMock(return_value=False)}, + ): + yumpkg.clean_metadata(check_update=True, fromrepo="good", branch="foo") + + assert yum_call.call_count == 2 + yum_call.assert_any_call( + [ + "yum", + "--quiet", + "--assumeyes", + "clean", + "expire-cache", + "--disablerepo=*", + "--enablerepo=good", + "--branch=foo", + ], + env={}, + ignore_retcode=True, + output_loglevel="trace", + python_shell=False, + ) + yum_call.assert_any_call( + [ + "yum", + "--quiet", + "--assumeyes", + "check-update", + "--setopt=autocheck_running_kernel=false", + "--disablerepo=*", + "--enablerepo=good", + "--branch=foo", + ], + output_loglevel="trace", + env={}, + ignore_retcode=True, + python_shell=False, + ) + + +def test_del_repo_error(): + basedir = "/mr/krabs" + ret_dict = { + "spongebob": {"file": "/square/pants"}, + "patrick": {"file": "/squid/ward"}, + } + mock_list = MagicMock(return_value=ret_dict) + patch_list = patch.object(yumpkg, "list_repos", mock_list) + with patch_list: + result = yumpkg.del_repo("plankton", basedir=basedir) + expected = "Error: the plankton repo does not exist in ['/mr/krabs']" + assert result == expected + + result = yumpkg.del_repo("copr:plankton/karen", basedir=basedir) + expected = "Error: the copr:copr.fedorainfracloud.org:plankton:karen repo does not exist in ['/mr/krabs']" + assert result == expected + + +def test_del_repo_single_file(): + basedir = "/mr/krabs" + ret_dict = { + "spongebob": {"file": "/square/pants"}, + "patrick": {"file": "/squid/ward"}, + } + mock_list = MagicMock(return_value=ret_dict) + patch_list = patch.object(yumpkg, "list_repos", mock_list) + with patch_list, patch("os.remove"): + result = yumpkg.del_repo("spongebob", basedir=basedir) + expected = "File /square/pants containing repo spongebob has been removed" + assert result == expected + + +def test_download_error_no_packages(): + patch_which = 
patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + with patch_which, pytest.raises(SaltInvocationError): + yumpkg.download() + + +def test_download(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + patch_exists = patch("os.path.exists", MagicMock(return_value=False)) + patch_makedirs = patch("os.makedirs") + mock_listdir = MagicMock(side_effect=([], ["spongebob-1.2.rpm"])) + patch_listdir = patch("os.listdir", mock_listdir) + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_which, patch_exists, patch_makedirs, patch_listdir, patch_salt: + result = yumpkg.download("spongebob") + cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] + mock_run.assert_called_once_with( + cmd, output_loglevel="trace", python_shell=False + ) + expected = {"spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm"} + assert result == expected + + +def test_download_failed(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + patch_exists = patch("os.path.exists", MagicMock(return_value=True)) + mock_listdir = MagicMock(return_value=["spongebob-1.2.rpm", "junk.txt"]) + patch_listdir = patch("os.listdir", mock_listdir) + patch_unlink = patch("os.unlink") + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: + result = yumpkg.download("spongebob", "patrick") + cmd = [ + "yumdownloader", + "-q", + "--destdir=/var/cache/yum/packages", + "spongebob", + "patrick", + ] + mock_run.assert_called_once_with( + cmd, output_loglevel="trace", python_shell=False + ) + expected = { + "_error": "The following package(s) failed to download: patrick", + "spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm", + } + assert result == expected + + +def 
test_download_missing_yumdownloader(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value=None)) + with patch_which, pytest.raises(CommandExecutionError): + yumpkg.download("spongebob") + + +def test_download_to_purge(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + patch_exists = patch("os.path.exists", MagicMock(return_value=True)) + mock_listdir = MagicMock(return_value=["spongebob-1.2.rpm", "junk.txt"]) + patch_listdir = patch("os.listdir", mock_listdir) + patch_unlink = patch("os.unlink") + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: + result = yumpkg.download("spongebob") + cmd = ["yumdownloader", "-q", "--destdir=/var/cache/yum/packages", "spongebob"] + mock_run.assert_called_once_with( + cmd, output_loglevel="trace", python_shell=False + ) + expected = {"spongebob": "/var/cache/yum/packages/spongebob-1.2.rpm"} + assert result == expected + + +def test_download_unlink_error(): + patch_which = patch("salt.utils.path.which", MagicMock(return_value="path.exe")) + patch_exists = patch("os.path.exists", MagicMock(return_value=True)) + se_listdir = ( + ["spongebob-1.2.rpm", "junk.txt"], + ["spongebob1.2.rpm", "junk.txt"], + ) + mock_listdir = MagicMock(side_effect=se_listdir) + patch_listdir = patch("os.listdir", mock_listdir) + patch_unlink = patch("os.unlink", MagicMock(side_effect=OSError)) + mock_run = MagicMock() + dict_salt = { + "cmd.run": mock_run, + } + patch_salt = patch.dict(yumpkg.__salt__, dict_salt) + with patch_which, patch_exists, patch_listdir, patch_unlink, patch_salt: + with pytest.raises(CommandExecutionError): + yumpkg.download("spongebob") + + +def test_file_dict(): + mock = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.file_dict": mock}): + yumpkg.file_dict("spongebob", "squarepants") + 
mock.assert_called_once_with("spongebob", "squarepants") + + +def test_file_list(): + mock = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.file_list": mock}): + yumpkg.file_list("spongebob", "squarepants") + mock.assert_called_once_with("spongebob", "squarepants") + + def test_refresh_db_with_options(): with patch("salt.utils.pkg.clear_rtag", Mock()): @@ -1027,6 +1469,36 @@ def test_install_with_options(): ) +def test_remove_retcode_error(): + """ + Tests that we throw an error if retcode isn't 0 + """ + name = "foo" + installed = "8:3.8.12-4.n.el7" + list_pkgs_mock = MagicMock( + side_effect=lambda **kwargs: { + name: [installed] if kwargs.get("versions_as_list", False) else installed + } + ) + cmd_mock = MagicMock( + return_value={"pid": 12345, "retcode": 1, "stdout": "", "stderr": "error"} + ) + salt_mock = { + "cmd.run_all": cmd_mock, + "lowpkg.version_cmp": rpm.version_cmp, + "pkg_resource.parse_targets": MagicMock( + return_value=({name: installed}, "repository") + ), + } + with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch( + "salt.utils.systemd.has_scope", MagicMock(return_value=False) + ), patch.dict(yumpkg.__salt__, salt_mock), patch.dict( + yumpkg.__grains__, {"os": "CentOS", "osrelease": 7} + ): + with pytest.raises(CommandExecutionError): + yumpkg.remove("spongebob") + + def test_remove_with_epoch(): """ Tests that we properly identify a version containing an epoch for @@ -1215,6 +1687,54 @@ def test_install_with_epoch(): assert call == expected, call +def test_install_minion_error(): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + patch_salt = patch.dict( + yumpkg.__salt__, + { + "pkg_resource.parse_targets": MagicMock(side_effect=MinionError), + }, + ) + with patch_get_options, patch_salt: + with pytest.raises(CommandExecutionError): + yumpkg.install("spongebob") + + +def test_install_no_pkg_params(): + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + parse_return = ("", "junk") + 
patch_salt = patch.dict( + yumpkg.__salt__, + { + "pkg_resource.parse_targets": MagicMock(return_value=parse_return), + }, + ) + with patch_get_options, patch_salt: + assert yumpkg.install("spongebob") == {} + + +# My dufus attempt... but I gave up +# def test_install_repo_fancy_versions(): +# patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) +# packages = { +# "spongbob": "1*", +# "squarepants": ">1.2", +# } +# parse_return = (packages, "repository") +# patch_salt = patch.dict( +# yumpkg.__salt__, +# { +# "pkg_resource.parse_targets": MagicMock(return_value=parse_return), +# }, +# ) +# list_pkgs = {"vim": "1.1,1.2", "git": "2.1,2.2"} +# list_pkgs_list = {"vim": ["1.1", "1.2"], "git": ["2.1", "2.2"]} +# mock_list_pkgs = MagicMock(side_effect=(list_pkgs, list_pkgs_list)) +# patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) +# with patch_get_options, patch_salt, patch_list_pkgs: +# assert yumpkg.install("spongebob") == {} + + @pytest.mark.skipif(not salt.utils.platform.is_linux(), reason="Only run on Linux") def test_install_error_reporting(): """ @@ -1253,6 +1773,13 @@ def test_install_error_reporting(): assert exc_info.value.info == expected, exc_info.value.info +def test_remove_error(): + mock_salt = {"pkg_resource.parse_targets": MagicMock(side_effect=MinionError)} + with patch.dict(yumpkg.__salt__, mock_salt): + with pytest.raises(CommandExecutionError): + yumpkg.remove("spongebob") + + def test_remove_not_installed(): """ Tests that no exception raised on removing not installed package @@ -1290,6 +1817,17 @@ def test_remove_not_installed(): cmd_mock.assert_not_called() +def test_upgrade_error(): + patch_yum = patch.object(yumpkg, "_yum", return_value="yum") + patch_get_options = patch.object(yumpkg, "_get_options") + patch_list_pkgs = patch.object(yumpkg, "list_pkgs") + salt_dict = {"pkg_resource.parse_targets": MagicMock(side_effect=MinionError)} + patch_salt = patch.dict(yumpkg.__salt__, salt_dict) + with patch_yum, 
patch_get_options, patch_list_pkgs, patch_salt: + with pytest.raises(CommandExecutionError): + yumpkg.upgrade("spongebob", refresh=False) + + def test_upgrade_with_options(): with patch.object(yumpkg, "list_pkgs", MagicMock(return_value={})), patch( "salt.utils.systemd.has_scope", MagicMock(return_value=False) @@ -1304,6 +1842,7 @@ def test_upgrade_with_options(): exclude="kernel*", branch="foo", setopt="obsoletes=0,plugins=0", + skip_verify=True, ) cmd.assert_called_once_with( [ @@ -1318,6 +1857,7 @@ def test_upgrade_with_options(): "--setopt", "plugins=0", "--exclude=kernel*", + "--nogpgcheck", "upgrade", ], env={}, @@ -1325,6 +1865,19 @@ def test_upgrade_with_options(): python_shell=False, ) + # with fromrepo + cmd = MagicMock(return_value={"retcode": 1}) + with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}): + with pytest.raises(CommandExecutionError): + yumpkg.upgrade( + refresh=False, + fromrepo="good", + exclude="kernel*", + branch="foo", + setopt="obsoletes=0,plugins=0", + skip_verify=True, + ) + # without fromrepo cmd = MagicMock(return_value={"retcode": 0}) with patch.dict(yumpkg.__salt__, {"cmd.run_all": cmd}): @@ -1357,6 +1910,64 @@ def test_upgrade_with_options(): ) +def test_upgrade_available(): + mock_return = MagicMock(return_value="non-empty value") + patch_latest_version = patch.object(yumpkg, "latest_version", mock_return) + with patch_latest_version: + assert yumpkg.upgrade_available("foo") is True + + +def test_verify_args(): + mock_verify = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.verify": mock_verify}): + yumpkg.verify("spongebob") + mock_verify.assert_called_once_with("spongebob") + + +def test_verify_kwargs(): + mock_verify = MagicMock() + with patch.dict(yumpkg.__salt__, {"lowpkg.verify": mock_verify}): + yumpkg.verify(spongebob="squarepants") + mock_verify.assert_called_once_with(spongebob="squarepants") + + +def test_purge_not_installed(): + """ + Tests that no exception raised on purging not installed package + """ + 
name = "foo" + list_pkgs_mock = MagicMock(return_value={}) + cmd_mock = MagicMock( + return_value={"pid": 12345, "retcode": 0, "stdout": "", "stderr": ""} + ) + salt_mock = { + "cmd.run_all": cmd_mock, + "lowpkg.version_cmp": rpm.version_cmp, + "pkg_resource.parse_targets": MagicMock( + return_value=({name: None}, "repository") + ), + } + with patch.object(yumpkg, "list_pkgs", list_pkgs_mock), patch( + "salt.utils.systemd.has_scope", MagicMock(return_value=False) + ), patch.dict(yumpkg.__salt__, salt_mock): + + # Test yum + with patch.dict(yumpkg.__context__, {"yum_bin": "yum"}), patch.dict( + yumpkg.__grains__, {"os": "CentOS", "osrelease": 7} + ): + yumpkg.purge(name) + cmd_mock.assert_not_called() + + # Test dnf + yumpkg.__context__.pop("yum_bin") + cmd_mock.reset_mock() + with patch.dict(yumpkg.__context__, {"yum_bin": "dnf"}), patch.dict( + yumpkg.__grains__, {"os": "Fedora", "osrelease": 27} + ): + yumpkg.purge(name) + cmd_mock.assert_not_called() + + def test_info_installed_with_all_versions(): """ Test the return information of all versions for the named package(s), installed on the system. 
@@ -1512,6 +2123,260 @@ def test_pkg_hold_tdnf(): yumpkg.hold("foo") +def test_hold_empty(): + """ + Tests that we raise a SaltInvocationError if nothing is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.hold() + + +def test_hold_pkgs_and_sources_error(): + """ + Tests that we raise a SaltInvocationError if both pkgs and sources is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.hold(pkgs=["foo", "bar"], sources=["src1", "src2"]) + + +def test_hold_pkgs_sources(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + patch_list_holds = patch.object(yumpkg, "list_holds", MagicMock()) + mock_call_yum = MagicMock(return_value={"retcode": 0}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + expected = { + "foo": { + "name": "foo", + "changes": { + "new": "hold", + "old": "", + }, + "result": True, + "comment": "Package foo is now being held.", + }, + "bar": { + "name": "bar", + "changes": { + "new": "hold", + "old": "", + }, + "result": True, + "comment": "Package bar is now being held.", + }, + } + sources = [{"foo": "salt://foo.rpm"}, {"bar": "salt://bar.rpm"}] + pkgs = ["foo", "bar"] + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(sources=sources) + assert result == expected + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(pkgs=pkgs) + assert result == expected + + +def test_hold_test_true(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + patch_list_holds = patch.object(yumpkg, "list_holds", MagicMock()) + mock_call_yum = MagicMock(return_value={"retcode": 0}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": 
True}) + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": None, + "comment": "Package foo is set to be held.", + }, + } + assert result == expected + + +def test_hold_fails(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + patch_list_holds = patch.object(yumpkg, "list_holds", MagicMock()) + mock_call_yum = MagicMock(return_value={"retcode": 1}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts: + result = yumpkg.hold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": False, + "comment": "Package foo was unable to be held.", + }, + } + assert result == expected + + +def test_hold_already_held(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo"]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + with patch_versionlock, patch_list_holds: + result = yumpkg.hold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": True, + "comment": "Package foo is already set to be held.", + }, + } + assert result == expected + + +def test_unhold_empty(): + """ + Tests that we raise a SaltInvocationError if nothing is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.unhold() + + +def test_unhold_pkgs_and_sources_error(): + """ + Tests that we raise a SaltInvocationError if both pkgs and sources is passed + """ + with patch.object(yumpkg, "_check_versionlock", MagicMock()): + with pytest.raises(SaltInvocationError): + yumpkg.unhold(pkgs=["foo", "bar"], sources=["src1", "src2"]) + + +def test_unhold_pkgs_sources(): + patch_versionlock = 
patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo", "bar"]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + mock_call_yum = MagicMock(return_value={"retcode": 0}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + patch_yum = patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")) + expected = { + "foo": { + "name": "foo", + "changes": { + "new": "", + "old": "hold", + }, + "result": True, + "comment": "Package foo is no longer held.", + }, + "bar": { + "name": "bar", + "changes": { + "new": "", + "old": "hold", + }, + "result": True, + "comment": "Package bar is no longer held.", + }, + } + sources = [{"foo": "salt://foo.rpm"}, {"bar": "salt://bar.rpm"}] + pkgs = ["foo", "bar"] + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts, patch_yum: + result = yumpkg.unhold(sources=sources) + assert result == expected + + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts, patch_yum: + result = yumpkg.unhold(pkgs=pkgs) + assert result == expected + + +def test_unhold_test_true(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo"]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + patch_opts = patch.dict(yumpkg.__opts__, {"test": True}) + patch_yum = patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")) + with patch_versionlock, patch_list_holds, patch_opts, patch_yum: + result = yumpkg.unhold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": None, + "comment": "Package foo is set to be unheld.", + }, + } + assert result == expected + + +def test_unhold_fails(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=["foo"]) + patch_list_holds = patch.object(yumpkg, 
"list_holds", mock_list_holds) + mock_call_yum = MagicMock(return_value={"retcode": 1}) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + patch_opts = patch.dict(yumpkg.__opts__, {"test": False}) + patch_yum = patch.object(yumpkg, "_yum", MagicMock(return_value="dnf")) + with patch_versionlock, patch_list_holds, patch_call_yum, patch_opts, patch_yum: + result = yumpkg.unhold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": False, + "comment": "Package foo was unable to be unheld.", + }, + } + assert result == expected + + +def test_unhold_already_unheld(): + patch_versionlock = patch.object(yumpkg, "_check_versionlock", MagicMock()) + mock_list_holds = MagicMock(return_value=[]) + patch_list_holds = patch.object(yumpkg, "list_holds", mock_list_holds) + with patch_versionlock, patch_list_holds: + result = yumpkg.unhold(name="foo") + expected = { + "foo": { + "name": "foo", + "changes": {}, + "result": True, + "comment": "Package foo is not being held.", + }, + } + assert result == expected + + +def test_owner_empty(): + assert yumpkg.owner() == "" + + +def test_owner_not_owned(): + mock_stdout = MagicMock(return_value="not owned") + expected = { + "/fake/path1": "", + "/fake/path2": "", + } + with patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_stdout}): + result = yumpkg.owner(*expected.keys()) + assert result == expected + + +def test_owner_not_owned_single(): + mock_stdout = MagicMock(return_value="not owned") + with patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_stdout}): + result = yumpkg.owner("/fake/path") + assert result == "" + + +def test_parse_repo_file_error(): + mock_read = MagicMock( + side_effect=configparser.MissingSectionHeaderError("spongebob", 101, "test2") + ) + with patch.object(configparser.ConfigParser, "read", mock_read): + result = yumpkg._parse_repo_file("spongebob") + assert result == ("", {}) + + def test_pkg_hold_dnf(): """ Tests that we properly identify versionlock plugin 
when using dnf @@ -1593,14 +2458,84 @@ def test_pkg_hold_dnf(): ) -@pytest.mark.skipif(not yumpkg.HAS_YUM, reason="Could not import yum") -def test_yum_base_error(): - with patch("yum.YumBase") as mock_yum_yumbase: - mock_yum_yumbase.side_effect = CommandExecutionError +def test_get_yum_config_no_config(): + with patch("os.path.exists", MagicMock(return_value=False)): + with pytest.raises(CommandExecutionError): + yumpkg._get_yum_config() + + +def test_get_yum_config(grains): + os_family = grains["os_family"] + if os_family in ("Arch", "Debian", "Suse"): + pytest.skip(f"{os_family} does not have yum.conf") + setting = "cache_dir" + if os_family == "RedHat": + # This one seems to be in all of them... + # If this ever breaks in the future, we'll need to get more specific + # than os_family + setting = "installonly_limit" + result = yumpkg._get_yum_config() + assert setting in result + + +def test_get_yum_config_value_none(grains): + os_family = grains["os_family"] + if os_family in ("Arch", "Debian", "Suse"): + pytest.skip(f"{os_family} does not have yum.conf") + result = yumpkg._get_yum_config_value("spongebob") + assert result is None + + +def test_get_yum_config_unreadable(): + with patch.object( + configparser.ConfigParser, "read", MagicMock(side_effect=OSError) + ): with pytest.raises(CommandExecutionError): yumpkg._get_yum_config() +def test_get_yum_config_no_main(caplog): + mock_false = MagicMock(return_value=False) + with patch.object(configparser.ConfigParser, "read"), patch.object( + configparser.ConfigParser, "has_section", mock_false + ), patch("os.path.exists", MagicMock(return_value=True)): + yumpkg._get_yum_config() + assert "Could not find [main] section" in caplog.text + + +def test_normalize_basedir_str(): + basedir = "/etc/yum/yum.conf,/etc/yum.conf" + result = yumpkg._normalize_basedir(basedir) + assert result == ["/etc/yum/yum.conf", "/etc/yum.conf"] + + +def test_normalize_basedir_error(): + basedir = 1 + with pytest.raises(SaltInvocationError): 
+ yumpkg._normalize_basedir(basedir) + + +def test_normalize_name_noarch(): + assert yumpkg.normalize_name("zsh.noarch") == "zsh" + + +def test_latest_version_no_names(): + assert yumpkg.latest_version() == "" + + +def test_latest_version_nonzero_retcode(): + yum_ret = {"retcode": 1, "stderr": "some error"} + mock_call_yum = MagicMock(return_value=yum_ret) + patch_call_yum = patch.object(yumpkg, "_call_yum", mock_call_yum) + list_pkgs_ret = {"foo": "1.1", "bar": "2.2"} + mock_list_pkgs = MagicMock(return_value=list_pkgs_ret) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_get_options = patch.object(yumpkg, "_get_options", MagicMock()) + patch_refresh_db = patch.object(yumpkg, "refresh_db", MagicMock()) + with patch_list_pkgs, patch_call_yum, patch_get_options, patch_refresh_db: + assert yumpkg.latest_version("foo", "bar") == {"foo": "", "bar": ""} + + def test_group_info(): """ Test yumpkg.group_info parsing @@ -1842,6 +2777,180 @@ def test_group_info(): assert info == expected +def test_group_install(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + { + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + mock_list_pkgs = MagicMock(return_value=[]) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_install = patch.object(yumpkg, "install", MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "plankton", + "spongebob", + "patrick", + ] + with patch_info, patch_list_pkgs, patch_install: + yumpkg.group_install("spongebob,mr_krabs") + _, kwargs = yumpkg.install.call_args + assert kwargs["pkgs"].sort() == expected.sort() + + +def test_group_install_include(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + 
{ + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + mock_list_pkgs = MagicMock(return_value=[]) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_install = patch.object(yumpkg, "install", MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "plankton", + "spongebob", + "patrick", + ] + with patch_info, patch_list_pkgs, patch_install: + yumpkg.group_install("spongebob,mr_krabs", include="napoleon") + _, kwargs = yumpkg.install.call_args + expected.append("napoleon") + assert kwargs["pkgs"].sort() == expected.sort() + + +def test_group_install_skip(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + { + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + mock_list_pkgs = MagicMock(return_value=[]) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + patch_install = patch.object(yumpkg, "install", MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "spongebob", + "patrick", + ] + with patch_info, patch_list_pkgs, patch_install: + yumpkg.group_install("spongebob,mr_krabs", skip="plankton") + _, kwargs = yumpkg.install.call_args + assert kwargs["pkgs"].sort() == expected.sort() + + +def test_group_install_already_present(): + group_info = ( + { + "default": ["spongebob", "gary", "patrick"], + "mandatory": ["spongebob", "gary"], + }, + { + "default": ["mr_krabs", "pearl_krabs", "plankton"], + "mandatory": ["mr_krabs", "pearl_krabs"], + }, + ) + mock_info = MagicMock(side_effect=group_info) + patch_info = patch.object(yumpkg, "group_info", mock_info) + patch_install = patch.object(yumpkg, "install", 
MagicMock()) + expected = [ + "mr_krabs", + "gary", + "pearl_krabs", + "plankton", + "spongebob", + "patrick", + ] + mock_list_pkgs = MagicMock(return_value=expected) + patch_list_pkgs = patch.object(yumpkg, "list_pkgs", mock_list_pkgs) + with patch_info, patch_list_pkgs, patch_install: + assert yumpkg.group_install("spongebob,mr_krabs") == {} + + +def test_group_install_no_groups(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(None) + + +def test_group_install_non_list_groups(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(1) + + +def test_group_install_non_list_skip(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(name="string", skip=1) + + +def test_group_install_non_list_include(): + with pytest.raises(SaltInvocationError): + yumpkg.group_install(name="string", include=1) + + +def test_group_list(): + mock_out = MagicMock( + return_value="""\ +Available Environment Groups: + Spongebob + Squarepants +Installed Environment Groups: + Patrick +Installed Groups: + Squidward + Sandy +Available Groups: + Mr. Krabs + Plankton +Available Language Groups: + Gary the Snail [sb]\ + """ + ) + patch_grplist = patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_out}) + with patch_grplist: + result = yumpkg.group_list() + expected = { + "installed": ["Squidward", "Sandy"], + "available": ["Mr. 
Krabs", "Plankton"], + "installed environments": ["Patrick"], + "available environments": ["Spongebob", "Squarepants"], + "available languages": { + "Gary the Snail [sb]": { + "language": "sb", + "name": "Gary the Snail", + }, + }, + } + assert result == expected + + def test_get_repo_with_existent_repo(list_repos_var): """ Test get_repo with an existent repository @@ -2050,6 +3159,15 @@ def test_services_need_restart_requires_dnf(): pytest.raises(CommandExecutionError, yumpkg.services_need_restart) +def test_services_need_restart_no_dnf_output(): + patch_yum = patch("salt.modules.yumpkg._yum", Mock(return_value="dnf")) + patch_booted = patch("salt.utils.systemd.booted", Mock(return_value=True)) + mock_run_stdout = MagicMock(return_value="") + patch_run_stdout = patch.dict(yumpkg.__salt__, {"cmd.run_stdout": mock_run_stdout}) + with patch_yum, patch_booted, patch_run_stdout: + assert yumpkg.services_need_restart() == [] + + def test_61003_pkg_should_not_fail_when_target_not_in_old_pkgs(): patch_list_pkgs = patch( "salt.modules.yumpkg.list_pkgs", return_value={}, autospec=True diff --git a/tests/pytests/unit/modules/test_zfs.py b/tests/pytests/unit/modules/test_zfs.py index 93f8087df9aa..a9d8a3016ec4 100644 --- a/tests/pytests/unit/modules/test_zfs.py +++ b/tests/pytests/unit/modules/test_zfs.py @@ -18,6 +18,10 @@ from tests.support.mock import MagicMock, patch from tests.support.zfs import ZFSMockData +pytestmark = [ + pytest.mark.slow_test, +] + @pytest.fixture def utils_patch(): diff --git a/tests/pytests/unit/modules/test_zpool.py b/tests/pytests/unit/modules/test_zpool.py index c4b8fd072e83..48fcf9e272c0 100644 --- a/tests/pytests/unit/modules/test_zpool.py +++ b/tests/pytests/unit/modules/test_zpool.py @@ -19,6 +19,10 @@ from tests.support.mock import MagicMock, patch from tests.support.zfs import ZFSMockData +pytestmark = [ + pytest.mark.slow_test, +] + @pytest.fixture def utils_patch(): diff --git a/tests/pytests/unit/modules/win_lgpo/test__policy_info.py 
b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py new file mode 100644 index 000000000000..0b9e25ee4d50 --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test__policy_info.py @@ -0,0 +1,410 @@ +import socket + +import pytest + +import salt.modules.cmdmod +import salt.modules.win_file +import salt.modules.win_lgpo as win_lgpo +from salt.exceptions import CommandExecutionError +from tests.support.mock import patch + +try: + import win32security as ws + + HAS_WIN32 = True +except ImportError: + HAS_WIN32 = False + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, + pytest.mark.skipif(not HAS_WIN32, reason="Failed to import win32security"), +] + + +@pytest.fixture +def configure_loader_modules(): + return { + win_lgpo: { + "__salt__": { + "cmd.run": salt.modules.cmdmod.run, + "file.file_exists": salt.modules.win_file.file_exists, + "file.remove": salt.modules.win_file.remove, + }, + }, + } + + +@pytest.fixture(scope="module") +def pol_info(): + return win_lgpo._policy_info() + + +@pytest.mark.parametrize( + "val, expected", + ( + (0, False), + (1, True), + ("", False), + ("text", True), + ([], False), + ([1, 2, 3], True), + ), +) +def test_notEmpty(pol_info, val, expected): + assert pol_info._notEmpty(val) is expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (86400, 1), + ), +) +def test_seconds_to_days(pol_info, val, expected): + assert pol_info._seconds_to_days(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (1, 86400), + ), +) +def test_days_to_seconds(pol_info, val, expected): + assert pol_info._days_to_seconds(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (0, 0), + (60, 1), + ), +) +def test_seconds_to_minutes(pol_info, val, expected): + assert pol_info._seconds_to_minutes(val) == expected + + +@pytest.mark.parametrize( + "val, 
expected", + ( + (None, "Not Defined"), + (0, 0), + (1, 60), + ), +) +def test_minutes_to_seconds(pol_info, val, expected): + assert pol_info._minutes_to_seconds(val) == expected + + +def test_strip_quotes(pol_info): + assert pol_info._strip_quotes('"spongebob"') == "spongebob" + + +def test_add_quotes(pol_info): + assert pol_info._add_quotes("squarepants") == '"squarepants"' + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + (chr(0), "Disabled"), + (chr(1), "Enabled"), + (chr(2), "Invalid Value: {!r}".format(chr(2))), + ("patrick", "Invalid Value"), + ), +) +def test_binary_enable_zero_disable_one_conversion(pol_info, val, expected): + assert pol_info._binary_enable_zero_disable_one_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, None), + ("Disabled", chr(0)), + ("Enabled", chr(1)), + ("Junk", None), + ), +) +def test_binary_enable_zero_disable_one_reverse_conversion(pol_info, val, expected): + assert pol_info._binary_enable_zero_disable_one_reverse_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("0", "Administrators"), + (0, "Administrators"), + ("", "Administrators"), + ("1", "Administrators and Power Users"), + (1, "Administrators and Power Users"), + ("2", "Administrators and Interactive Users"), + (2, "Administrators and Interactive Users"), + (3, "Not Defined"), + ), +) +def test_dasd_conversion(pol_info, val, expected): + assert pol_info._dasd_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("Administrators", "0"), + ("Administrators and Power Users", "1"), + ("Administrators and Interactive Users", "2"), + ("Not Defined", "9999"), + ("Plankton", "Invalid Value"), + ), +) +def test_dasd_reverse_conversion(pol_info, val, expected): + assert pol_info._dasd_reverse_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("Not Defined", 
True), + (None, False), + (1, True), + (3, False), + ("spongebob", False), + ), +) +def test_in_range_inclusive(pol_info, val, expected): + assert pol_info._in_range_inclusive(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("3,1,2", "Not Defined"), + ("3,0", "Silently Succeed"), + ("3,1", "Warn but allow installation"), + ("3,2", "Do not allow installation"), + ("3,Not Defined", "Not Defined"), + ("3,spongebob", "Invalid Value"), + ), +) +def test_driver_signing_reg_conversion(pol_info, val, expected): + assert pol_info._driver_signing_reg_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Defined"), + ("Silently Succeed", "3,0"), + ("Warn but allow installation", f"3,{chr(1)}"), + ("Do not allow installation", f"3,{chr(2)}"), + ("spongebob", "Invalid Value"), + ), +) +def test_driver_signing_reg_reverse_conversion(pol_info, val, expected): + assert pol_info._driver_signing_reg_reverse_conversion(val) == expected + + +# For the next 3 tests we can't use the parametrized decorator because the +# decorator is evaluated before the imports happen, so the HAS_WIN32 is ignored +# and the decorator tries to evaluate the win32security library on systems +# without pyWin32 +def test_sidConversion_no_conversion(pol_info): + val = ws.ConvertStringSidToSid("S-1-5-0") + expected = ["S-1-5-0"] + assert pol_info._sidConversion([val]) == expected + + +def test_sidConversion_everyone(pol_info): + val = ws.ConvertStringSidToSid("S-1-1-0") + expected = ["Everyone"] + assert pol_info._sidConversion([val]) == expected + + +def test_sidConversion_administrator(pol_info): + val = ws.LookupAccountName("", "Administrator")[0] + expected = [f"{socket.gethostname()}\\Administrator"] + assert pol_info._sidConversion([val]) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, None), + ("", ""), + ), +) +def test_usernamesToSidObjects_empty_value(pol_info, val, expected): + 
assert pol_info._usernamesToSidObjects(val) == expected + + +def test_usernamesToSidObjects_string_list(pol_info): + val = "Administrator,Guest" + admin_sid = ws.LookupAccountName("", "Administrator")[0] + guest_sid = ws.LookupAccountName("", "Guest")[0] + expected = [admin_sid, guest_sid] + assert pol_info._usernamesToSidObjects(val) == expected + + +def test_usernamesToSidObjects_string_list_error(pol_info): + val = "spongebob,squarepants" + with pytest.raises(CommandExecutionError): + pol_info._usernamesToSidObjects(val) + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, "Not Configured"), + ("None", "Not Configured"), + ("true", "Run Windows PowerShell scripts first"), + ("false", "Run Windows PowerShell scripts last"), + ("spongebob", "Invalid Value"), + ), +) +def test_powershell_script_order_conversion(pol_info, val, expected): + assert pol_info._powershell_script_order_conversion(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("Not Configured", None), + ("Run Windows PowerShell scripts first", "true"), + ("Run Windows PowerShell scripts last", "false"), + ("spongebob", "Invalid Value"), + ), +) +def test_powershell_script_order_reverse_conversion(pol_info, val, expected): + assert pol_info._powershell_script_order_reverse_conversion(val) == expected + + +def test_dict_lookup(pol_info): + lookup = { + "spongebob": "squarepants", + "patrick": "squidward", + "plankton": "mr.crabs", + } + assert pol_info._dict_lookup("spongebob", lookup=lookup) == "squarepants" + assert ( + pol_info._dict_lookup("squarepants", lookup=lookup, value_lookup=True) + == "spongebob" + ) + assert pol_info._dict_lookup("homer", lookup=lookup) == "Invalid Value" + assert ( + pol_info._dict_lookup("homer", lookup=lookup, value_lookup=True) + == "Invalid Value" + ) + assert pol_info._dict_lookup("homer") == "Invalid Value" + + +def test_dict_lookup_bitwise_add(pol_info): + lookup = { + 0: "spongebob", + 1: "squarepants", + 2: "patrick", + } + assert 
pol_info._dict_lookup_bitwise_add("Not Defined") is None + assert ( + pol_info._dict_lookup_bitwise_add("not a list", value_lookup=True) + == "Invalid Value: Not a list" + ) + assert ( + pol_info._dict_lookup_bitwise_add([], value_lookup=True) + == "Invalid Value: No lookup passed" + ) + assert ( + pol_info._dict_lookup_bitwise_add("not an int") == "Invalid Value: Not an int" + ) + assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup) == [] + assert ( + pol_info._dict_lookup_bitwise_add( + ["spongebob", "squarepants"], lookup=lookup, value_lookup=True + ) + == 1 + ) + assert pol_info._dict_lookup_bitwise_add(1, lookup=lookup) == ["squarepants"] + assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup) == [] + assert pol_info._dict_lookup_bitwise_add(0, lookup=lookup, test_zero=True) == [ + "spongebob" + ] + + +@pytest.mark.parametrize( + "val, expected", + ( + (["list", "of", "items"], ["list", "of", "items"]), + ("Not Defined", None), + ("list,of,items", ["list", "of", "items"]), + (7, "Invalid Value"), + ), +) +def test_multi_string_put_transform(pol_info, val, expected): + assert pol_info._multi_string_put_transform(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + (["list", "of", "items"], ["list", "of", "items"]), + (None, "Not Defined"), + ("list,of,items", "Invalid Value"), + (7, "Invalid Value"), + ), +) +def test_multi_string_get_transform(pol_info, val, expected): + assert pol_info._multi_string_get_transform(val) == expected + + +@pytest.mark.parametrize( + "val, expected", + ( + ("String Item", "String Item"), + ("Not Defined", None), + (7, None), + ), +) +def test_string_put_transform(pol_info, val, expected): + assert pol_info._string_put_transform(val) == expected + + +def test__virtual__(pol_info): + assert win_lgpo.__virtual__() == "lgpo" + with patch("salt.utils.platform.is_windows", return_value=False): + assert win_lgpo.__virtual__() == ( + False, + "win_lgpo: Not a Windows System", + ) + + with 
patch.object(win_lgpo, "HAS_WINDOWS_MODULES", False): + assert win_lgpo.__virtual__() == ( + False, + "win_lgpo: Required modules failed to load", + ) + + +@pytest.mark.parametrize( + "val, expected", + ( + (None, b"\x00\x00"), + ("spongebob", b"s\x00p\x00o\x00n\x00g\x00e\x00b\x00o\x00b\x00\x00\x00"), + ), +) +def test_encode_string(val, expected): + assert win_lgpo._encode_string(val) == expected + + +def test_encode_string_error(): + with pytest.raises(TypeError): + win_lgpo._encode_string(1) diff --git a/tests/pytests/unit/modules/win_lgpo/test_admx_policies.py b/tests/pytests/unit/modules/win_lgpo/test_admx_policies.py index b78e8059cf9b..f808dbf44676 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_admx_policies.py +++ b/tests/pytests/unit/modules/win_lgpo/test_admx_policies.py @@ -23,6 +23,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] @@ -106,154 +107,6 @@ def lgpo_bin(): yield str(sys_dir / "lgpo.exe") -def test_get_policy_name(osrelease): - if osrelease == "2022Server": - pytest.skip(f"Test is failing on {osrelease}") - if osrelease == "11": - policy_name = "Allow Diagnostic Data" - else: - policy_name = "Allow Telemetry" - result = win_lgpo.get_policy( - policy_name=policy_name, - policy_class="machine", - return_value_only=True, - return_full_policy_names=True, - hierarchical_return=False, - ) - expected = "Not Configured" - assert result == expected - - -def test_get_policy_id(): - result = win_lgpo.get_policy( - policy_name="AllowTelemetry", - policy_class="machine", - return_value_only=True, - return_full_policy_names=True, - hierarchical_return=False, - ) - expected = "Not Configured" - assert result == expected - - -def test_get_policy_name_full_return_full_names(osrelease): - if osrelease == "2022Server": - pytest.skip(f"Test is failing on {osrelease}") - if osrelease == "11": - policy_name = "Allow Diagnostic Data" - else: - policy_name = "Allow Telemetry" - result = 
win_lgpo.get_policy( - policy_name=policy_name, - policy_class="machine", - return_value_only=False, - return_full_policy_names=True, - hierarchical_return=False, - ) - key = "Windows Components\\Data Collection and Preview Builds\\{}" - expected = {key.format(policy_name): "Not Configured"} - assert result == expected - - -def test_get_policy_id_full_return_full_names(osrelease): - if osrelease == "2022Server": - pytest.skip(f"Test is failing on {osrelease}") - if osrelease == "11": - policy_name = "Allow Diagnostic Data" - else: - policy_name = "Allow Telemetry" - result = win_lgpo.get_policy( - policy_name="AllowTelemetry", - policy_class="machine", - return_value_only=False, - return_full_policy_names=True, - hierarchical_return=False, - ) - key = "Windows Components\\Data Collection and Preview Builds\\{}" - expected = {key.format(policy_name): "Not Configured"} - assert result == expected - - -def test_get_policy_name_full_return_ids(osrelease): - if osrelease == "2022Server": - pytest.skip(f"Test is failing on {osrelease}") - if osrelease == "11": - policy_name = "Allow Diagnostic Data" - else: - policy_name = "Allow Telemetry" - result = win_lgpo.get_policy( - policy_name=policy_name, - policy_class="machine", - return_value_only=False, - return_full_policy_names=False, - hierarchical_return=False, - ) - expected = {"AllowTelemetry": "Not Configured"} - assert result == expected - - -def test_get_policy_id_full_return_ids(): - result = win_lgpo.get_policy( - policy_name="AllowTelemetry", - policy_class="machine", - return_value_only=False, - return_full_policy_names=False, - hierarchical_return=False, - ) - expected = {"AllowTelemetry": "Not Configured"} - assert result == expected - - -def test_get_policy_id_full_return_ids_hierarchical(): - result = win_lgpo.get_policy( - policy_name="AllowTelemetry", - policy_class="machine", - return_value_only=False, - return_full_policy_names=False, - hierarchical_return=True, - ) - expected = { - "Computer 
Configuration": { - "Administrative Templates": { - "WindowsComponents": { - "DataCollectionAndPreviewBuilds": { - "AllowTelemetry": "Not Configured" - }, - }, - }, - }, - } - assert result == expected - - -def test_get_policy_name_return_full_names_hierarchical(osrelease): - if osrelease == "2022Server": - pytest.skip(f"Test is failing on {osrelease}") - if osrelease == "11": - policy_name = "Allow Diagnostic Data" - else: - policy_name = "Allow Telemetry" - result = win_lgpo.get_policy( - policy_name=policy_name, - policy_class="machine", - return_value_only=False, - return_full_policy_names=True, - hierarchical_return=True, - ) - expected = { - "Computer Configuration": { - "Administrative Templates": { - "Windows Components": { - "Data Collection and Preview Builds": { - policy_name: "Not Configured" - } - } - } - } - } - assert result == expected - - @pytest.mark.destructive_test def test__load_policy_definitions(): """ diff --git a/tests/pytests/unit/modules/win_lgpo/test_adv_audit_settings.py b/tests/pytests/unit/modules/win_lgpo/test_adv_audit.py similarity index 67% rename from tests/pytests/unit/modules/win_lgpo/test_adv_audit_settings.py rename to tests/pytests/unit/modules/win_lgpo/test_adv_audit.py index 8ff4796d4158..1f8e83eeab30 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_adv_audit_settings.py +++ b/tests/pytests/unit/modules/win_lgpo/test_adv_audit.py @@ -4,11 +4,14 @@ import salt.modules.win_lgpo as win_lgpo import salt.utils.win_dacl as win_dacl import salt.utils.win_lgpo_auditpol as auditpol +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, pytest.mark.destructive_test, + pytest.mark.slow_test, ] @@ -109,7 +112,16 @@ def set_policy(): ) -def _test_adv_auditing(setting, expected): +@pytest.mark.parametrize( + "setting, expected", + [ + ("No Auditing", "0"), + ("Success", "1"), + ("Failure", "2"), + 
("Success and Failure", "3"), + ], +) +def test_get_value(setting, expected): """ Helper function to set an audit setting and assert that it was successful """ @@ -119,17 +131,38 @@ def _test_adv_auditing(setting, expected): assert result == expected -def test_no_auditing(disable_legacy_auditing, set_policy): - _test_adv_auditing("No Auditing", "0") - - -def test_success(disable_legacy_auditing, clear_policy): - _test_adv_auditing("Success", "1") - - -def test_failure(disable_legacy_auditing, clear_policy): - _test_adv_auditing("Failure", "2") - +def test_get_defaults(): + patch_context = patch.dict(win_lgpo.__context__, {}) + patch_salt = patch.dict( + win_lgpo.__utils__, {"auditpol.get_auditpol_dump": auditpol.get_auditpol_dump} + ) + with patch_context, patch_salt: + assert "Machine Name" in win_lgpo._get_advaudit_defaults("fieldnames") -def test_success_and_failure(disable_legacy_auditing, clear_policy): - _test_adv_auditing("Success and Failure", "3") + audit_defaults = {"junk": "defaults"} + patch_context = patch.dict( + win_lgpo.__context__, {"lgpo.audit_defaults": audit_defaults} + ) + with patch_context, patch_salt: + assert win_lgpo._get_advaudit_defaults() == audit_defaults + + +def test_set_value_error(): + mock_set_file_data = MagicMock(return_value=False) + with patch.object(win_lgpo, "_set_advaudit_file_data", mock_set_file_data): + with pytest.raises(CommandExecutionError): + win_lgpo._set_advaudit_value("Audit User Account Management", "None") + + +def test_set_value_log_messages(caplog): + mock_set_file_data = MagicMock(return_value=True) + mock_set_pol_data = MagicMock(return_value=False) + mock_context = {"lgpo.adv_audit_data": {"test_option": "test_value"}} + with patch.object( + win_lgpo, "_set_advaudit_file_data", mock_set_file_data + ), patch.object(win_lgpo, "_set_advaudit_pol_data", mock_set_pol_data), patch.dict( + win_lgpo.__context__, mock_context + ): + win_lgpo._set_advaudit_value("test_option", None) + assert "Failed to apply audit 
setting:" in caplog.text + assert "LGPO: Removing Advanced Audit data:" in caplog.text diff --git a/tests/pytests/unit/modules/win_lgpo/test_defined_policies.py b/tests/pytests/unit/modules/win_lgpo/test_defined_policies.py index 06e91666775a..9acfc0f141c9 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_defined_policies.py +++ b/tests/pytests/unit/modules/win_lgpo/test_defined_policies.py @@ -10,6 +10,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/unit/modules/win_lgpo/test_get_policy.py b/tests/pytests/unit/modules/win_lgpo/test_get_policy.py new file mode 100644 index 000000000000..fb640f97c093 --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test_get_policy.py @@ -0,0 +1,229 @@ +import copy +import logging +import os +import pathlib + +import pytest + +import salt.grains.core +import salt.modules.win_file as win_file +import salt.modules.win_lgpo as win_lgpo +import salt.utils.files +import salt.utils.win_dacl as win_dacl + +log = logging.getLogger(__name__) + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, +] + + +@pytest.fixture +def configure_loader_modules(minion_opts): + return { + win_lgpo: { + "__opts__": minion_opts, + "__salt__": { + "file.file_exists": win_file.file_exists, + "file.makedirs": win_file.makedirs_, + }, + }, + win_file: { + "__utils__": { + "dacl.set_perms": win_dacl.set_perms, + }, + }, + } + + +@pytest.fixture(scope="module") +def osrelease(): + grains = salt.grains.core.os_data() + yield grains.get("osrelease", None) + + +@pytest.fixture +def clean_comp(): + reg_pol = pathlib.Path( + os.getenv("SystemRoot"), "System32", "GroupPolicy", "Machine", "Registry.pol" + ) + reg_pol.unlink(missing_ok=True) + try: + yield reg_pol + finally: + reg_pol.unlink(missing_ok=True) + + +@pytest.fixture +def checkbox_policy(): + policy_name = "Configure Corporate Windows Error 
Reporting" + policy_settings = { + "Connect using SSL": False, + "Corporate server name": "fakeserver.com", + "Only upload on free networks": False, + "Server port": 1273, + } + win_lgpo.set_computer_policy(name=policy_name, setting=copy.copy(policy_settings)) + try: + yield policy_name, policy_settings + finally: + win_lgpo.set_computer_policy(name=policy_name, setting="Not Configured") + + +def test_name(osrelease): + if osrelease == "2022Server": + pytest.skip(f"Test is failing on {osrelease}") + if osrelease == "11": + policy_name = "Allow Diagnostic Data" + else: + policy_name = "Allow Telemetry" + result = win_lgpo.get_policy( + policy_name=policy_name, + policy_class="machine", + return_value_only=True, + return_full_policy_names=True, + hierarchical_return=False, + ) + expected = "Not Configured" + assert result == expected + + +def test_id(): + result = win_lgpo.get_policy( + policy_name="AllowTelemetry", + policy_class="machine", + return_value_only=True, + return_full_policy_names=True, + hierarchical_return=False, + ) + expected = "Not Configured" + assert result == expected + + +def test_name_full_return_full_names(osrelease): + if osrelease == "2022Server": + pytest.skip(f"Test is failing on {osrelease}") + if osrelease == "11": + policy_name = "Allow Diagnostic Data" + else: + policy_name = "Allow Telemetry" + result = win_lgpo.get_policy( + policy_name=policy_name, + policy_class="machine", + return_value_only=False, + return_full_policy_names=True, + hierarchical_return=False, + ) + key = "Windows Components\\Data Collection and Preview Builds\\{}" + expected = {key.format(policy_name): "Not Configured"} + assert result == expected + + +def test_id_full_return_full_names(osrelease): + if osrelease == "2022Server": + pytest.skip(f"Test is failing on {osrelease}") + if osrelease == "11": + policy_name = "Allow Diagnostic Data" + else: + policy_name = "Allow Telemetry" + result = win_lgpo.get_policy( + policy_name="AllowTelemetry", + 
policy_class="machine", + return_value_only=False, + return_full_policy_names=True, + hierarchical_return=False, + ) + key = "Windows Components\\Data Collection and Preview Builds\\{}" + expected = {key.format(policy_name): "Not Configured"} + assert result == expected + + +def test_name_full_return_ids(osrelease): + if osrelease == "2022Server": + pytest.skip(f"Test is failing on {osrelease}") + if osrelease == "11": + policy_name = "Allow Diagnostic Data" + else: + policy_name = "Allow Telemetry" + result = win_lgpo.get_policy( + policy_name=policy_name, + policy_class="machine", + return_value_only=False, + return_full_policy_names=False, + hierarchical_return=False, + ) + expected = {"AllowTelemetry": "Not Configured"} + assert result == expected + + +def test_id_full_return_ids(): + result = win_lgpo.get_policy( + policy_name="AllowTelemetry", + policy_class="machine", + return_value_only=False, + return_full_policy_names=False, + hierarchical_return=False, + ) + expected = {"AllowTelemetry": "Not Configured"} + assert result == expected + + +def test_id_full_return_ids_hierarchical(): + result = win_lgpo.get_policy( + policy_name="AllowTelemetry", + policy_class="machine", + return_value_only=False, + return_full_policy_names=False, + hierarchical_return=True, + ) + expected = { + "Computer Configuration": { + "Administrative Templates": { + "WindowsComponents": { + "DataCollectionAndPreviewBuilds": { + "AllowTelemetry": "Not Configured" + }, + }, + }, + }, + } + assert result == expected + + +def test_name_return_full_names_hierarchical(osrelease): + if osrelease == "2022Server": + pytest.skip(f"Test is failing on {osrelease}") + if osrelease == "11": + policy_name = "Allow Diagnostic Data" + else: + policy_name = "Allow Telemetry" + result = win_lgpo.get_policy( + policy_name=policy_name, + policy_class="machine", + return_value_only=False, + return_full_policy_names=True, + hierarchical_return=True, + ) + expected = { + "Computer Configuration": { + 
"Administrative Templates": { + "Windows Components": { + "Data Collection and Preview Builds": { + policy_name: "Not Configured" + } + } + } + } + } + assert result == expected + + +def test_checkboxes(checkbox_policy): + """ + Test scenario where sometimes checkboxes aren't returned in the results + """ + policy_name, expected = checkbox_policy + result = win_lgpo.get_policy(policy_name=policy_name, policy_class="Machine") + assert result == expected diff --git a/tests/pytests/unit/modules/win_lgpo/test_mechanisms.py b/tests/pytests/unit/modules/win_lgpo/test_mechanisms.py index f0c8497497be..b9d659acb5fd 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_mechanisms.py +++ b/tests/pytests/unit/modules/win_lgpo/test_mechanisms.py @@ -16,6 +16,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/unit/modules/win_lgpo/test_netsh.py b/tests/pytests/unit/modules/win_lgpo/test_netsh.py new file mode 100644 index 000000000000..f3b4aef63eb7 --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test_netsh.py @@ -0,0 +1,135 @@ +import pytest + +import salt.modules.win_lgpo as win_lgpo +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.destructive_test, + pytest.mark.slow_test, +] + + +@pytest.fixture +def configure_loader_modules(): + return {win_lgpo: {}} + + +def test_get_netsh_value(): + with patch.dict(win_lgpo.__context__, {"lgpo.netsh_data": {"domain": {}}}): + win_lgpo._set_netsh_value("domain", "state", "State", "NotConfigured") + with patch.dict(win_lgpo.__context__, {}): + assert win_lgpo._get_netsh_value("domain", "State") == "NotConfigured" + + context = { + "lgpo.netsh_data": { + "domain": { + "State": "ONContext", + "Inbound": "NotConfigured", + "Outbound": "NotConfigured", + "LocalFirewallRules": "NotConfigured", + }, + }, + } + with 
patch.dict(win_lgpo.__context__, context): + assert win_lgpo._get_netsh_value("domain", "State") == "ONContext" + + +def test_set_value_error(): + with pytest.raises(ValueError): + win_lgpo._set_netsh_value("domain", "bad_section", "junk", "junk") + + +def test_set_value_firewall(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_firewall_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="firewallpolicy", + option="Inbound", + value="spongebob", + ) + mock.assert_called_once_with( + profile="domain", + inbound="spongebob", + outbound=None, + store="lgpo", + ) + + +def test_set_value_settings(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="settings", + option="spongebob", + value="squarepants", + ) + mock.assert_called_once_with( + profile="domain", + setting="spongebob", + value="squarepants", + store="lgpo", + ) + + +def test_set_value_state(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch("salt.utils.win_lgpo_netsh.set_state", MagicMock()) as mock, patch.dict( + win_lgpo.__context__, mock_context + ): + win_lgpo._set_netsh_value( + profile="domain", + section="state", + option="junk", + value="spongebob", + ) + mock.assert_called_once_with( + profile="domain", + state="spongebob", + store="lgpo", + ) + + +def test_set_value_logging_filename(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_logging_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="logging", + option="FileName", + value="Not configured", + ) + mock.assert_called_once_with( + 
profile="domain", + setting="FileName", + value="notconfigured", + store="lgpo", + ) + + +def test_set_value_logging_log(): + mock_context = {"lgpo.netsh_data": {"domain": "junk"}} + with patch( + "salt.utils.win_lgpo_netsh.set_logging_settings", MagicMock() + ) as mock, patch.dict(win_lgpo.__context__, mock_context): + win_lgpo._set_netsh_value( + profile="domain", + section="logging", + option="LogSpongebob", + value="Junk", + ) + mock.assert_called_once_with( + profile="domain", + setting="Spongebob", + value="Junk", + store="lgpo", + ) diff --git a/tests/pytests/unit/modules/win_lgpo/test_point_print_enabled.py b/tests/pytests/unit/modules/win_lgpo/test_point_print_enabled.py index 258a7a49049e..88b1c05b4e72 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_point_print_enabled.py +++ b/tests/pytests/unit/modules/win_lgpo/test_point_print_enabled.py @@ -13,6 +13,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/unit/modules/win_lgpo/test_point_print_nc.py b/tests/pytests/unit/modules/win_lgpo/test_point_print_nc.py index edfec83b3e8a..174ca38fb506 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_point_print_nc.py +++ b/tests/pytests/unit/modules/win_lgpo/test_point_print_nc.py @@ -20,6 +20,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/unit/modules/win_lgpo/test_policy_info.py b/tests/pytests/unit/modules/win_lgpo/test_policy_info.py index 6ecb434a5bc3..d0ed3c911a3d 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_policy_info.py +++ b/tests/pytests/unit/modules/win_lgpo/test_policy_info.py @@ -5,10 +5,12 @@ import salt.modules.win_file as win_file import salt.modules.win_lgpo as win_lgpo +from tests.support.mock import MagicMock, patch pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] @@ -41,6 +43,18 @@ 
def test_get_policy_name(): assert result == expected +def test_get_adml_display_name_bad_name(): + result = win_lgpo._getAdmlDisplayName("junk", "spongbob") + assert result is None + + +def test_get_adml_display_name_no_results(): + patch_xpath = patch.object(win_lgpo, "ADML_DISPLAY_NAME_XPATH", return_value=[]) + with patch_xpath: + result = win_lgpo._getAdmlDisplayName("junk", "$(spongbob.squarepants)") + assert result is None + + def test_get_policy_id(): result = win_lgpo.get_policy( policy_name="WfwPublicSettingsNotification", @@ -155,3 +169,78 @@ def test_get_policy_id_full_return_full_names_hierarchical(): } } assert result == expected + + +def test_transform_value_missing_type(): + policy = {"Transform": {"some_type": "junk"}} + result = win_lgpo._transform_value( + value="spongebob", + policy=policy, + transform_type="different_type", + ) + assert result == "spongebob" + + +def test_transform_value_registry(): + policy = {"Registry": {}} + result = win_lgpo._transform_value( + value="spongebob", + policy=policy, + transform_type="different_type", + ) + assert result == "spongebob" + + +def test_transform_value_registry_not_set(): + policy = {"Registry": {}} + result = win_lgpo._transform_value( + value="(value not set)", + policy=policy, + transform_type="different_type", + ) + assert result == "Not Defined" + + +def test_validate_setting_not_in_list(): + policy = {"Settings": ["junk"]} + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert not result + + +def test_validate_setting_in_list(): + policy = {"Settings": ["spongebob"]} + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert result + + +def test_validate_setting_not_list_or_dict(): + policy = {"Settings": "spongebob"} + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert result + + +def test_add_account_rights_error(): + patch_w32sec = patch( + "win32security.LsaOpenPolicy", MagicMock(side_effect=Exception) + ) + with 
patch_w32sec: + assert win_lgpo._addAccountRights("spongebob", "junk") is False + + +def test_del_account_rights_error(): + patch_w32sec = patch( + "win32security.LsaOpenPolicy", MagicMock(side_effect=Exception) + ) + with patch_w32sec: + assert win_lgpo._delAccountRights("spongebob", "junk") is False + + +def test_validate_setting_no_function(): + policy = { + "Settings": { + "Function": "_in_range_inclusive", + "Args": {"min": 0, "max": 24}, + }, + } + result = win_lgpo._validateSetting(value="spongebob", policy=policy) + assert not result diff --git a/tests/pytests/unit/modules/win_lgpo/test_policy_info_functions.py b/tests/pytests/unit/modules/win_lgpo/test_policy_info_functions.py index 37125644955c..bedacc237fb1 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_policy_info_functions.py +++ b/tests/pytests/unit/modules/win_lgpo/test_policy_info_functions.py @@ -9,6 +9,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/unit/modules/win_lgpo/test_policy_resources.py b/tests/pytests/unit/modules/win_lgpo/test_policy_resources.py index 0c91de7ae193..b7a6618f47ed 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_policy_resources.py +++ b/tests/pytests/unit/modules/win_lgpo/test_policy_resources.py @@ -13,6 +13,7 @@ pytestmark = [ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, ] diff --git a/tests/pytests/unit/modules/win_lgpo/test_reg_pol.py b/tests/pytests/unit/modules/win_lgpo/test_reg_pol.py new file mode 100644 index 000000000000..79c8a10393c9 --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test_reg_pol.py @@ -0,0 +1,53 @@ +""" +:codeauthor: Shane Lee +""" +import pytest + +import salt.modules.win_lgpo as win_lgpo + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.slow_test, +] + + +@pytest.fixture +def reg_pol_dword(): + data = ( + b"PReg\x01\x00\x00\x00" 
# Header + b"[\x00" # Opening list of policies + b"S\x00o\x00m\x00e\x00\\\x00K\x00e\x00y\x00\x00\x00;\x00" # Key + b"V\x00a\x00l\x00u\x00e\x00N\x00a\x00m\x00e\x00\x00\x00;\x00" # Value + b"\x04\x00\x00\x00;\x00" # Reg DWord Type + b"\x04\x00\x00\x00;\x00" # Size + # b"\x01\x00\x00\x00" # Reg Dword Data + b"\x00\x00\x00\x00" # No Data + b"]\x00" # Closing list of policies + ) + yield data + + +def test_get_data_from_reg_pol_data(reg_pol_dword): + encoded_name = "ValueName".encode("utf-16-le") + encoded_null = chr(0).encode("utf-16-le") + encoded_semicolon = ";".encode("utf-16-le") + encoded_type = chr(4).encode("utf-16-le") + encoded_size = chr(4).encode("utf-16-le") + search_string = b"".join( + [ + encoded_semicolon, + encoded_name, + encoded_null, + encoded_semicolon, + encoded_type, + encoded_null, + encoded_semicolon, + encoded_size, + encoded_null, + ] + ) + result = win_lgpo._getDataFromRegPolData( + search_string, reg_pol_dword, return_value_name=True + ) + assert result == {"ValueName": 0} diff --git a/tests/pytests/unit/modules/win_lgpo/test_secedit.py b/tests/pytests/unit/modules/win_lgpo/test_secedit.py new file mode 100644 index 000000000000..47a39fb82503 --- /dev/null +++ b/tests/pytests/unit/modules/win_lgpo/test_secedit.py @@ -0,0 +1,83 @@ +import pytest + +import salt.modules.cmdmod as cmd +import salt.modules.win_file as win_file +import salt.modules.win_lgpo as win_lgpo +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.windows_whitelisted, + pytest.mark.skip_unless_on_windows, + pytest.mark.destructive_test, + pytest.mark.slow_test, +] + + +@pytest.fixture +def configure_loader_modules(tmp_path): + cachedir = tmp_path / "__test_admx_policy_cache_dir" + cachedir.mkdir(parents=True, exist_ok=True) + return { + win_lgpo: { + "__salt__": { + "cmd.run": cmd.run, + "file.file_exists": win_file.file_exists, + "file.remove": win_file.remove, + }, + "__opts__": { + "cachedir": str(cachedir), + }, + }, + } + + +def 
test_load_secedit_data(): + result = win_lgpo._load_secedit_data() + result = [x.strip() for x in result] + assert "[Unicode]" in result + assert "[System Access]" in result + + +def test_get_secedit_data(): + with patch.dict(win_lgpo.__context__, {}): + result = win_lgpo._get_secedit_data() + result = [x.strip() for x in result] + assert "[Unicode]" in result + assert "[System Access]" in result + + +def test_get_secedit_data_existing_context(): + mock_context = {"lgpo.secedit_data": ["spongebob", "squarepants"]} + with patch.dict(win_lgpo.__context__, mock_context): + result = win_lgpo._get_secedit_data() + result = [x.strip() for x in result] + assert "spongebob" in result + assert "squarepants" in result + + +def test_get_secedit_value(): + result = win_lgpo._get_secedit_value("AuditDSAccess") + assert result == "0" + + +def test_get_secedit_value_not_defined(): + result = win_lgpo._get_secedit_value("Spongebob") + assert result == "Not Defined" + + +def test_write_secedit_data_import_fail(caplog): + patch_cmd_retcode = patch.dict( + win_lgpo.__salt__, {"cmd.retcode": MagicMock(return_value=1)} + ) + with patch_cmd_retcode: + assert win_lgpo._write_secedit_data("spongebob") is False + assert "Secedit failed to import template data" in caplog.text + + +def test_write_secedit_data_configure_fail(caplog): + patch_cmd_retcode = patch.dict( + win_lgpo.__salt__, {"cmd.retcode": MagicMock(side_effect=[0, 1])} + ) + with patch_cmd_retcode: + assert win_lgpo._write_secedit_data("spongebob") is False + assert "Secedit failed to apply security database" in caplog.text diff --git a/tests/pytests/unit/modules/win_lgpo/test_secedit_policy.py b/tests/pytests/unit/modules/win_lgpo/test_secedit_policy.py index d797878b799c..6a0d2c027c54 100644 --- a/tests/pytests/unit/modules/win_lgpo/test_secedit_policy.py +++ b/tests/pytests/unit/modules/win_lgpo/test_secedit_policy.py @@ -12,6 +12,7 @@ pytest.mark.windows_whitelisted, pytest.mark.skip_unless_on_windows, 
pytest.mark.destructive_test, + pytest.mark.slow_test, ] diff --git a/tests/pytests/unit/pillar/test_pillar.py b/tests/pytests/unit/pillar/test_pillar.py index 75603aa0fe4b..11eda34318be 100644 --- a/tests/pytests/unit/pillar/test_pillar.py +++ b/tests/pytests/unit/pillar/test_pillar.py @@ -7,6 +7,7 @@ import salt.pillar import salt.utils.cache from salt.utils.odict import OrderedDict +from tests.support.mock import MagicMock @pytest.mark.parametrize( @@ -157,3 +158,20 @@ def test_pillar_get_cache_disk(temp_salt_minion, caplog): in caplog.messages ) assert fresh_pillar == {} + + +def test_remote_pillar_timeout(temp_salt_minion, tmp_path): + opts = temp_salt_minion.config.copy() + opts["master_uri"] = "tcp://127.0.0.1:12323" + grains = salt.loader.grains(opts) + pillar = salt.pillar.RemotePillar( + opts, + grains, + temp_salt_minion.id, + "base", + ) + mock = MagicMock() + mock.side_effect = salt.exceptions.SaltReqTimeoutError() + pillar.channel.crypted_transfer_decode_dictentry = mock + with pytest.raises(salt.exceptions.SaltClientError): + pillar.compile_pillar() diff --git a/tests/pytests/unit/roster/test_ansible.py b/tests/pytests/unit/roster/test_ansible.py index 7b96e6e642cc..9c88a87ea419 100644 --- a/tests/pytests/unit/roster/test_ansible.py +++ b/tests/pytests/unit/roster/test_ansible.py @@ -62,6 +62,28 @@ def expected_targets_return(): } +@pytest.fixture +def expected_docs_targets_return(): + return { + "home": { + "passwd": "password", + "sudo": "password", + "host": "12.34.56.78", + "port": 23, + "user": "gtmanfred", + "minion_opts": {"http_port": 80}, + }, + "salt.gtmanfred.com": { + "passwd": "password", + "sudo": "password", + "host": "127.0.0.1", + "port": 22, + "user": "gtmanfred", + "minion_opts": {"http_port": 80}, + }, + } + + @pytest.fixture(scope="module") def roster_dir(tmp_path_factory): dpath = tmp_path_factory.mktemp("roster") @@ -136,6 +158,59 @@ def roster_dir(tmp_path_factory): children: southeast: """ + docs_ini_contents = """ + 
[servers] + salt.gtmanfred.com ansible_ssh_user=gtmanfred ansible_ssh_host=127.0.0.1 ansible_ssh_port=22 ansible_ssh_pass='password' ansible_sudo_pass='password' + + [desktop] + home ansible_ssh_user=gtmanfred ansible_ssh_host=12.34.56.78 ansible_ssh_port=23 ansible_ssh_pass='password' ansible_sudo_pass='password' + + [computers:children] + desktop + servers + + [computers:vars] + http_port=80 + """ + docs_script_contents = """ + #!/bin/bash + echo '{ + "servers": [ + "salt.gtmanfred.com" + ], + "desktop": [ + "home" + ], + "computers": { + "hosts": [], + "children": [ + "desktop", + "servers" + ], + "vars": { + "http_port": 80 + } + }, + "_meta": { + "hostvars": { + "salt.gtmanfred.com": { + "ansible_ssh_user": "gtmanfred", + "ansible_ssh_host": "127.0.0.1", + "ansible_sudo_pass": "password", + "ansible_ssh_pass": "password", + "ansible_ssh_port": 22 + }, + "home": { + "ansible_ssh_user": "gtmanfred", + "ansible_ssh_host": "12.34.56.78", + "ansible_sudo_pass": "password", + "ansible_ssh_pass": "password", + "ansible_ssh_port": 23 + } + } + } + }' + """ with pytest.helpers.temp_file( "roster.py", roster_py_contents, directory=dpath ) as py_roster: @@ -144,11 +219,17 @@ def roster_dir(tmp_path_factory): "roster.ini", roster_ini_contents, directory=dpath ), pytest.helpers.temp_file( "roster.yml", roster_yaml_contents, directory=dpath + ), pytest.helpers.temp_file( + "roster-docs.ini", docs_ini_contents, directory=dpath ): - try: - yield dpath - finally: - shutil.rmtree(str(dpath), ignore_errors=True) + with pytest.helpers.temp_file( + "roster-docs.sh", docs_script_contents, directory=dpath + ) as script_roster: + script_roster.chmod(0o755) + try: + yield dpath + finally: + shutil.rmtree(str(dpath), ignore_errors=True) @pytest.mark.parametrize( @@ -179,3 +260,17 @@ def test_script(roster_opts, roster_dir, expected_targets_return): with patch.dict(ansible.__opts__, roster_opts): ret = ansible.targets("*") assert ret == expected_targets_return + + +def 
test_docs_ini(roster_opts, roster_dir, expected_docs_targets_return): + roster_opts["roster_file"] = str(roster_dir / "roster-docs.ini") + with patch.dict(ansible.__opts__, roster_opts): + ret = ansible.targets("*") + assert ret == expected_docs_targets_return + + +def test_docs_script(roster_opts, roster_dir, expected_docs_targets_return): + roster_opts["roster_file"] = str(roster_dir / "roster-docs.sh") + with patch.dict(ansible.__opts__, roster_opts): + ret = ansible.targets("*") + assert ret == expected_docs_targets_return diff --git a/tests/pytests/unit/roster/test_terraform.py b/tests/pytests/unit/roster/test_terraform.py index 730c640fab25..b79d7985461e 100644 --- a/tests/pytests/unit/roster/test_terraform.py +++ b/tests/pytests/unit/roster/test_terraform.py @@ -27,10 +27,6 @@ def pki_dir(): @pytest.fixture def configure_loader_modules(roster_file, pki_dir): - # opts = salt.config.master_config( - # os.path.join(RUNTIME_VARS.TMP_CONF_DIR, "master") - # ) - # utils = salt.loader.utils(opts, whitelist=["roster_matcher"]) return { terraform: { "__utils__": { diff --git a/tests/pytests/unit/state/test_global_state_conditions.py b/tests/pytests/unit/state/test_global_state_conditions.py index e7d812f6b68c..a866e60aa244 100644 --- a/tests/pytests/unit/state/test_global_state_conditions.py +++ b/tests/pytests/unit/state/test_global_state_conditions.py @@ -8,6 +8,11 @@ log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.core_test, +] + + @pytest.fixture def minion_config(minion_opts): minion_opts["file_client"] = "local" diff --git a/tests/pytests/unit/state/test_multi_env_highstate.py b/tests/pytests/unit/state/test_multi_env_highstate.py index d1ae83fabc22..f270a31a187f 100644 --- a/tests/pytests/unit/state/test_multi_env_highstate.py +++ b/tests/pytests/unit/state/test_multi_env_highstate.py @@ -15,6 +15,11 @@ log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.core_test, +] + + @pytest.fixture def root_dir(tmp_path): return str(tmp_path / 
"root_dir") diff --git a/tests/pytests/unit/state/test_reactor_compiler.py b/tests/pytests/unit/state/test_reactor_compiler.py new file mode 100644 index 000000000000..d0f03fbfdb78 --- /dev/null +++ b/tests/pytests/unit/state/test_reactor_compiler.py @@ -0,0 +1,669 @@ +import logging + +import pytest + +import salt.minion +import salt.state +from salt.utils.odict import OrderedDict +from tests.support.mock import MagicMock, patch + +log = logging.getLogger(__name__) + + +pytestmark = [ + pytest.mark.core_test, +] + + +def test_compiler_render_template(minion_opts, tmp_path): + """ + Test Compiler.render_template + """ + minion = "poc-minion" + kwargs = { + "tag": f"salt/minion/{minion}/start", + "data": { + "id": minion, + "cmd": "_minion_event", + "pretag": None, + "data": f"Minion {minion} started at Thu Sep 14 07:31:04 2023", + "tag": f"salt/minion/{minion}/start", + "_stamp": "2023-09-14T13:31:05.000316", + }, + } + + reactor_file = tmp_path / "reactor.sls" + content = f""" + highstate_run: + local.state.apply: + - tgt: {minion} + - args: + - mods: test + """ + with salt.utils.files.fopen(reactor_file, "w") as fp: + fp.write(content) + + mminion = salt.minion.MasterMinion(minion_opts) + comp = salt.state.Compiler(minion_opts, mminion.rend) + ret = comp.render_template(template=str(reactor_file), kwargs=kwargs) + assert ret["highstate_run"]["local"][0]["tgt"] == minion + assert ret["highstate_run"]["local"][1]["args"][0]["mods"] == "test" + + +def test_compiler_render_template_doesnotexist(minion_opts, tmp_path): + """ + Test Compiler.render_template when + the reactor file does not exist + """ + minion = "poc-minion" + kwargs = { + "tag": f"salt/minion/{minion}/start", + "data": { + "id": minion, + "cmd": "_minion_event", + "pretag": None, + "data": f"Minion {minion} started at Thu Sep 14 07:31:04 2023", + "tag": f"salt/minion/{minion}/start", + "_stamp": "2023-09-14T13:31:05.000316", + }, + } + + reactor_file = tmp_path / "reactor.sls" + mminion = 
salt.minion.MasterMinion(minion_opts) + comp = salt.state.Compiler(minion_opts, mminion.rend) + mock_pad = MagicMock(return_value=None) + patch_pad = patch.object(comp, "pad_funcs", mock_pad) + with patch_pad: + ret = comp.render_template(template=str(reactor_file), kwargs=kwargs) + assert ret == {} + mock_pad.assert_not_called() + + +def test_compiler_pad_funcs(minion_opts, tmp_path): + """ + Test Compiler.pad_funcs + """ + high = OrderedDict( + [ + ( + "highstate_run", + OrderedDict( + [ + ( + "local.state.apply", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [("args", [OrderedDict([("mods", "test")])])] + ), + ], + ) + ] + ), + ) + ] + ) + + exp = OrderedDict( + [ + ( + "highstate_run", + OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [("args", [OrderedDict([("mods", "test")])])] + ), + "state.apply", + ], + ) + ] + ), + ) + ] + ) + mminion = salt.minion.MasterMinion(minion_opts) + comp = salt.state.Compiler(minion_opts, mminion.rend) + ret = comp.pad_funcs(high) + assert ret == exp + + +def test_compiler_pad_funcs_short_sls(minion_opts, tmp_path): + """ + Test Compiler.pad_funcs when using a shorter + sls with no extra arguments + """ + high = OrderedDict([("master_pub", "wheel.key.master_key_str")]) + exp = OrderedDict([("master_pub", {"wheel": ["key.master_key_str"]})]) + + mminion = salt.minion.MasterMinion(minion_opts) + comp = salt.state.Compiler(minion_opts, mminion.rend) + ret = comp.pad_funcs(high) + assert ret == exp + + +@pytest.mark.parametrize( + "high,exp", + [ + ( + { + "master_pub": { + "wheel": ["key.master_key_str"], + "__sls__": "/srv/reactor/start.sls", + } + }, + [], + ), + (set(), ["High data is not a dictionary and is invalid"]), + ( + { + 1234: { + "wheel": ["key.master_key_str"], + "__sls__": "/srv/reactor/start.sls", + } + }, + [ + "ID '1234' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a int. 
It may need to be quoted" + ], + ), + ( + { + b"test": { + "wheel": ["key.master_key_str"], + "__sls__": "/srv/reactor/start.sls", + } + }, + [ + "ID 'b'test'' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a bytes. It may need to be quoted" + ], + ), + ( + { + True: { + "wheel": ["key.master_key_str"], + "__sls__": "/srv/reactor/start.sls", + } + }, + [ + "ID 'True' in SLS '/srv/reactor/start.sls' is not formed as a string, but is a bool. It may need to be quoted" + ], + ), + ( + {"master_pub": ["wheel", "key.master_key_str"]}, + [ + "The type master_pub in ['wheel', 'key.master_key_str'] is not formatted as a dictionary" + ], + ), + ( + { + "master_pub": { + "wheel": {"key.master_key_str"}, + "__sls__": "/srv/reactor/start.sls", + } + }, + [ + "State 'master_pub' in SLS '/srv/reactor/start.sls' is not formed as a list" + ], + ), + ( + { + "master_pub": { + "wheel": ["key. master_key_str"], + "__sls__": "/srv/reactor/start.sls", + } + }, + [ + 'The function "key. master_key_str" in state "master_pub" in SLS "/srv/reactor/start.sls" has whitespace, a function with whitespace is not supported, perhaps this is an argument that is missing a ":"' + ], + ), + ( + { + "master_pub": { + "wheel": ["key.master_key_str "], + "__sls__": "/srv/reactor/start.sls", + } + }, + [], + ), + ], +) +def test_compiler_verify_high_short_sls(minion_opts, tmp_path, high, exp): + """ + Test Compiler.verify_high when using + a shorter sls with know extra arguments + """ + mminion = salt.minion.MasterMinion(minion_opts) + comp = salt.state.Compiler(minion_opts, mminion.rend) + ret = comp.verify_high(high) + # empty is successful. 
Means we have no errors + assert ret == exp + + +@pytest.mark.parametrize( + "high,exp", + [ + ( + { + "add_test_1": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test1")] + ) + ], + ) + ] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + "add_test_2": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test2")] + ) + ], + ) + ] + ), + OrderedDict( + [ + ( + "require", + [OrderedDict([("local", "add_test_1")])], + ) + ] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + }, + [], + ), + ( + { + "add_test_1": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test1")] + ) + ], + ) + ] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + "add_test_2": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test2")] + ) + ], + ) + ] + ), + OrderedDict([("require", {"local": "add_test_1"})]), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + }, + [ + "The require statement in state 'add_test_2' in SLS '/srv/reactor/start.sls' needs to be formed as a list" + ], + ), + ( + { + "add_test_1": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test1")] + ) + ], + ) + ] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + "add_test_2": OrderedDict( + [ + ( + "local.cmd.run", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test2")] + ) + ], + ) + ] + ), + OrderedDict([("require", 
{"local": "add_test_1"})]), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + }, + [ + "The require statement in state 'add_test_2' in SLS '/srv/reactor/start.sls' needs to be formed as a list", + "Too many functions declared in state 'add_test_2' in SLS '/srv/reactor/start.sls'. Please choose one of the following: cmd.run, cmd.run", + ], + ), + ( + { + "add_test_1": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [("args", ([("cmd", "touch /tmp/test1")]))] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + "add_test_2": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test2")] + ) + ], + ) + ] + ), + OrderedDict([("require", ([("local", "add_test_1")]))]), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + }, + [ + "Requisite declaration ('local', 'add_test_1') in SLS /srv/reactor/start.sls is not formed as a single key dictionary" + ], + ), + ( + { + "add_test_1": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test1")] + ) + ], + ) + ] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + "add_test_2": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test2")] + ) + ], + ) + ] + ), + OrderedDict( + [ + ( + "require", + [ + OrderedDict( + [("local", (["add_test_1"]))] + ) + ], + ) + ] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + }, + ["Illegal requisite \"['add_test_1']\", is SLS /srv/reactor/start.sls\n"], + ), + ( + { + "add_test_1": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch 
/tmp/test1")] + ) + ], + ) + ] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + "add_test_2": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test2")] + ) + ], + ) + ] + ), + OrderedDict( + [ + ( + "require", + [OrderedDict([("local", "add_test_2")])], + ) + ] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + }, + [ + 'A recursive requisite was found, SLS "/srv/reactor/start.sls" ID "add_test_2" ID "add_test_2"' + ], + ), + ( + { + "add_test_1": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test1")] + ) + ], + ) + ] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + "add_test_2": OrderedDict( + [ + ( + "local", + [ + OrderedDict([("tgt", "poc-minion")]), + OrderedDict( + [ + ( + "args", + [ + OrderedDict( + [("cmd", "touch /tmp/test2")] + ) + ], + ) + ] + ), + OrderedDict( + [ + ( + "require", + [OrderedDict([("local", "add_test_1")])], + ) + ] + ), + "cmd.run", + ], + ), + ("__sls__", "/srv/reactor/start.sls"), + ] + ), + }, + [], + ), + ], +) +def test_compiler_verify_high_sls_requisites(minion_opts, tmp_path, high, exp): + """ + Test Compiler.verify_high when using + a sls with requisites + """ + mminion = salt.minion.MasterMinion(minion_opts) + comp = salt.state.Compiler(minion_opts, mminion.rend) + ret = comp.verify_high(high) + # empty is successful. 
Means we have no errors + assert ret == exp diff --git a/tests/pytests/unit/state/test_state_basic.py b/tests/pytests/unit/state/test_state_basic.py new file mode 100644 index 000000000000..a8a56fba82b2 --- /dev/null +++ b/tests/pytests/unit/state/test_state_basic.py @@ -0,0 +1,201 @@ +""" +Test functions in state.py that are not a part of a class +""" +import pytest + +import salt.state +from salt.utils.odict import OrderedDict + +pytestmark = [ + pytest.mark.core_test, +] + + +def test_state_args(): + """ + Testing state.state_args when this state is being used: + + /etc/foo.conf: + file.managed: + - contents: "blah" + - mkdirs: True + - user: ch3ll + - group: ch3ll + - mode: 755 + + /etc/bar.conf: + file.managed: + - use: + - file: /etc/foo.conf + """ + id_ = "/etc/bar.conf" + state = "file" + high = OrderedDict( + [ + ( + "/etc/foo.conf", + OrderedDict( + [ + ( + "file", + [ + OrderedDict([("contents", "blah")]), + OrderedDict([("mkdirs", True)]), + OrderedDict([("user", "ch3ll")]), + OrderedDict([("group", "ch3ll")]), + OrderedDict([("mode", 755)]), + "managed", + {"order": 10000}, + ], + ), + ("__sls__", "test"), + ("__env__", "base"), + ] + ), + ), + ( + "/etc/bar.conf", + OrderedDict( + [ + ( + "file", + [ + OrderedDict( + [ + ( + "use", + [OrderedDict([("file", "/etc/foo.conf")])], + ) + ] + ), + "managed", + {"order": 10001}, + ], + ), + ("__sls__", "test"), + ("__env__", "base"), + ] + ), + ), + ] + ) + ret = salt.state.state_args(id_, state, high) + assert ret == {"order", "use"} + + +def test_state_args_id_not_high(): + """ + Testing state.state_args when id_ is not in high + """ + id_ = "/etc/bar.conf2" + state = "file" + high = OrderedDict( + [ + ( + "/etc/foo.conf", + OrderedDict( + [ + ( + "file", + [ + OrderedDict([("contents", "blah")]), + OrderedDict([("mkdirs", True)]), + OrderedDict([("user", "ch3ll")]), + OrderedDict([("group", "ch3ll")]), + OrderedDict([("mode", 755)]), + "managed", + {"order": 10000}, + ], + ), + ("__sls__", "test"), + 
("__env__", "base"), + ] + ), + ), + ( + "/etc/bar.conf", + OrderedDict( + [ + ( + "file", + [ + OrderedDict( + [ + ( + "use", + [OrderedDict([("file", "/etc/foo.conf")])], + ) + ] + ), + "managed", + {"order": 10001}, + ], + ), + ("__sls__", "test"), + ("__env__", "base"), + ] + ), + ), + ] + ) + ret = salt.state.state_args(id_, state, high) + assert ret == set() + + +def test_state_args_state_not_high(): + """ + Testing state.state_args when state is not in high date + """ + id_ = "/etc/bar.conf" + state = "file2" + high = OrderedDict( + [ + ( + "/etc/foo.conf", + OrderedDict( + [ + ( + "file", + [ + OrderedDict([("contents", "blah")]), + OrderedDict([("mkdirs", True)]), + OrderedDict([("user", "ch3ll")]), + OrderedDict([("group", "ch3ll")]), + OrderedDict([("mode", 755)]), + "managed", + {"order": 10000}, + ], + ), + ("__sls__", "test"), + ("__env__", "base"), + ] + ), + ), + ( + "/etc/bar.conf", + OrderedDict( + [ + ( + "file", + [ + OrderedDict( + [ + ( + "use", + [OrderedDict([("file", "/etc/foo.conf")])], + ) + ] + ), + "managed", + {"order": 10001}, + ], + ), + ("__sls__", "test"), + ("__env__", "base"), + ] + ), + ), + ] + ) + ret = salt.state.state_args(id_, state, high) + assert ret == set() diff --git a/tests/pytests/unit/state/test_state_compiler.py b/tests/pytests/unit/state/test_state_compiler.py index 3ef63b8ad299..4aaf3b292364 100644 --- a/tests/pytests/unit/state/test_state_compiler.py +++ b/tests/pytests/unit/state/test_state_compiler.py @@ -17,6 +17,11 @@ log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.core_test, +] + + def test_format_log_non_ascii_character(): """ Tests running a non-ascii character through the state.format_log @@ -33,7 +38,16 @@ def test_format_log_non_ascii_character(): salt.state.format_log(ret) -@pytest.mark.slow_test +def test_format_log_list(caplog): + """ + Test running format_log when ret is not a dictionary + """ + ret = ["test1", "test2"] + salt.state.format_log(ret) + assert "INFO" in caplog.text + 
assert f"{ret}" in caplog.text + + def test_render_error_on_invalid_requisite(minion_opts): """ Test that the state compiler correctly deliver a rendering @@ -1038,3 +1052,287 @@ def test_mod_aggregate(minion_opts): # Ensure pkgs were aggregated assert low_ret["pkgs"] == ["figlet", "sl"] + + +def test_verify_onlyif_cmd_opts_exclude(minion_opts): + """ + Verify cmd.run state arguments are properly excluded from cmd.retcode + when passed. + """ + low_data = { + "onlyif": "somecommand", + "cmd_opts_exclude": ["shell"], + "cwd": "acwd", + "root": "aroot", + "env": [{"akey": "avalue"}], + "prepend_path": "apath", + "umask": "0700", + "success_retcodes": 1, + "timeout": 5, + "runas": "doesntexist", + "name": "echo something", + "shell": "/bin/dash", + "state": "cmd", + "__id__": "this is just a test", + "fun": "run", + "__env__": "base", + "__sls__": "sometest", + "order": 10000, + } + + with patch("salt.state.State._gather_pillar"): + state_obj = salt.state.State(minion_opts) + mock = MagicMock() + with patch.dict(state_obj.functions, {"cmd.retcode": mock}): + # The mock handles the exception, but the runas dict is being passed as it would actually be + return_result = state_obj._run_check(low_data) + mock.assert_called_once_with( + "somecommand", + ignore_retcode=True, + python_shell=True, + cwd="acwd", + root="aroot", + runas="doesntexist", + env=[{"akey": "avalue"}], + prepend_path="apath", + umask="0700", + timeout=5, + success_retcodes=1, + ) + + +@pytest.mark.parametrize("verifier", (salt.state.State, salt.state.Compiler)) +@pytest.mark.parametrize( + "high,err_msg", + ( + ( + {"/some/file": {"file.managed": ["source:salt://bla"]}}, + "Too many functions declared in state '/some/file' in SLS 'sls'. Please choose one of the following: managed, source:salt://bla", + ), + ( + {"/some/file": {"file": ["managed", "source:salt://bla"]}}, + "Too many functions declared in state '/some/file' in SLS 'sls'. 
Please choose one of the following: managed, source:salt://bla", + ), + ), +) +def test_verify_high_too_many_functions_declared_error_message( + high, err_msg, minion_opts, verifier +): + """ + Ensure the error message when a list item of a state call is + accidentally passed as a string instead of a single-item dict + is more meaningful. Example: + + /some/file: + file.managed: + - source:salt://bla + + /some/file: + file: + - managed + - source:salt://bla + + Issue #38098. + """ + high[next(iter(high))]["__sls__"] = "sls" + with patch("salt.state.State._gather_pillar"): + if verifier is salt.state.Compiler: + state_obj = verifier(minion_opts, []) + else: + state_obj = verifier(minion_opts) + res = state_obj.verify_high(high) + assert isinstance(res, list) + assert any(err_msg in x for x in res) + + +def test_load_modules_pkg(minion_opts): + """ + Test load_modules when using this state: + nginx: + pkg.installed: + - provider: pacmanpkg + """ + data = { + "state": "pkg", + "name": "nginx", + "__sls__": "test", + "__env__": "base", + "__id__": "nginx", + "provider": "pacmanpkg", + "order": 10000, + "fun": "installed", + } + with patch("salt.state.State._gather_pillar"): + state_obj = salt.state.State(minion_opts) + state_obj.load_modules(data) + for func in [ + "pkg.available_version", + "pkg.file_list", + "pkg.group_diff", + "pkg.group_info", + ]: + assert func in state_obj.functions + + +def test_load_modules_list(minion_opts): + """ + Test load_modules when using providers in state + as a list, with this state: + nginx: + pkg.installed: + - provider: + - cmd: cmdmod + """ + data = { + "state": "pkg", + "name": "nginx", + "__sls__": "test", + "__env__": "base", + "__id__": "nginx", + "provider": [OrderedDict([("cmd", "cmdmod")])], + "order": 10000, + "fun": "installed", + } + with patch("salt.state.State._gather_pillar"): + state_obj = salt.state.State(minion_opts) + state_obj.load_modules(data) + for func in ["cmd.exec_code", "cmd.run", "cmd.script"]: + assert 
func in state_obj.functions + + +def test_load_modules_dict(minion_opts): + """ + Test load_modules when providers is a dict, which is + not valid. Testing this state: + nginx: + pkg.installed: + - provider: {cmd: test} + """ + data = { + "state": "pkg", + "name": "nginx", + "__sls__": "test", + "__env__": "base", + "__id__": "nginx", + "provider": OrderedDict([("cmd", "test")]), + "order": 10000, + "fun": "installed", + } + mock_raw_mod = MagicMock() + patch_raw_mod = patch("salt.loader.raw_mod", mock_raw_mod) + with patch("salt.state.State._gather_pillar"): + with patch_raw_mod: + state_obj = salt.state.State(minion_opts) + state_obj.load_modules(data) + mock_raw_mod.assert_not_called() + + +def test_check_refresh_grains(minion_opts): + """ + Test check_refresh when using this state: + grains_refresh: + module.run: + - name: saltutil.refresh_grains + - reload_grains: true + Ensure that the grains are loaded when reload_grains + is set. + """ + data = { + "state": "module", + "name": "saltutil.refresh_grains", + "__sls__": "test", + "__env__": "base", + "__id__": "grains_refresh", + "reload_grains": True, + "order": 10000, + "fun": "run", + } + ret = { + "name": "saltutil.refresh_grains", + "changes": {"ret": True}, + "comment": "Module function saltutil.refresh_grains executed", + "result": True, + "__sls__": "test", + "__run_num__": 0, + } + mock_refresh = MagicMock() + patch_refresh = patch("salt.state.State.module_refresh", mock_refresh) + with patch("salt.state.State._gather_pillar"): + with patch_refresh: + state_obj = salt.state.State(minion_opts) + state_obj.check_refresh(data, ret) + mock_refresh.assert_called_once() + assert "cwd" in state_obj.opts["grains"] + + +def test_check_refresh_pillar(minion_opts, caplog): + """ + Test check_refresh when using this state: + pillar_refresh: + module.run: + - name: saltutil.refresh_pillar + - reload_pillar: true + Ensure the pillar is refreshed. 
+ """ + data = { + "state": "module", + "name": "saltutil.refresh_pillar", + "__sls__": "test", + "__env__": "base", + "__id__": "pillar_refresh", + "reload_pillar": True, + "order": 10000, + "fun": "run", + } + ret = { + "name": "saltutil.refresh_pillar", + "changes": {"ret": False}, + "comment": "Module function saltutil.refresh_pillar executed", + "result": False, + "__sls__": "test", + "__run_num__": 0, + } + mock_refresh = MagicMock() + patch_refresh = patch("salt.state.State.module_refresh", mock_refresh) + mock_pillar = MagicMock() + patch_pillar = patch("salt.state.State._gather_pillar", mock_pillar) + with patch_pillar, patch_refresh: + with caplog.at_level(logging.DEBUG): + state_obj = salt.state.State(minion_opts) + state_obj.check_refresh(data, ret) + mock_refresh.assert_called_once() + assert "Refreshing pillar..." in caplog.text + + +def test_module_refresh_runtimeerror(minion_opts, caplog): + """ + test module_refresh when runtimerror occurs + """ + mock_importlib = MagicMock() + mock_importlib.side_effect = RuntimeError("Error") + patch_importlib = patch("importlib.reload", mock_importlib) + patch_pillar = patch("salt.state.State._gather_pillar", return_value="") + with patch_importlib, patch_pillar: + state_obj = salt.state.State(minion_opts) + state_obj.module_refresh() + assert ( + "Error encountered during module reload. Modules were not reloaded." + in caplog.text + ) + + +def test_module_refresh_typeerror(minion_opts, caplog): + """ + test module_refresh when typeerror occurs + """ + mock_importlib = MagicMock() + mock_importlib.side_effect = TypeError("Error") + patch_importlib = patch("importlib.reload", mock_importlib) + patch_pillar = patch("salt.state.State._gather_pillar", return_value="") + with patch_importlib, patch_pillar: + state_obj = salt.state.State(minion_opts) + state_obj.module_refresh() + assert ( + "Error encountered during module reload. Modules were not reloaded." 
+ in caplog.text + ) diff --git a/tests/pytests/unit/state/test_state_format_slots.py b/tests/pytests/unit/state/test_state_format_slots.py index 4fb6f0f915d9..360f36c88d96 100644 --- a/tests/pytests/unit/state/test_state_format_slots.py +++ b/tests/pytests/unit/state/test_state_format_slots.py @@ -15,6 +15,11 @@ log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.core_test, +] + + @pytest.fixture def state_obj(minion_opts): with patch("salt.state.State._gather_pillar"): @@ -30,7 +35,6 @@ def test_format_slots_no_slots(state_obj): assert cdata == {"args": ["arg"], "kwargs": {"key": "val"}} -@pytest.mark.slow_test def test_format_slots_arg(state_obj): """ Test the format slots is calling a slot specified in args with corresponding arguments. @@ -46,7 +50,6 @@ def test_format_slots_arg(state_obj): assert cdata == {"args": ["fun_return"], "kwargs": {"key": "val"}} -@pytest.mark.slow_test def test_format_slots_dict_arg(state_obj): """ Test the format slots is calling a slot specified in dict arg. @@ -62,7 +65,6 @@ def test_format_slots_dict_arg(state_obj): assert cdata == {"args": [{"subarg": "fun_return"}], "kwargs": {"key": "val"}} -@pytest.mark.slow_test def test_format_slots_listdict_arg(state_obj): """ Test the format slots is calling a slot specified in list containing a dict. @@ -78,7 +80,6 @@ def test_format_slots_listdict_arg(state_obj): assert cdata == {"args": [[{"subarg": "fun_return"}]], "kwargs": {"key": "val"}} -@pytest.mark.slow_test def test_format_slots_liststr_arg(state_obj): """ Test the format slots is calling a slot specified in list containing a dict. @@ -94,7 +95,6 @@ def test_format_slots_liststr_arg(state_obj): assert cdata == {"args": [["fun_return"]], "kwargs": {"key": "val"}} -@pytest.mark.slow_test def test_format_slots_kwarg(state_obj): """ Test the format slots is calling a slot specified in kwargs with corresponding arguments. 
@@ -110,7 +110,6 @@ def test_format_slots_kwarg(state_obj): assert cdata == {"args": ["arg"], "kwargs": {"key": "fun_return"}} -@pytest.mark.slow_test def test_format_slots_multi(state_obj): """ Test the format slots is calling all slots with corresponding arguments when multiple slots @@ -150,7 +149,6 @@ def test_format_slots_multi(state_obj): } -@pytest.mark.slow_test def test_format_slots_malformed(state_obj): """ Test the format slots keeps malformed slots untouched. @@ -181,7 +179,6 @@ def test_format_slots_malformed(state_obj): assert cdata == sls_data -@pytest.mark.slow_test def test_slot_traverse_dict(state_obj): """ Test the slot parsing of dict response. @@ -198,7 +195,6 @@ def test_slot_traverse_dict(state_obj): assert cdata == {"args": ["arg"], "kwargs": {"key": "value1"}} -@pytest.mark.slow_test def test_slot_append(state_obj): """ Test the slot parsing of dict response. diff --git a/tests/pytests/unit/state/test_state_highstate.py b/tests/pytests/unit/state/test_state_highstate.py index 83529d98b0ca..a04c91f35c6c 100644 --- a/tests/pytests/unit/state/test_state_highstate.py +++ b/tests/pytests/unit/state/test_state_highstate.py @@ -8,11 +8,16 @@ import pytest # pylint: disable=unused-import import salt.state -from salt.utils.odict import OrderedDict +from salt.utils.odict import DefaultOrderedDict, OrderedDict log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.core_test, +] + + @pytest.fixture def root_dir(tmp_path): return tmp_path / "root_dir" @@ -347,3 +352,68 @@ def test_dont_extend_in_excluded_sls_file(highstate, state_tree_dir): ) ] ) + + +def test_verify_tops(highstate): + """ + test basic functionality of verify_tops + """ + tops = DefaultOrderedDict(OrderedDict) + tops["base"] = OrderedDict([("*", ["test", "test2"])]) + matches = highstate.verify_tops(tops) + # [] means there where no errors when verifying tops + assert matches == [] + + +def test_verify_tops_not_dict(highstate): + """ + test verify_tops when top data is not a 
dict + """ + matches = highstate.verify_tops(["base", "test", "test2"]) + assert matches == ["Top data was not formed as a dict"] + + +def test_verify_tops_env_empty(highstate): + """ + test verify_tops when the environment is empty + """ + tops = DefaultOrderedDict(OrderedDict) + tops[""] = OrderedDict([("*", ["test", "test2"])]) + matches = highstate.verify_tops(tops) + assert matches == ["Empty saltenv statement in top file"] + + +def test_verify_tops_sls_not_list(highstate): + """ + test verify_tops when the sls files are not a list + """ + tops = DefaultOrderedDict(OrderedDict) + tops["base"] = OrderedDict([("*", "test test2")]) + matches = highstate.verify_tops(tops) + # [] means there where no errors when verifying tops + assert matches == ["Malformed topfile (state declarations not formed as a list)"] + + +def test_verify_tops_match(highstate): + """ + test basic functionality of verify_tops when using a matcher + like `match: glob`. + """ + tops = DefaultOrderedDict(OrderedDict) + tops["base"] = OrderedDict( + [("*", [OrderedDict([("match", "glob")]), "test", "test2"])] + ) + matches = highstate.verify_tops(tops) + # [] means there where no errors when verifying tops + assert matches == [] + + +def test_verify_tops_match_none(highstate): + """ + test basic functionality of verify_tops when using a matcher + when it is empty, like `match: ""`. 
+ """ + tops = DefaultOrderedDict(OrderedDict) + tops["base"] = OrderedDict([("*", [OrderedDict([("match", "")]), "test", "test2"])]) + matches = highstate.verify_tops(tops) + assert "Improperly formatted top file matcher in saltenv" in matches[0] diff --git a/tests/pytests/unit/state/test_state_options.py b/tests/pytests/unit/state/test_state_options.py index ccba8d291fff..7639567b0444 100644 --- a/tests/pytests/unit/state/test_state_options.py +++ b/tests/pytests/unit/state/test_state_options.py @@ -4,6 +4,10 @@ import salt.state +pytestmark = [ + pytest.mark.core_test, +] + @pytest.fixture def master_opts(master_opts): diff --git a/tests/pytests/unit/state/test_state_returns.py b/tests/pytests/unit/state/test_state_returns.py index 1a3253fbb8e2..65799dfeb016 100644 --- a/tests/pytests/unit/state/test_state_returns.py +++ b/tests/pytests/unit/state/test_state_returns.py @@ -11,6 +11,11 @@ log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.core_test, +] + + def test_state_output_check_changes_is_dict(): """ Test that changes key contains a dictionary. diff --git a/tests/pytests/unit/state/test_sub_state_returns.py b/tests/pytests/unit/state/test_sub_state_returns.py index d979f08a3eb6..1428d0d92f25 100644 --- a/tests/pytests/unit/state/test_sub_state_returns.py +++ b/tests/pytests/unit/state/test_sub_state_returns.py @@ -11,6 +11,11 @@ log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.core_test, +] + + def test_sub_state_output_check_changes_is_dict(): """ Test that changes key contains a dictionary. 
diff --git a/tests/pytests/unit/states/file/test_managed.py b/tests/pytests/unit/states/file/test_managed.py index 0f5da2dac275..4a826c268690 100644 --- a/tests/pytests/unit/states/file/test_managed.py +++ b/tests/pytests/unit/states/file/test_managed.py @@ -405,3 +405,29 @@ def test_managed_test_mode_user_group_not_present(): ) assert ret["result"] is not False assert "is not available" not in ret["comment"] + + +@pytest.mark.parametrize( + "source,check_result", + [ + ("http://@$@dead_link@$@/src.tar.gz", True), + ("https://@$@dead_link@$@/src.tar.gz", True), + ("ftp://@$@dead_link@$@/src.tar.gz", True), + ("salt://@$@dead_link@$@/src.tar.gz", False), + ("file://@$@dead_link@$@/src.tar.gz", False), + ( + ["http://@$@dead_link@$@/src.tar.gz", "https://@$@dead_link@$@/src.tar.gz"], + True, + ), + ( + ["salt://@$@dead_link@$@/src.tar.gz", "file://@$@dead_link@$@/src.tar.gz"], + False, + ), + ( + ["http://@$@dead_link@$@/src.tar.gz", "file://@$@dead_link@$@/src.tar.gz"], + True, + ), + ], +) +def test_sources_source_hash_check(source, check_result): + assert filestate._http_ftp_check(source) is check_result diff --git a/tests/pytests/unit/states/file/test_rename.py b/tests/pytests/unit/states/file/test_rename.py index bccb59b8c03a..8f1a3aa6f1ed 100644 --- a/tests/pytests/unit/states/file/test_rename.py +++ b/tests/pytests/unit/states/file/test_rename.py @@ -10,6 +10,7 @@ import salt.serializers.python as pythonserializer import salt.serializers.yaml as yamlserializer import salt.states.file as filestate +import salt.utils.platform from tests.support.mock import MagicMock, patch log = logging.getLogger(__name__) @@ -136,3 +137,56 @@ def test_rename(tmp_path): } ) assert filestate.rename(name, source) == ret + + mock_lex = MagicMock(side_effect=[True, False, False]) + with patch.object(os.path, "isabs", mock_t), patch.object( + os.path, "lexists", mock_lex + ), patch.object(os.path, "isdir", mock_f), patch.dict( + filestate.__salt__, + {"file.makedirs": 
MagicMock(side_effect=filestate.CommandExecutionError())}, + ), patch.object( + os.path, "islink", mock_f + ), patch.dict( + filestate.__opts__, {"test": False} + ), patch.object( + shutil, "move", MagicMock() + ): + if salt.utils.platform.is_windows(): + comt = "Drive C: is not mapped" + else: + comt = "Drive is not mapped" + ret.update( + { + "name": name, + "comment": comt, + "result": False, + "changes": {}, + } + ) + assert filestate.rename(name, source, makedirs=True) == ret + + mock_lex = MagicMock(side_effect=[True, False, False]) + mock_link = str(tmp_path / "salt" / "link") + with patch.object(os.path, "isabs", mock_t), patch.object( + os.path, "lexists", mock_lex + ), patch.object(os.path, "isdir", mock_t), patch.object( + os.path, "islink", mock_t + ), patch( + "salt.utils.path.readlink", MagicMock(return_value=mock_link) + ), patch.dict( + filestate.__opts__, {"test": False} + ), patch.object( + os, "symlink", MagicMock() + ), patch.object( + os, "unlink", MagicMock() + ): + comt = 'Moved "{}" to "{}"'.format(source, name) + ret.update( + { + "name": name, + "comment": comt, + "result": True, + "changes": {name: source}, + } + ) + assert filestate.rename(name, source) == ret diff --git a/tests/pytests/unit/states/postgresql/test_group.py b/tests/pytests/unit/states/postgresql/test_group.py index 2eb77bf4c0fe..6957ce545403 100644 --- a/tests/pytests/unit/states/postgresql/test_group.py +++ b/tests/pytests/unit/states/postgresql/test_group.py @@ -1,4 +1,5 @@ import pytest +from pytestskipmarkers.utils import platform import salt.modules.postgres as postgres import salt.states.postgres_group as postgres_group @@ -19,6 +20,8 @@ def fixture_db_args(): @pytest.fixture(name="md5_pw") def fixture_md5_pw(): + if platform.is_fips_enabled(): + pytest.skip("Test cannot run on a FIPS enabled platform") # 'md5' + md5('password' + 'groupname') return "md58b14c378fab8ef0dc227f4e6d6787a87" @@ -79,6 +82,7 @@ def configure_loader_modules(mocks): # ========== 
+@pytest.mark.skip_on_fips_enabled_platform def test_present_create_basic(mocks, db_args): assert postgres_group.present("groupname") == { "name": "groupname", @@ -343,6 +347,7 @@ def test_present_update_md5_password(mocks, existing_group, md5_pw, db_args): ) +@pytest.mark.skip_on_fips_enabled_platform def test_present_update_error(mocks, existing_group): existing_group["password"] = "md500000000000000000000000000000000" mocks["postgres.role_get"].return_value = existing_group diff --git a/tests/pytests/unit/states/postgresql/test_user.py b/tests/pytests/unit/states/postgresql/test_user.py index 46d76535144d..1d5dba9b1bb3 100644 --- a/tests/pytests/unit/states/postgresql/test_user.py +++ b/tests/pytests/unit/states/postgresql/test_user.py @@ -1,4 +1,5 @@ import pytest +from pytestskipmarkers.utils import platform import salt.modules.postgres as postgres import salt.states.postgres_user as postgres_user @@ -25,6 +26,8 @@ def fixture_db_args(): @pytest.fixture(name="md5_pw") def fixture_md5_pw(): # 'md5' + md5('password' + 'username') + if platform.is_fips_enabled(): + pytest.skip("Test cannot run on a FIPS enabled platform") return "md55a231fcdb710d73268c4f44283487ba2" diff --git a/tests/pytests/unit/states/saltmod/__init__.py b/tests/pytests/unit/states/saltmod/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/pytests/unit/states/saltmod/test_function.py b/tests/pytests/unit/states/saltmod/test_function.py new file mode 100644 index 000000000000..90b3e09d0e69 --- /dev/null +++ b/tests/pytests/unit/states/saltmod/test_function.py @@ -0,0 +1,527 @@ +import pytest + +import salt.states.saltmod as saltmod +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(minion_opts): + return { + saltmod: { + "__opts__": minion_opts, + }, + } + + +def test_function(): + """ + Test to execute a single module function on a remote + minion via salt or salt-ssh + """ + name = "state" + tgt = "larry" + + 
expected = { + "name": name, + "changes": {}, + "result": None, + "comment": "Function state would be executed on target {}".format(tgt), + } + + with patch.dict(saltmod.__opts__, {"test": True}): + ret = saltmod.function(name, tgt) + assert ret == expected + + expected.update( + { + "result": True, + "changes": {"ret": {tgt: ""}}, + "comment": ( + "Function ran successfully. Function state ran on {}.".format(tgt) + ), + } + ) + with patch.dict(saltmod.__opts__, {"test": False}): + mock_ret = {"larry": {"ret": "", "retcode": 0, "failed": False}} + mock_cmd = MagicMock(return_value=mock_ret) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock_cmd}): + ret = saltmod.function(name, tgt) + assert ret == expected + + +def test_function_when_no_minions_match(): + """ + Test to execute a single module function on a remote + minion via salt or salt-ssh + """ + name = "state" + tgt = "larry" + + expected = { + "name": name, + "changes": {}, + "result": False, + "comment": "No minions responded", + } + + with patch.dict(saltmod.__opts__, {"test": False}): + with patch.dict(saltmod.__salt__, {"saltutil.cmd": MagicMock(return_value={})}): + ret = saltmod.function(name, tgt) + assert ret == expected + + +def test_function_ssh(): + """ + Test saltmod function passes roster to saltutil.cmd + """ + cmd_mock = MagicMock() + with patch.dict(saltmod.__opts__, {"test": False}), patch.dict( + saltmod.__salt__, {"saltutil.cmd": cmd_mock} + ): + saltmod.function("state", tgt="*", ssh=True, roster="my_roster") + assert "roster" in cmd_mock.call_args.kwargs + assert cmd_mock.call_args.kwargs["roster"] == "my_roster" + + +def test_arg(): + name = "state" + tgt = "larry" + + expected = { + "name": name, + "changes": {"ret": {tgt: ""}}, + "result": True, + "comment": f"Function ran successfully. 
Function state ran on {tgt}.", + "warnings": ["Please specify 'arg' as a list of arguments."], + } + + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + }, + ) + args = ["foo", "bar"] + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, arg=" ".join(args)) + assert ret == expected + mock.assert_called_once() + assert "arg" in mock.call_args.kwargs + assert mock.call_args.kwargs["arg"] == args + + expected.pop("warnings") + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + }, + ) + args = ["foo", "bar"] + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, arg=args) + assert ret == expected + mock.assert_called_once() + assert "arg" in mock.call_args.kwargs + assert mock.call_args.kwargs["arg"] == args + + +def test_batch(): + name = "state" + tgt = "larry" + + expected = { + "name": name, + "changes": {"ret": {tgt: ""}}, + "result": True, + "comment": f"Function ran successfully. 
Function state ran on {tgt}.", + } + + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + }, + ) + batch = "yes" + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, batch=batch) + assert ret == expected + mock.assert_called_once() + assert "batch" in mock.call_args.kwargs + assert mock.call_args.kwargs["batch"] == batch + + batch = ["yes", "no"] + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, batch=batch) + assert ret == expected + assert "batch" in mock.call_args.kwargs + assert mock.call_args.kwargs["batch"] == str(batch) + + +def test_subset(): + name = "state" + tgt = "larry" + + expected = { + "name": name, + "changes": {"ret": {tgt: ""}}, + "result": True, + "comment": f"Function ran successfully. Function state ran on {tgt}.", + } + + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + }, + ) + subset = "yes" + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, subset=subset) + assert ret == expected + mock.assert_called_once() + assert "subset" in mock.call_args.kwargs + assert mock.call_args.kwargs["subset"] == subset + + +def test_ret_config(): + name = "state" + tgt = "larry" + + expected = { + "name": name, + "changes": {"ret": {tgt: ""}}, + "result": True, + "comment": f"Function ran successfully. 
Function state ran on {tgt}.", + } + + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + }, + ) + ret_config = {"yes": "no"} + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, ret_config=ret_config) + assert ret == expected + mock.assert_called_once() + assert "ret_config" in mock.call_args.kwargs + assert mock.call_args.kwargs["ret_config"] == ret_config + + +def test_ret_kwargs(): + name = "state" + tgt = "larry" + + expected = { + "name": name, + "changes": {"ret": {tgt: ""}}, + "result": True, + "comment": f"Function ran successfully. Function state ran on {tgt}.", + } + + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + }, + ) + ret_kwargs = {"yes": "no"} + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, ret_kwargs=ret_kwargs) + assert ret == expected + mock.assert_called_once() + assert "ret_kwargs" in mock.call_args.kwargs + assert mock.call_args.kwargs["ret_kwargs"] == ret_kwargs + + +def test_failhard(): + name = "state" + tgt = "larry" + + expected = { + "name": name, + "changes": {"ret": {tgt: ""}}, + "result": True, + "comment": f"Function ran successfully. 
Function state ran on {tgt}.", + } + + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + }, + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, failhard=True) + assert ret == expected + mock.assert_called_once() + assert "failhard" in mock.call_args.kwargs + assert mock.call_args.kwargs["failhard"] is True + + with patch.dict(saltmod.__opts__, {"test": False, "failhard": True}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + }, + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt) + assert ret == expected + mock.assert_called_once() + assert "failhard" in mock.call_args.kwargs + assert mock.call_args.kwargs["failhard"] is True + + +def test_fail_minions(): + name = "state" + tgt = "larry" + + expected = { + "name": name, + "changes": { + "ret": { + tgt: "", + "red": "red", + "green": "green", + "blue": "blue", + }, + }, + "result": True, + } + + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + "red": { + "ret": "red", + "retcode": 0, + "failed": False, + }, + "green": { + "ret": "green", + "retcode": 0, + "failed": False, + }, + "blue": { + "ret": "blue", + "retcode": 0, + "failed": False, + }, + }, + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, fail_minions="red") + ret_comment = ret.pop("comment") + assert ret == expected + assert "Function ran successfully. 
Function state ran on " in ret_comment + for part in (tgt, "red", "green", "blue"): + assert part in ret_comment + + mock.assert_called_once() + + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, fail_minions="red,green") + ret_comment = ret.pop("comment") + assert ret == expected + assert "Function ran successfully. Function state ran on " in ret_comment + for part in (tgt, "red", "green", "blue"): + assert part in ret_comment + + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, fail_minions=["red", "green"]) + ret_comment = ret.pop("comment") + assert ret == expected + assert "Function ran successfully. Function state ran on " in ret_comment + for part in (tgt, "red", "green", "blue"): + assert part in ret_comment + + expected["warnings"] = [ + "'fail_minions' needs to be a list or a comma separated string. Ignored." + ] + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, fail_minions=()) + ret_comment = ret.pop("comment") + assert ret == expected + assert "Function ran successfully. Function state ran on " in ret_comment + for part in (tgt, "red", "green", "blue"): + assert part in ret_comment + + expected.pop("warnings") + expected["changes"]["ret"]["red"] = False + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + "red": { + "ret": "red", + "retcode": 0, + "failed": True, + }, + "green": { + "ret": "green", + "retcode": 0, + "failed": False, + }, + "blue": { + "ret": "blue", + "retcode": 0, + "failed": False, + }, + }, + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, fail_minions="red") + ret_comment = ret.pop("comment") + assert ret == expected + assert "Function ran successfully. 
Function state ran on " in ret_comment + for part in (tgt, "red", "green", "blue"): + assert part in ret_comment + + mock.assert_called_once() + + expected["result"] = False + expected["changes"]["ret"]["green"] = False + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + "red": { + "ret": "red", + "retcode": 0, + "failed": True, + }, + "green": { + "ret": "green", + "retcode": 0, + "failed": True, + }, + "blue": { + "ret": "blue", + "retcode": 0, + "failed": False, + }, + }, + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, fail_minions="red") + ret_comment = ret.pop("comment") + assert ret == expected + assert "Running function state failed on minions: green " in ret_comment + assert "Function state ran on " in ret_comment + for part in (tgt, "red", "green", "blue"): + assert part in ret_comment + + mock.assert_called_once() + + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + tgt: { + "ret": "", + "retcode": 0, + "failed": False, + }, + "red": { + "ret": "red", + "retcode": 1, + "failed": True, + }, + "green": { + "ret": "green", + "retcode": 1, + "failed": True, + }, + "blue": { + "ret": "blue", + "retcode": 0, + "failed": False, + }, + }, + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, fail_minions="red") + ret_comment = ret.pop("comment") + assert ret == expected + try: + assert ( + "Running function state failed on minions: green, red" + in ret_comment + ) + except AssertionError: + assert ( + "Running function state failed on minions: red, green" + in ret_comment + ) + assert "Function state ran on " in ret_comment + for part in (tgt, "red", "green", "blue"): + assert part in ret_comment + + mock.assert_called_once() + + +def test_exception_raised(): + name = "state" + tgt = "larry" + + expected = { + "name": 
name, + "changes": {}, + "result": False, + "comment": "I'm an exception!", + } + + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock(side_effect=Exception("I'm an exception!")) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.function(name, tgt, failhard=True) + assert ret == expected + mock.assert_called_once() + assert "failhard" in mock.call_args.kwargs + assert mock.call_args.kwargs["failhard"] is True diff --git a/tests/pytests/unit/states/saltmod/test_parallel_runners.py b/tests/pytests/unit/states/saltmod/test_parallel_runners.py new file mode 100644 index 000000000000..6889877942f1 --- /dev/null +++ b/tests/pytests/unit/states/saltmod/test_parallel_runners.py @@ -0,0 +1,253 @@ +import pytest + +import salt.exceptions +import salt.states.saltmod as saltmod +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(minion_opts): + return { + saltmod: { + "__env__": "base", + "__opts__": minion_opts, + }, + } + + +def test_runners(): + name = "runner-name" + runner_1 = "runner-1" + runner_1_ret = { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name 1_|-show_notification": { + "comment": f"Notify me: {runner_1}", + "name": "this is a name 1", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + } + + expected = { + "name": name, + "changes": { + "ret": { + runner_1: runner_1_ret, + } + }, + "result": True, + "comment": "All runner functions executed successfully.", + } + mock = MagicMock(side_effect=[{"return": runner_1_ret}]) + with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}): + ret = saltmod.parallel_runners(name, runner_1) + assert ret == expected + + runner_2 = "runner-2" + runner_2_ret = { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + 
"test_|-notify_me_|-this is a name 2_|-show_notification": { + "comment": f"Notify me: {runner_2}", + "name": "this is a name 2", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + } + expected["changes"]["ret"][runner_2] = runner_2_ret + mock = MagicMock(side_effect=[{"return": runner_1_ret}, {"return": runner_2_ret}]) + with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}): + ret = saltmod.parallel_runners( + name, {runner_1: {"name": name}, runner_2: {"name": name}} + ) + assert ret == expected + + expected = { + "name": name, + "result": False, + "changes": {}, + "comment": "The runners parameter must be a string or dict.", + } + ret = saltmod.parallel_runners(name, [runner_1, runner_2]) + assert ret == expected + + +def test_exception(): + name = "runner-name" + runner_1 = "runner-1" + runner_1_ret = { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + } + + expected = { + "name": name, + "result": False, + "changes": {}, + "comment": "One of the runners raised an exception: An Exception!", + "success": False, + } + runner_2 = "runner-2" + mock = MagicMock( + side_effect=[ + {"return": runner_1_ret}, + salt.exceptions.SaltException("An Exception!"), + ] + ) + with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}): + ret = saltmod.parallel_runners( + name, {runner_1: {"name": name}, runner_2: {"name": name}} + ) + assert ret == expected + + +def test_failed(): + + name = "runner-name" + runner_1 = "runner-1" + runner_1_ret = { + "jid": "20170406104341210934", + 
"retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name 1_|-show_notification": { + "comment": f"Notify me: {runner_1}", + "name": "this is a name 1", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {"foo": "bar"}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + "exit_code": 1, + } + runner_2 = "runner-2" + runner_2_ret = { + "jid": "20170406104341210934", + "retcode": 1, + "ret": { + "test_|-notify_me_|-this is a name 2_|-show_notification": { + "comment": f"Notify me: {runner_2}", + "name": "this is a name 2", + "start_time": "10:43:41.487565", + "result": False, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + "exit_code": 0, + } + + expected = { + "name": name, + "changes": { + "ret": { + runner_1: runner_1_ret, + } + }, + "result": False, + "comment": f"Runner {runner_1} failed.", + } + mock = MagicMock(side_effect=[{"return": runner_1_ret}]) + with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}): + ret = saltmod.parallel_runners(name, runner_1) + assert ret == expected + + expected["changes"]["ret"][runner_2] = runner_2_ret + mock = MagicMock(side_effect=[{"return": runner_1_ret}, {"return": runner_2_ret}]) + with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}): + ret = saltmod.parallel_runners( + name, {runner_1: {"name": name}, runner_2: {"name": name}} + ) + assert ret == expected + + runner_3 = "runner-3" + runner_3_ret = { + "jid": "20170406104341210934", + "retcode": 1, + "ret": { + "test_|-notify_me_|-this is a name 2_|-show_notification": { + "comment": f"Notify me: {runner_2}", + "name": "this is a name 2", + "start_time": "10:43:41.487565", + "result": False, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + 
"exit_code": 1, + } + + expected["changes"]["ret"][runner_3] = runner_3_ret + expected.pop("comment") + mock = MagicMock( + side_effect=[ + {"return": runner_1_ret}, + {"return": runner_2_ret}, + {"return": runner_3_ret}, + ] + ) + with patch.dict(saltmod.__salt__, {"saltutil.runner": mock}): + ret = saltmod.parallel_runners( + name, + { + runner_1: {"name": name}, + runner_2: {"name": name}, + runner_3: {"name": name}, + }, + ) + ret_comment = ret.pop("comment") + assert ret == expected + assert "Runners " in ret_comment + assert " failed." in ret_comment + assert runner_1 in ret_comment + assert runner_3 in ret_comment + assert runner_2 not in ret_comment diff --git a/tests/pytests/unit/states/saltmod/test_runner.py b/tests/pytests/unit/states/saltmod/test_runner.py new file mode 100644 index 000000000000..3a25b4eedea5 --- /dev/null +++ b/tests/pytests/unit/states/saltmod/test_runner.py @@ -0,0 +1,48 @@ +import pytest + +import salt.states.saltmod as saltmod +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(minion_opts): + return { + saltmod: { + "__env__": "base", + "__opts__": minion_opts, + }, + } + + +def test_test_mode(): + name = "bah" + + expected = { + "name": name, + "changes": {}, + "result": None, + "comment": f"Runner function '{name}' would be executed.", + } + + with patch.dict(saltmod.__opts__, {"test": True}): + ret = saltmod.runner(name) + assert ret == expected + + +def test_runner(): + """ + Test to execute a runner module on the master + """ + name = "state" + + expected = { + "changes": {"return": True}, + "name": "state", + "result": True, + "comment": "Runner function 'state' executed.", + } + with patch.dict( + saltmod.__salt__, {"saltutil.runner": MagicMock(return_value={"return": True})} + ): + ret = saltmod.runner(name) + assert ret == expected diff --git a/tests/pytests/unit/states/saltmod/test_state.py b/tests/pytests/unit/states/saltmod/test_state.py new file mode 100644 index 
000000000000..44100617338a
--- /dev/null
+++ b/tests/pytests/unit/states/saltmod/test_state.py
@@ -0,0 +1,980 @@
+import pytest
+
+import salt.modules.saltutil
+import salt.states.saltmod as saltmod
+import salt.utils.event
+import salt.utils.jid
+import salt.utils.state
+from tests.support.mock import MagicMock, create_autospec, patch
+
+
+@pytest.fixture
+def configure_loader_modules(minion_opts):
+    return {
+        saltmod: {
+            "__opts__": minion_opts,
+            "__salt__": {"saltutil.cmd": MagicMock()},
+            "__utils__": {"state.check_result": salt.utils.state.check_result},
+        },
+    }
+
+
+@pytest.fixture
+def fake_cmd():
+    _fake_cmd = create_autospec(salt.modules.saltutil.cmd)
+    with patch.dict(saltmod.__salt__, {"saltutil.cmd": _fake_cmd}):
+        yield _fake_cmd
+
+
+@pytest.mark.parametrize(
+    "exclude",
+    [True, False],
+)
+def test_exclude_parameter_gets_passed(exclude, fake_cmd):
+    """
+    Smoke test for salt.states.saltmod.state(). Ensures that we
+    don't take an exception if optional parameters are not specified in
+    __opts__ or __env__.
+    """
+    args = ("webserver_setup", "webserver2")
+    expected_exclude = exclude
+    kwargs = {
+        "tgt_type": "glob",
+        "exclude": expected_exclude,
+        "highstate": True,
+    }
+
+    saltmod.state(*args, **kwargs)
+
+    call = fake_cmd.call_args[1]
+    assert call["kwarg"]["exclude"] == expected_exclude
+
+
+def test_exclude_parameter_is_not_passed_if_not_provided(fake_cmd):
+    # Make sure we don't barf on existing behavior
+    args = ("webserver_setup", "webserver2")
+    kwargs_without_exclude = {
+        "tgt_type": "glob",
+        "highstate": True,
+    }
+
+    saltmod.state(*args, **kwargs_without_exclude)
+
+    call = fake_cmd.call_args[1]
+    assert "exclude" not in call["kwarg"]
+
+
+def test_state_smoke_test():
+    """
+    Smoke test for salt.states.saltmod.state(). Ensures that we
+    don't take an exception if optional parameters are not specified in
+    __opts__ or __env__.
+ """ + args = ("webserver_setup", "webserver2") + kwargs = { + "tgt_type": "glob", + "fail_minions": None, + "pillar": None, + "top": None, + "batch": None, + "orchestration_jid": None, + "sls": "vroom", + "queue": False, + "concurrent": False, + "highstate": None, + "expr_form": None, + "ret": "", + "ssh": False, + "timeout": None, + "test": False, + "allow_fail": 0, + "saltenv": None, + "expect_minions": False, + } + with patch.dict(saltmod.__opts__, {"id": "webserver2"}): + ret = saltmod.state(*args, **kwargs) + expected = { + "comment": "States ran successfully.", + "changes": {}, + "name": "webserver_setup", + "result": True, + } + assert ret == expected + + +def test_state(): + """ + Test to invoke a state run on a given target + """ + name = "state" + tgt = "minion1" + + expected = { + "name": name, + "changes": {}, + "result": False, + "comment": "No highstate or sls specified, no execution made", + } + ret = saltmod.state(name, tgt) + assert ret == expected + + expected.update({"comment": "Must pass in boolean for value of 'concurrent'"}) + ret = saltmod.state(name, tgt, highstate=True, concurrent="a") + assert ret == expected + + expected.update( + { + "result": True, + "comment": "States ran successfully.", + } + ) + with patch.dict(saltmod.__opts__, {"test": True}): + ret = saltmod.state(name, tgt, highstate=True) + assert ret == expected + + silver_ret = { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {"foo": "bar"}, + "__id__": "notify_me", + } + } + expected.update( + { + "comment": "States ran successfully. 
Updating silver.", + "result": None, + "__jid__": "20170406104341210934", + "changes": { + "out": "highstate", + "ret": {"silver": silver_ret}, + }, + } + ) + with patch.dict(saltmod.__opts__, {"test": True}): + mock = MagicMock( + return_value={ + "silver": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": silver_ret, + "out": "highstate", + } + } + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, highstate=True) + assert ret == expected + mock.assert_called_once() + + expected.update( + { + "comment": "States ran successfully. No changes made to silver.", + "result": True, + "__jid__": "20170406104341210934", + "changes": {}, + } + ) + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + "silver": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "out": "highstate", + } + } + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, highstate=True) + assert ret == expected + mock.assert_called_once() + + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, top="the-top") + assert "arg" in mock.call_args.kwargs + assert "the-top" in mock.call_args.kwargs["arg"] + + for pass_kw in ("ret_config", "ret_kwargs", "batch", "subset"): + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + kwargs = {pass_kw: f"{pass_kw}_value"} + ret = saltmod.state(name, tgt, highstate=True, **{pass_kw: kwargs}) + assert pass_kw in mock.call_args.kwargs + if pass_kw == "batch": + assert mock.call_args.kwargs[pass_kw] == str(kwargs) + else: + assert mock.call_args.kwargs[pass_kw] == kwargs + assert ret == expected + + for 
pass_kw in ("pillar", "pillarenv", "saltenv"): + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + kwargs = {pass_kw: f"{pass_kw}_value"} + ret = saltmod.state(name, tgt, highstate=True, **{pass_kw: kwargs}) + assert "kwarg" in mock.call_args.kwargs + assert pass_kw in mock.call_args.kwargs["kwarg"] + assert mock.call_args.kwargs["kwarg"][pass_kw] == kwargs + assert ret == expected + + test_batch_return = { + "minion1": { + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + }, + "retcode": 0, + }, + "out": "highstate", + }, + "minion2": { + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + }, + "retcode": 0, + }, + "out": "highstate", + }, + "minion3": { + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + }, + "retcode": 0, + }, + "out": "highstate", + }, + } + expected.update( + { + "comment": ( + "States ran successfully. No changes made to minion1, minion3," + " minion2." + ) + } + ) + del expected["__jid__"] + with patch.dict(saltmod.__opts__, {"test": False}): + with patch.dict( + saltmod.__salt__, + {"saltutil.cmd": MagicMock(return_value=test_batch_return)}, + ): + state_run = saltmod.state(name, tgt, highstate=True) + + # Test return without checking the comment contents. Comments are tested later. 
+ comment = state_run.pop("comment") + expected.pop("comment") + assert state_run == expected + + # Check the comment contents in a non-order specific way (ordering fails sometimes on PY3) + assert "States ran successfully. No changes made to" in comment + for minion in ["minion1", "minion2", "minion3"]: + assert minion in comment + + +def test_state_masterless(): + """ + Test to invoke a state run masterless + """ + name = "state" + minion_id = "masterless-minion" + + expected = { + "name": name, + "changes": {}, + "comment": f"States ran successfully. No changes made to {minion_id}.", + "result": True, + } + with patch.dict( + saltmod.__opts__, + {"test": False, "__role": "minion", "file_client": "local", "id": minion_id}, + ): + mock = MagicMock( + return_value={ + minion_id: { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "out": "highstate", + } + } + ) + with patch.dict(saltmod.__salt__, {"state.highstate": mock}): + ret = saltmod.state(name, minion_id, highstate=True) + assert ret == expected + mock.assert_called_once() + with patch.dict(saltmod.__salt__, {"state.top": mock}): + ret = saltmod.state(name, minion_id, top="the-top") + assert ret == expected + assert "topfn" in mock.call_args.kwargs + assert mock.call_args.kwargs["topfn"] == "the-top" + with patch.dict(saltmod.__salt__, {"state.sls": mock}): + ret = saltmod.state(name, minion_id, sls="the-sls") + assert ret == expected + assert "mods" in mock.call_args.kwargs + assert mock.call_args.kwargs["mods"] == "the-sls" + with patch.dict(saltmod.__salt__, {"state.sls": mock}): + ret = saltmod.state(name, minion_id, sls=["the-sls-1", "the-sls-2"]) + assert ret == expected + assert "mods" in mock.call_args.kwargs + 
assert mock.call_args.kwargs["mods"] == "the-sls-1,the-sls-2" + + +def test_state_failhard(): + + name = "state" + tgt = "minion1" + + expected = { + "name": name, + "changes": {}, + "comment": "States ran successfully. No changes made to silver.", + "result": True, + "__jid__": "20170406104341210934", + } + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + "silver": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "out": "highstate", + } + } + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, highstate=True, failhard=True) + assert ret == expected + mock.assert_called_once() + assert "failhard" in mock.call_args.kwargs + assert mock.call_args.kwargs["failhard"] is True + + with patch.dict(saltmod.__opts__, {"test": False, "failhard": True}): + mock = MagicMock( + return_value={ + "silver": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "out": "highstate", + } + } + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, highstate=True) + assert ret == expected + mock.assert_called_once() + assert "failhard" in mock.call_args.kwargs + assert mock.call_args.kwargs["failhard"] is True + + +def test_state_no_returns(): + + name = "state" + tgt = "minion1" + + expected = { + "name": name, + "changes": {}, + "result": False, + "comment": "No minions returned", + } 
+ with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock(return_value={}) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, highstate=True) + assert ret == expected + mock.assert_called_once() + + +def test_state_failed_and_expected_minions(): + + name = "state" + tgt = "minion1" + + expected = { + "name": name, + "changes": {"out": "highstate", "ret": {"silver": False}}, + "comment": "Run failed on minions: silver", + "result": False, + "__jid__": "20170406104341210934", + } + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + "silver": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "gold": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "out": "highstate", + }, + } + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, highstate=True) + assert ret == expected + mock.assert_called_once() + + expected.update( + { + "changes": { + "out": "highstate", + "ret": {"bronze": False, "charcoal": False, "silver": False}, + }, + "comment": "Run failed on minions: silver, bronze", + } + ) + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + "charcoal": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": 
{ + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "bronze": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "silver": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "gold": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "out": "highstate", + }, + } + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, highstate=True, fail_minions="charcoal") + ret_comment = ret.pop("comment") + expected.pop("comment") + assert ret == expected + # The order can be different, hence asserting like this + assert "Run failed on minions: " in ret_comment + assert "silver" in ret_comment + assert "bronze" in ret_comment + mock.assert_called_once() + + expected.update( + { + "changes": { + "out": "highstate", + "ret": {"bronze": False, "charcoal": False, "silver": False}, 
+ }, + "comment": "Run failed on minions: silver", + } + ) + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + "charcoal": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "bronze": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "silver": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "gold": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "out": "highstate", + }, + } + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state( + name, tgt, highstate=True, fail_minions="bronze,charcoal" + ) + assert ret == expected + mock.assert_called_once() + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + 
ret = saltmod.state( + name, tgt, highstate=True, fail_minions=["bronze", "charcoal"] + ) + assert ret == expected + + expected.pop("__jid__") + expected.update( + { + "result": True, + "changes": {}, + "comment": "States ran successfully.", + "warnings": [ + "'fail_minions' needs to be a list or a comma separated string. Ignored.", + ], + } + ) + ret = saltmod.state(name, tgt, highstate=True, fail_minions={}) + assert ret == expected + + +def test_state_allow_fail(): + + name = "state" + tgt = "minion1" + + expected = { + "name": name, + "changes": {"out": "highstate", "ret": {"silver": False}}, + "comment": "States ran successfully. Updating silver. No changes made to gold.", + "result": True, + "__jid__": "20170406104341210934", + } + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + "silver": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "gold": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "out": "highstate", + }, + } + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, highstate=True, allow_fail=1) + assert ret == expected + mock.assert_called_once() + + gold_ret = { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 
0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {"foo": "bar"}, + "__id__": "notify_me", + } + } + + expected["changes"]["ret"]["gold"] = gold_ret + + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + "silver": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "gold": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": gold_ret, + "out": "highstate", + }, + } + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, highstate=True, allow_fail=1) + ret_comment = ret.pop("comment") + expected.pop("comment") + assert ret == expected + # The order can be different, hence asserting like this + assert "States ran successfully. 
Updating " in ret_comment + assert "silver" in ret_comment + assert "gold" in ret_comment + mock.assert_called_once() + + expected.update( + { + "changes": { + "out": "highstate", + "ret": {"bronze": False, "charcoal": False, "silver": False}, + }, + "comment": "Run failed on minions: silver, bronze", + "result": False, + } + ) + with patch.dict(saltmod.__opts__, {"test": False}): + mock = MagicMock( + return_value={ + "charcoal": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "bronze": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "silver": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "failed": True, + "out": "highstate", + }, + "gold": { + "jid": "20170406104341210934", + "retcode": 0, + "ret": { + "test_|-notify_me_|-this is a name_|-show_notification": { + "comment": "Notify me", + "name": "this is a name", + "start_time": "10:43:41.487565", + "result": True, + "duration": 0.35, + "__run_num__": 0, + "__sls__": "demo", + "changes": {}, + "__id__": "notify_me", + } + }, + "out": "highstate", + }, 
+ } + ) + with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): + ret = saltmod.state(name, tgt, highstate=True, allow_fail=1) + ret_comment = ret.pop("comment") + expected.pop("comment") + assert ret == expected + # The order can be different, hence asserting like this + assert "Run failed on minions: " in ret_comment + assert "silver" in ret_comment + assert "bronze" in ret_comment + mock.assert_called_once() + + expected = { + "name": name, + "changes": {}, + "result": False, + "comment": "Passed invalid value for 'allow_fail', must be an int", + } + ret = saltmod.state(name, tgt, allow_fail="a") + assert ret == expected + + +def test_roster(): + """ + Test saltmod state passes roster to saltutil.cmd + """ + cmd_mock = MagicMock() + with patch.dict(saltmod.__salt__, {"saltutil.cmd": cmd_mock}): + ret = saltmod.state( + "state.sls", tgt="*", ssh=True, highstate=True, roster="my_roster" + ) + assert "roster" in cmd_mock.call_args.kwargs + assert cmd_mock.call_args.kwargs["roster"] == "my_roster" diff --git a/tests/pytests/unit/states/saltmod/test_wait_for_event.py b/tests/pytests/unit/states/saltmod/test_wait_for_event.py new file mode 100644 index 000000000000..f2a005ae4915 --- /dev/null +++ b/tests/pytests/unit/states/saltmod/test_wait_for_event.py @@ -0,0 +1,131 @@ +import copy + +import pytest + +import salt.states.saltmod as saltmod +import salt.utils.state +from tests.support.mock import MagicMock, patch + + +class MockedEvent: + """ + Mocked event class + """ + + def __init__(self, data): + self.full = None + self.flag = None + self._data = data + + def get_event(self, full): + """ + Mock get_event method + """ + self.full = full + if self.flag: + return self._data + return None + + def __enter__(self): + return self + + def __exit__(self, *args): + pass + + +@pytest.fixture +def configure_loader_modules(minion_opts): + return { + saltmod: { + "__opts__": minion_opts, + }, + } + + +def test_test_mode(): + name = "presence" + event_id = "lost" + tgt = 
["minion_1", "minion_2", "minion_3"] + + expected = { + "name": name, + "changes": {}, + "result": None, + "comment": f"Orchestration would wait for event '{name}'", + } + + with patch.dict(saltmod.__opts__, {"test": True}): + ret = saltmod.wait_for_event(name, tgt, event_id=event_id, timeout=-1.0) + assert ret == expected + + +def test_wait_for_event(): + """ + Test to watch Salt's event bus and block until a condition is met + """ + name = "state" + tgt = "minion1" + + ret = { + "name": name, + "changes": {}, + "result": False, + "comment": "Timeout value reached.", + } + + mocked_event = MockedEvent({"tag": name, "data": {}}) + with patch.object( + salt.utils.event, "get_event", MagicMock(return_value=mocked_event) + ): + with patch.dict(saltmod.__opts__, {"sock_dir": True, "transport": True}): + with patch("salt.states.saltmod.time.time", MagicMock(return_value=1.0)): + assert saltmod.wait_for_event(name, "salt", timeout=-1.0) == ret + + mocked_event.flag = True + ret.update( + {"comment": "All events seen in 0.0 seconds.", "result": True} + ) + assert saltmod.wait_for_event(name, "") == ret + + ret.update({"comment": "Timeout value reached.", "result": False}) + assert saltmod.wait_for_event(name, tgt, timeout=-1.0) == ret + + +def test_list_single_event(): + """ + Test to watch Salt's event bus and block until a condition is met + """ + name = "presence" + event_id = "lost" + tgt = ["minion_1", "minion_2", "minion_3"] + + expected = { + "name": name, + "changes": {}, + "result": False, + "comment": "Timeout value reached.", + } + + mocked_event = MockedEvent({"tag": name, "data": {"lost": tgt}}) + with patch.object( + salt.utils.event, "get_event", MagicMock(return_value=mocked_event) + ): + with patch.dict(saltmod.__opts__, {"sock_dir": True, "transport": True}): + with patch("salt.states.saltmod.time.time", MagicMock(return_value=1.0)): + expected.update({"comment": "Timeout value reached.", "result": False}) + ret = saltmod.wait_for_event(name, tgt, 
event_id=event_id, timeout=-1.0) + assert ret == expected + + mocked_event.flag = True + expected.update( + { + "name": name, + "changes": {"minions_seen": tgt}, + "result": True, + "comment": "All events seen in 0.0 seconds.", + } + ) + ret = saltmod.wait_for_event( + name, copy.deepcopy(tgt), event_id="lost", timeout=1.0 + ) + assert ret == expected diff --git a/tests/pytests/unit/states/saltmod/test_wheel.py b/tests/pytests/unit/states/saltmod/test_wheel.py new file mode 100644 index 000000000000..fd15d3bc6e71 --- /dev/null +++ b/tests/pytests/unit/states/saltmod/test_wheel.py @@ -0,0 +1,68 @@ +import pytest + +import salt.states.saltmod as saltmod +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(minion_opts): + return { + saltmod: { + "__env__": "base", + "__opts__": minion_opts, + }, + } + + +def test_test_mode(): + name = "bah" + + expected = { + "name": name, + "changes": {}, + "result": None, + "comment": f"Wheel function '{name}' would be executed.", + } + + with patch.dict(saltmod.__opts__, {"test": True}): + ret = saltmod.wheel(name) + assert ret == expected + + +def test_wheel(): + """ + Test to execute a wheel module on the master + """ + name = "state" + + expected = { + "changes": {"return": True}, + "name": "state", + "result": True, + "comment": "Wheel function 'state' executed.", + } + with patch.dict( + saltmod.__salt__, {"saltutil.wheel": MagicMock(return_value={"return": True})} + ): + ret = saltmod.wheel(name) + assert ret == expected + + +def test_test_error_in_return(): + name = "bah" + + jid = "20170406104341210934" + func_ret = {"Error": "This is an Error!"} + expected = { + "name": name, + "changes": {"return": func_ret}, + "result": False, + "comment": f"Wheel function '{name}' failed.", + "__jid__": jid, + } + + mock = MagicMock(return_value={"return": func_ret, "jid": jid}) + with patch.dict(saltmod.__salt__, {"saltutil.wheel": mock}): + ret = saltmod.wheel(name) + assert ret == 
expected + mock.assert_called_once() diff --git a/tests/pytests/unit/states/test_boto_cloudwatch_event.py b/tests/pytests/unit/states/test_boto_cloudwatch_event.py index 1aa6a04e5436..684744464e7d 100644 --- a/tests/pytests/unit/states/test_boto_cloudwatch_event.py +++ b/tests/pytests/unit/states/test_boto_cloudwatch_event.py @@ -15,6 +15,12 @@ log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.skip_on_fips_enabled_platform, +] + + class GlobalConfig: region = "us-east-1" access_key = "GKTADJGHEIQSXMKKRBJ08H" diff --git a/tests/pytests/unit/states/test_boto_elasticache.py b/tests/pytests/unit/states/test_boto_elasticache.py index 6dbe211d771d..3692577834c3 100644 --- a/tests/pytests/unit/states/test_boto_elasticache.py +++ b/tests/pytests/unit/states/test_boto_elasticache.py @@ -7,6 +7,10 @@ import salt.states.boto_elasticache as boto_elasticache from tests.support.mock import MagicMock, patch +pytestmark = [ + pytest.mark.slow_test, +] + @pytest.fixture def configure_loader_modules(): diff --git a/tests/pytests/unit/states/test_boto_iot.py b/tests/pytests/unit/states/test_boto_iot.py index 233ed12dac71..6da6628b6550 100644 --- a/tests/pytests/unit/states/test_boto_iot.py +++ b/tests/pytests/unit/states/test_boto_iot.py @@ -16,6 +16,12 @@ log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.slow_test, + pytest.mark.skip_on_fips_enabled_platform, +] + + class GlobalConfig: region = "us-east-1" access_key = "GKTADJGHEIQSXMKKRBJ08H" diff --git a/tests/pytests/unit/states/test_boto_lambda.py b/tests/pytests/unit/states/test_boto_lambda.py index f8885a8491e5..400af9b23c8b 100644 --- a/tests/pytests/unit/states/test_boto_lambda.py +++ b/tests/pytests/unit/states/test_boto_lambda.py @@ -20,6 +20,11 @@ log = logging.getLogger(__name__) +pytestmark = [ + pytest.mark.slow_test, +] + + class GlobalConfig: region = "us-east-1" access_key = "GKTADJGHEIQSXMKKRBJ08H" diff --git a/tests/pytests/unit/states/test_chocolatey.py 
b/tests/pytests/unit/states/test_chocolatey.py index 1109b1a13422..5f24d3cb9aef 100644 --- a/tests/pytests/unit/states/test_chocolatey.py +++ b/tests/pytests/unit/states/test_chocolatey.py @@ -1,3 +1,6 @@ +""" +Unit tests for chocolatey state +""" import logging import pytest @@ -74,6 +77,7 @@ def test_source_present(list_sources): chocolatey.__salt__, { "chocolatey.list_sources": list_sources_sideeffect, + "chocolatey.add_source": chocolatey_mod.add_source, }, ): @@ -84,11 +88,6 @@ def test_source_present(list_sources): cmd_run_all_mock = MagicMock(return_value={"retcode": 0, "stdout": stdout_ret}) cmd_run_which_mock = MagicMock(return_value=choco_path) with patch.dict( - chocolatey.__salt__, - { - "chocolatey.add_source": chocolatey_mod.add_source, - }, - ), patch.dict( chocolatey_mod.__salt__, { "cmd.which": cmd_run_which_mock, diff --git a/tests/pytests/unit/states/test_cmd.py b/tests/pytests/unit/states/test_cmd.py index 682ee621e7d4..4a839c2f91ac 100644 --- a/tests/pytests/unit/states/test_cmd.py +++ b/tests/pytests/unit/states/test_cmd.py @@ -77,7 +77,9 @@ def test_run(): with patch.dict(cmd.__opts__, {"test": True}): comt = 'Command "cmd.script" would have been executed' - ret.update({"comment": comt, "result": None, "changes": {}}) + ret.update( + {"comment": comt, "result": None, "changes": {"cmd": "cmd.script"}} + ) assert cmd.run(name) == ret diff --git a/tests/pytests/unit/states/test_file.py b/tests/pytests/unit/states/test_file.py index fba4cff883fc..b7e239045b4e 100644 --- a/tests/pytests/unit/states/test_file.py +++ b/tests/pytests/unit/states/test_file.py @@ -4,17 +4,20 @@ import salt.modules.file as filemod import salt.states.file as file -from tests.support.mock import call, create_autospec, patch +from tests.support.mock import MagicMock, call, create_autospec, patch -@pytest.fixture(autouse=True) -def setup_loader(request): - setup_loader_modules = {file: {"__opts__": {"test": False}}} - with pytest.helpers.loader_mock(request, 
setup_loader_modules) as loader_mock: - yield loader_mock +@pytest.fixture +def configure_loader_modules(minion_opts): + return { + file: { + "__opts__": {"test": False}, + "__env__": "base", + } + } -@pytest.fixture() +@pytest.fixture def fake_remove(): fake_remove_mod = create_autospec(filemod.remove) with patch.dict(file.__salt__, {"file.remove": fake_remove_mod}): @@ -67,3 +70,22 @@ def test_file_copy_should_use_provided_force_mode_for_file_remove(fake_remove): file.copy_("/tmp/foo", source="/tmp/bar", group="fnord", force=True, mode=777) fake_remove.assert_called_with("/tmp/foo", force=True) + + +def test_file_recurse_directory_test(): + salt_dunder = { + "cp.list_master_dirs": MagicMock(return_value=[]), + "file.source_list": MagicMock(return_value=("salt://does_not_exist", "")), + } + with patch.dict(file.__salt__, salt_dunder): + ret = file.recurse("/tmp/test", "salt://does_not_exist", saltenv="base") + assert ret == { + "changes": {}, + "comment": "The directory 'does_not_exist' does not exist on the salt fileserver in saltenv 'base'", + "name": "/tmp/test", + "result": False, + } + salt_dunder["cp.list_master_dirs"].assert_called_once_with( + saltenv="base", + prefix="does_not_exist/", + ) diff --git a/tests/pytests/unit/states/test_gem.py b/tests/pytests/unit/states/test_gem.py new file mode 100644 index 000000000000..6d3a7ac001d8 --- /dev/null +++ b/tests/pytests/unit/states/test_gem.py @@ -0,0 +1,137 @@ +""" + Tests of salt.states.gem +""" + + +import pytest + +import salt.states.gem as gem +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {gem: {"__opts__": {"test": False}}} + + +def test_installed(): + gems = {"foo": ["1.0"], "bar": ["2.0"]} + gem_list = MagicMock(return_value=gems) + gem_install_succeeds = MagicMock(return_value=True) + gem_install_fails = MagicMock(return_value=False) + + with patch.dict(gem.__salt__, {"gem.list": gem_list}): + with patch.dict(gem.__salt__, 
{"gem.install": gem_install_succeeds}): + ret = gem.installed("foo") + assert ret["result"] is True + ret = gem.installed("quux") + assert ret["result"] is True + gem_install_succeeds.assert_called_once_with( + "quux", + pre_releases=False, + ruby=None, + runas=None, + version=None, + proxy=None, + rdoc=False, + source=None, + ri=False, + gem_bin=None, + ) + + with patch.dict(gem.__salt__, {"gem.install": gem_install_fails}): + ret = gem.installed("quux") + assert ret["result"] is False + gem_install_fails.assert_called_once_with( + "quux", + pre_releases=False, + ruby=None, + runas=None, + version=None, + proxy=None, + rdoc=False, + source=None, + ri=False, + gem_bin=None, + ) + + +def test_installed_version(): + gems = {"foo": ["1.0"], "bar": ["2.0"]} + gem_list = MagicMock(return_value=gems) + gem_install_succeeds = MagicMock(return_value=True) + + with patch.dict(gem.__salt__, {"gem.list": gem_list}): + with patch.dict(gem.__salt__, {"gem.install": gem_install_succeeds}): + ret = gem.installed("foo", version=">= 1.0") + assert ret["result"] is True + assert ret["comment"] == "Installed Gem meets version requirements." 
+ + +def test_removed(): + gems = ["foo", "bar"] + gem_list = MagicMock(return_value=gems) + gem_uninstall_succeeds = MagicMock(return_value=True) + gem_uninstall_fails = MagicMock(return_value=False) + with patch.dict(gem.__salt__, {"gem.list": gem_list}): + with patch.dict(gem.__salt__, {"gem.uninstall": gem_uninstall_succeeds}): + ret = gem.removed("quux") + assert ret["result"] is True + ret = gem.removed("foo") + assert ret["result"] is True + gem_uninstall_succeeds.assert_called_once_with( + "foo", None, runas=None, gem_bin=None + ) + + with patch.dict(gem.__salt__, {"gem.uninstall": gem_uninstall_fails}): + ret = gem.removed("bar") + assert ret["result"] is False + gem_uninstall_fails.assert_called_once_with( + "bar", None, runas=None, gem_bin=None + ) + + +def test_sources_add(): + gem_sources = ["http://foo", "http://bar"] + gem_sources_list = MagicMock(return_value=gem_sources) + gem_sources_add_succeeds = MagicMock(return_value=True) + gem_sources_add_fails = MagicMock(return_value=False) + with patch.dict(gem.__salt__, {"gem.sources_list": gem_sources_list}): + with patch.dict(gem.__salt__, {"gem.sources_add": gem_sources_add_succeeds}): + ret = gem.sources_add("http://foo") + assert ret["result"] is True + ret = gem.sources_add("http://fui") + assert ret["result"] is True + gem_sources_add_succeeds.assert_called_once_with( + source_uri="http://fui", ruby=None, runas=None + ) + with patch.dict(gem.__salt__, {"gem.sources_add": gem_sources_add_fails}): + ret = gem.sources_add("http://fui") + assert ret["result"] is False + gem_sources_add_fails.assert_called_once_with( + source_uri="http://fui", ruby=None, runas=None + ) + + +def test_sources_remove(): + gem_sources = ["http://foo", "http://bar"] + gem_sources_list = MagicMock(return_value=gem_sources) + gem_sources_remove_succeeds = MagicMock(return_value=True) + gem_sources_remove_fails = MagicMock(return_value=False) + with patch.dict(gem.__salt__, {"gem.sources_list": gem_sources_list}): + with 
patch.dict( + gem.__salt__, {"gem.sources_remove": gem_sources_remove_succeeds} + ): + ret = gem.sources_remove("http://fui") + assert ret["result"] is True + ret = gem.sources_remove("http://foo") + assert ret["result"] is True + gem_sources_remove_succeeds.assert_called_once_with( + source_uri="http://foo", ruby=None, runas=None + ) + with patch.dict(gem.__salt__, {"gem.sources_remove": gem_sources_remove_fails}): + ret = gem.sources_remove("http://bar") + assert ret["result"] is False + gem_sources_remove_fails.assert_called_once_with( + source_uri="http://bar", ruby=None, runas=None + ) diff --git a/tests/pytests/unit/states/test_glusterfs.py b/tests/pytests/unit/states/test_glusterfs.py new file mode 100644 index 000000000000..e0abdc74fae3 --- /dev/null +++ b/tests/pytests/unit/states/test_glusterfs.py @@ -0,0 +1,418 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.states.glusterfs +""" + +import pytest + +import salt.states.glusterfs as glusterfs +import salt.utils.cloud +import salt.utils.network +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {glusterfs: {}} + + +def test_peered(): + """ + Test to verify if node is peered. 
+ """ + name = "server1" + + ret = {"name": name, "result": True, "comment": "", "changes": {}} + + mock_ip = MagicMock(return_value=["1.2.3.4", "1.2.3.5"]) + mock_ip6 = MagicMock(return_value=["2001:db8::1"]) + mock_host_ips = MagicMock(return_value=["1.2.3.5"]) + mock_peer = MagicMock(return_value=True) + mock_status = MagicMock(return_value={"uuid1": {"hostnames": [name]}}) + + with patch.dict( + glusterfs.__salt__, + {"glusterfs.peer_status": mock_status, "glusterfs.peer": mock_peer}, + ): + with patch.object(salt.utils.network, "ip_addrs", mock_ip), patch.object( + salt.utils.network, "ip_addrs6", mock_ip6 + ), patch.object(salt.utils.network, "host_to_ips", mock_host_ips): + comt = "Peering with localhost is not needed" + ret.update({"comment": comt}) + assert glusterfs.peered(name) == ret + + mock_host_ips.return_value = ["127.0.1.1"] + comt = "Peering with localhost is not needed" + ret.update({"comment": comt}) + assert glusterfs.peered(name) == ret + + mock_host_ips.return_value = ["2001:db8::1"] + assert glusterfs.peered(name) == ret + + mock_host_ips.return_value = ["1.2.3.42"] + comt = "Host {} already peered".format(name) + ret.update({"comment": comt}) + assert glusterfs.peered(name) == ret + + with patch.dict(glusterfs.__opts__, {"test": False}): + old = {"uuid1": {"hostnames": ["other1"]}} + new = { + "uuid1": {"hostnames": ["other1"]}, + "uuid2": {"hostnames": ["someAlias", name]}, + } + mock_status.side_effect = [old, new] + comt = "Host {} successfully peered".format(name) + ret.update({"comment": comt, "changes": {"old": old, "new": new}}) + assert glusterfs.peered(name) == ret + mock_status.side_effect = None + + mock_status.return_value = {"uuid1": {"hostnames": ["other"]}} + mock_peer.return_value = False + + ret.update({"result": False}) + + comt = "Failed to peer with {}, please check logs for errors".format( + name + ) + ret.update({"comment": comt, "changes": {}}) + assert glusterfs.peered(name) == ret + + comt = "Invalid characters in 
peer name." + ret.update({"comment": comt, "name": ":/"}) + assert glusterfs.peered(":/") == ret + ret.update({"name": name}) + + with patch.dict(glusterfs.__opts__, {"test": True}): + comt = "Peer {} will be added.".format(name) + ret.update({"comment": comt, "result": None}) + assert glusterfs.peered(name) == ret + + +def test_volume_present(): + """ + Test to ensure that a volume exists + """ + name = "salt" + bricks = ["host1:/brick1"] + ret = {"name": name, "result": True, "comment": "", "changes": {}} + + started_info = {name: {"status": "1"}} + stopped_info = {name: {"status": "0"}} + + mock_info = MagicMock() + mock_list = MagicMock() + mock_create = MagicMock() + mock_start = MagicMock(return_value=True) + + with patch.dict( + glusterfs.__salt__, + { + "glusterfs.info": mock_info, + "glusterfs.list_volumes": mock_list, + "glusterfs.create_volume": mock_create, + "glusterfs.start_volume": mock_start, + }, + ): + with patch.dict(glusterfs.__opts__, {"test": False}): + mock_list.return_value = [name] + mock_info.return_value = started_info + comt = "Volume {} already exists and is started".format(name) + ret.update({"comment": comt}) + assert glusterfs.volume_present(name, bricks, start=True) == ret + + mock_info.return_value = stopped_info + comt = "Volume {} already exists and is now started".format(name) + ret.update( + {"comment": comt, "changes": {"old": "stopped", "new": "started"}} + ) + assert glusterfs.volume_present(name, bricks, start=True) == ret + + comt = "Volume {} already exists".format(name) + ret.update({"comment": comt, "changes": {}}) + assert glusterfs.volume_present(name, bricks, start=False) == ret + with patch.dict(glusterfs.__opts__, {"test": True}): + comt = "Volume {} already exists".format(name) + ret.update({"comment": comt, "result": None}) + assert glusterfs.volume_present(name, bricks, start=False) == ret + + comt = "Volume {} already exists and will be started".format(name) + ret.update({"comment": comt, "result": None}) + 
assert glusterfs.volume_present(name, bricks, start=True) == ret + + mock_list.return_value = [] + comt = "Volume {} will be created".format(name) + ret.update({"comment": comt, "result": None}) + assert glusterfs.volume_present(name, bricks, start=False) == ret + + comt = "Volume {} will be created and started".format(name) + ret.update({"comment": comt, "result": None}) + assert glusterfs.volume_present(name, bricks, start=True) == ret + + with patch.dict(glusterfs.__opts__, {"test": False}): + mock_list.side_effect = [[], [name]] + comt = "Volume {} is created".format(name) + ret.update( + { + "comment": comt, + "result": True, + "changes": {"old": [], "new": [name]}, + } + ) + assert glusterfs.volume_present(name, bricks, start=False) == ret + + mock_list.side_effect = [[], [name]] + comt = "Volume {} is created and is now started".format(name) + ret.update({"comment": comt, "result": True}) + assert glusterfs.volume_present(name, bricks, start=True) == ret + + mock_list.side_effect = None + mock_list.return_value = [] + mock_create.return_value = False + comt = "Creation of volume {} failed".format(name) + ret.update({"comment": comt, "result": False, "changes": {}}) + assert glusterfs.volume_present(name, bricks) == ret + + with patch.object(salt.utils.cloud, "check_name", MagicMock(return_value=True)): + comt = "Invalid characters in volume name." 
+ ret.update({"comment": comt, "result": False}) + assert glusterfs.volume_present(name, bricks) == ret + + +def test_started(): + """ + Test to check if volume has been started + """ + name = "salt" + + ret = {"name": name, "result": False, "comment": "", "changes": {}} + + started_info = {name: {"status": "1"}} + stopped_info = {name: {"status": "0"}} + mock_info = MagicMock(return_value={}) + mock_start = MagicMock(return_value=True) + + with patch.dict( + glusterfs.__salt__, + {"glusterfs.info": mock_info, "glusterfs.start_volume": mock_start}, + ): + comt = "Volume {} does not exist".format(name) + ret.update({"comment": comt}) + assert glusterfs.started(name) == ret + + mock_info.return_value = started_info + comt = "Volume {} is already started".format(name) + ret.update({"comment": comt, "result": True}) + assert glusterfs.started(name) == ret + + with patch.dict(glusterfs.__opts__, {"test": True}): + mock_info.return_value = stopped_info + comt = "Volume {} will be started".format(name) + ret.update({"comment": comt, "result": None}) + assert glusterfs.started(name) == ret + + with patch.dict(glusterfs.__opts__, {"test": False}): + comt = "Volume {} is started".format(name) + ret.update( + { + "comment": comt, + "result": True, + "change": {"new": "started", "old": "stopped"}, + } + ) + assert glusterfs.started(name) == ret + + +def test_add_volume_bricks(): + """ + Test to add brick(s) to an existing volume + """ + name = "salt" + bricks = ["host1:/drive1"] + old_bricks = ["host1:/drive2"] + + ret = {"name": name, "result": False, "comment": "", "changes": {}} + + stopped_volinfo = {"salt": {"status": "0"}} + volinfo = {"salt": {"status": "1", "bricks": {"brick1": {"path": old_bricks[0]}}}} + new_volinfo = { + "salt": { + "status": "1", + "bricks": { + "brick1": {"path": old_bricks[0]}, + "brick2": {"path": bricks[0]}, + }, + } + } + + mock_info = MagicMock(return_value={}) + mock_add = MagicMock(side_effect=[False, True]) + + with patch.dict( + 
glusterfs.__salt__, + {"glusterfs.info": mock_info, "glusterfs.add_volume_bricks": mock_add}, + ): + ret.update({"comment": "Volume salt does not exist"}) + assert glusterfs.add_volume_bricks(name, bricks) == ret + + mock_info.return_value = stopped_volinfo + ret.update({"comment": "Volume salt is not started"}) + assert glusterfs.add_volume_bricks(name, bricks) == ret + + mock_info.return_value = volinfo + ret.update({"comment": "Adding bricks to volume salt failed"}) + assert glusterfs.add_volume_bricks(name, bricks) == ret + + ret.update({"result": True}) + ret.update({"comment": "Bricks already added in volume salt"}) + assert glusterfs.add_volume_bricks(name, old_bricks) == ret + + mock_info.side_effect = [volinfo, new_volinfo] + ret.update( + { + "comment": "Bricks successfully added to volume salt", + "changes": {"new": bricks + old_bricks, "old": old_bricks}, + } + ) + # Let's sort ourselves because the test under python 3 sometimes fails + # just because of the new changes list order + result = glusterfs.add_volume_bricks(name, bricks) + ret["changes"]["new"] = sorted(ret["changes"]["new"]) + result["changes"]["new"] = sorted(result["changes"]["new"]) + assert result == ret + + +def test_op_version(): + """ + Test setting the Glusterfs op-version + """ + name = "salt" + current = 30707 + new = 31200 + + ret = {"name": name, "result": False, "comment": "", "changes": {}} + + mock_get_version = MagicMock(return_value={}) + mock_set_version = MagicMock(return_value={}) + + with patch.dict( + glusterfs.__salt__, + { + "glusterfs.get_op_version": mock_get_version, + "glusterfs.set_op_version": mock_set_version, + }, + ): + mock_get_version.return_value = [False, "some error message"] + ret.update({"result": False}) + ret.update({"comment": "some error message"}) + assert glusterfs.op_version(name, current) == ret + + mock_get_version.return_value = current + ret.update({"result": True}) + ret.update( + { + "comment": ( + "Glusterfs cluster.op-version for {} 
already set to {}".format( + name, current + ) + ) + } + ) + assert glusterfs.op_version(name, current) == ret + + with patch.dict(glusterfs.__opts__, {"test": True}): + mock_set_version.return_value = [False, "Failed to set version"] + ret.update({"result": None}) + ret.update( + { + "comment": ( + "An attempt would be made to set the cluster.op-version for" + " {} to {}.".format(name, new) + ) + } + ) + assert glusterfs.op_version(name, new) == ret + + with patch.dict(glusterfs.__opts__, {"test": False}): + mock_set_version.return_value = [False, "Failed to set version"] + ret.update({"result": False}) + ret.update({"comment": "Failed to set version"}) + assert glusterfs.op_version(name, new) == ret + + mock_set_version.return_value = "some success message" + ret.update({"comment": "some success message"}) + ret.update({"changes": {"old": current, "new": new}}) + ret.update({"result": True}) + assert glusterfs.op_version(name, new) == ret + + +def test_max_op_version(): + """ + Test setting the Glusterfs to its self reported max-op-version + """ + name = "salt" + current = 30707 + new = 31200 + + ret = {"name": name, "result": False, "comment": "", "changes": {}} + + mock_get_version = MagicMock(return_value={}) + mock_get_max_op_version = MagicMock(return_value={}) + mock_set_version = MagicMock(return_value={}) + + with patch.dict( + glusterfs.__salt__, + { + "glusterfs.get_op_version": mock_get_version, + "glusterfs.set_op_version": mock_set_version, + "glusterfs.get_max_op_version": mock_get_max_op_version, + }, + ): + mock_get_version.return_value = [False, "some error message"] + ret.update({"result": False}) + ret.update({"comment": "some error message"}) + assert glusterfs.max_op_version(name) == ret + + mock_get_version.return_value = current + mock_get_max_op_version.return_value = [False, "some error message"] + ret.update({"result": False}) + ret.update({"comment": "some error message"}) + assert glusterfs.max_op_version(name) == ret + + 
mock_get_version.return_value = current + mock_get_max_op_version.return_value = current + ret.update({"result": True}) + ret.update( + { + "comment": ( + "The cluster.op-version is already set to the" + " cluster.max-op-version of {}".format(current) + ) + } + ) + assert glusterfs.max_op_version(name) == ret + + with patch.dict(glusterfs.__opts__, {"test": True}): + mock_get_max_op_version.return_value = new + ret.update({"result": None}) + ret.update( + { + "comment": ( + "An attempt would be made to set the cluster.op-version" + " to {}.".format(new) + ) + } + ) + assert glusterfs.max_op_version(name) == ret + + with patch.dict(glusterfs.__opts__, {"test": False}): + mock_set_version.return_value = [False, "Failed to set version"] + ret.update({"result": False}) + ret.update({"comment": "Failed to set version"}) + assert glusterfs.max_op_version(name) == ret + + mock_set_version.return_value = "some success message" + ret.update({"comment": "some success message"}) + ret.update({"changes": {"old": current, "new": new}}) + ret.update({"result": True}) + assert glusterfs.max_op_version(name) == ret diff --git a/tests/pytests/unit/states/test_gnomedesktop.py b/tests/pytests/unit/states/test_gnomedesktop.py new file mode 100644 index 000000000000..0229e7345929 --- /dev/null +++ b/tests/pytests/unit/states/test_gnomedesktop.py @@ -0,0 +1,48 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.states.gnomedesktop +""" + + +import pytest + +import salt.states.gnomedesktop as gnomedesktop + + +@pytest.fixture +def configure_loader_modules(): + return {gnomedesktop: {}} + + +def test_wm_preferences(): + """ + Test to sets values in the org.gnome.desktop.wm.preferences schema + """ + name = "salt" + + ret = {"name": name, "result": True, "comment": "", "changes": {}} + + assert gnomedesktop.wm_preferences(name) == ret + + +def test_desktop_lockdown(): + """ + Test to sets values in the org.gnome.desktop.lockdown schema + """ + name = "salt" + + ret = {"name": 
name, "result": True, "comment": "", "changes": {}} + + assert gnomedesktop.desktop_lockdown(name) == ret + + +def test_desktop_interface(): + """ + Test to sets values in the org.gnome.desktop.interface schema + """ + name = "salt" + + ret = {"name": name, "result": True, "comment": "", "changes": {}} + + assert gnomedesktop.desktop_interface(name) == ret diff --git a/tests/pytests/unit/states/test_helm.py b/tests/pytests/unit/states/test_helm.py new file mode 100644 index 000000000000..d7d78432e17a --- /dev/null +++ b/tests/pytests/unit/states/test_helm.py @@ -0,0 +1,275 @@ +""" + Test cases for salt.modules.helm +""" + + +import pytest + +import salt.states.helm as helm +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {helm: {}} + + +def test_repo_managed_import_failed_repo_manage(): + ret = { + "name": "state_id", + "changes": {}, + "result": False, + "comment": "'helm.repo_manage' modules not available on this minion.", + } + assert helm.repo_managed("state_id") == ret + + +def test_repo_managed_import_failed_repo_update(): + mock_helm_modules = {"helm.repo_manage": MagicMock(return_value=True)} + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "changes": {}, + "result": False, + "comment": "'helm.repo_update' modules not available on this minion.", + } + assert helm.repo_managed("state_id") == ret + + +def test_repo_managed_is_testing(): + mock_helm_modules = { + "helm.repo_manage": MagicMock(return_value=True), + "helm.repo_update": MagicMock(return_value=True), + } + with patch.dict(helm.__salt__, mock_helm_modules): + mock__opts__ = {"test": MagicMock(return_value=True)} + with patch.dict(helm.__opts__, mock__opts__): + ret = { + "name": "state_id", + "result": None, + "comment": "Helm repo would have been managed.", + "changes": {}, + } + assert helm.repo_managed("state_id") == ret + + +def test_repo_managed_success(): + result_changes = {"added": True, 
"removed": True, "failed": False} + mock_helm_modules = { + "helm.repo_manage": MagicMock(return_value=result_changes), + "helm.repo_update": MagicMock(return_value=True), + } + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "result": True, + "comment": "Repositories were added or removed.", + "changes": result_changes, + } + assert helm.repo_managed("state_id") == ret + + +def test_repo_managed_success_with_update(): + result_changes = {"added": True, "removed": True, "failed": False} + mock_helm_modules = { + "helm.repo_manage": MagicMock(return_value=result_changes), + "helm.repo_update": MagicMock(return_value=True), + } + result_wanted = result_changes + result_wanted.update({"repo_update": True}) + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "result": True, + "comment": "Repositories were added or removed.", + "changes": result_wanted, + } + assert helm.repo_managed("state_id") == ret + + +def test_repo_managed_failed(): + result_changes = {"added": True, "removed": True, "failed": True} + mock_helm_modules = { + "helm.repo_manage": MagicMock(return_value=result_changes), + "helm.repo_update": MagicMock(return_value=True), + } + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "result": False, + "comment": "Failed to add or remove some repositories.", + "changes": result_changes, + } + assert helm.repo_managed("state_id") == ret + + +def test_repo_updated_import_failed(): + ret = { + "name": "state_id", + "changes": {}, + "result": False, + "comment": "'helm.repo_update' modules not available on this minion.", + } + assert helm.repo_updated("state_id") == ret + + +def test_repo_updated_is_testing(): + mock_helm_modules = {"helm.repo_update": MagicMock(return_value=True)} + with patch.dict(helm.__salt__, mock_helm_modules): + mock__opts__ = {"test": MagicMock(return_value=True)} + with patch.dict(helm.__opts__, mock__opts__): + ret = { + "name": 
"state_id", + "result": None, + "comment": "Helm repo would have been updated.", + "changes": {}, + } + assert helm.repo_updated("state_id") == ret + + +def test_repo_updated_success(): + mock_helm_modules = {"helm.repo_update": MagicMock(return_value=True)} + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "result": True, + "comment": "Helm repo is updated.", + "changes": {}, + } + assert helm.repo_updated("state_id") == ret + + +def test_repo_updated_failed(): + mock_helm_modules = {"helm.repo_update": MagicMock(return_value=False)} + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "result": False, + "comment": "Failed to sync some repositories.", + "changes": False, + } + assert helm.repo_updated("state_id") == ret + + +def test_release_present_import_failed_helm_status(): + ret = { + "name": "state_id", + "changes": {}, + "result": False, + "comment": "'helm.status' modules not available on this minion.", + } + assert helm.release_present("state_id", "mychart") == ret + + +def test_release_present_import_failed_helm_install(): + mock_helm_modules = {"helm.status": MagicMock(return_value=True)} + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "changes": {}, + "result": False, + "comment": "'helm.install' modules not available on this minion.", + } + assert helm.release_present("state_id", "mychart") == ret + + +def test_release_present_import_failed_helm_upgrade(): + mock_helm_modules = { + "helm.status": MagicMock(return_value=True), + "helm.install": MagicMock(return_value=True), + } + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "changes": {}, + "result": False, + "comment": "'helm.upgrade' modules not available on this minion.", + } + assert helm.release_present("state_id", "mychart") == ret + + +def test_release_present_is_testing(): + mock_helm_modules = { + "helm.status": MagicMock(return_value=True), + 
"helm.install": MagicMock(return_value=True), + "helm.upgrade": MagicMock(return_value=True), + } + with patch.dict(helm.__salt__, mock_helm_modules): + mock__opts__ = {"test": MagicMock(return_value=True)} + with patch.dict(helm.__opts__, mock__opts__): + ret = { + "name": "state_id", + "result": None, + "comment": "Helm release would have been installed or updated.", + "changes": {}, + } + assert helm.release_present("state_id", "mychart") == ret + + +def test_release_absent_import_failed_helm_uninstall(): + ret = { + "name": "state_id", + "changes": {}, + "result": False, + "comment": "'helm.uninstall' modules not available on this minion.", + } + assert helm.release_absent("state_id") == ret + + +def test_release_absent_import_failed_helm_status(): + mock_helm_modules = {"helm.uninstall": MagicMock(return_value=True)} + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "changes": {}, + "result": False, + "comment": "'helm.status' modules not available on this minion.", + } + assert helm.release_absent("state_id") == ret + + +def test_release_absent_is_testing(): + mock_helm_modules = { + "helm.status": MagicMock(return_value=True), + "helm.uninstall": MagicMock(return_value=True), + } + with patch.dict(helm.__salt__, mock_helm_modules): + mock__opts__ = {"test": MagicMock(return_value=True)} + with patch.dict(helm.__opts__, mock__opts__): + ret = { + "name": "state_id", + "result": None, + "comment": "Helm release would have been uninstalled.", + "changes": {}, + } + assert helm.release_absent("state_id") == ret + + +def test_release_absent_success(): + mock_helm_modules = { + "helm.status": MagicMock(return_value={}), + "helm.uninstall": MagicMock(return_value=True), + } + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "result": True, + "comment": "Helm release state_id is absent.", + "changes": {"absent": "state_id"}, + } + assert helm.release_absent("state_id") == ret + + +def 
test_release_absent_error(): + mock_helm_modules = { + "helm.status": MagicMock(return_value={}), + "helm.uninstall": MagicMock(return_value="error"), + } + with patch.dict(helm.__salt__, mock_helm_modules): + ret = { + "name": "state_id", + "result": False, + "comment": "error", + "changes": {}, + } + assert helm.release_absent("state_id") == ret diff --git a/tests/pytests/unit/states/test_hg.py b/tests/pytests/unit/states/test_hg.py new file mode 100644 index 000000000000..1c50ba4b97a3 --- /dev/null +++ b/tests/pytests/unit/states/test_hg.py @@ -0,0 +1,140 @@ +""" + :codeauthor: Rahul Handay + + Test cases for salt.modules.hg +""" + +import os + +import pytest + +import salt.states.hg as hg +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {hg: {}} + + +def test_latest(): + """ + Test to Make sure the repository is cloned to + the given directory and is up to date + """ + ret = {"changes": {}, "comment": "", "name": "salt", "result": True} + mock = MagicMock(return_value=True) + with patch.object(hg, "_fail", mock): + assert hg.latest("salt") + + mock = MagicMock(side_effect=[False, True, False, False, False, False]) + with patch.object(os.path, "isdir", mock): + mock = MagicMock(return_value=True) + with patch.object(hg, "_handle_existing", mock): + assert hg.latest("salt", target="c:\\salt") + + with patch.dict(hg.__opts__, {"test": True}): + mock = MagicMock(return_value=True) + with patch.object(hg, "_neutral_test", mock): + assert hg.latest("salt", target="c:\\salt") + + with patch.dict(hg.__opts__, {"test": False}): + mock = MagicMock(return_value=True) + with patch.object(hg, "_clone_repo", mock): + assert hg.latest("salt", target="c:\\salt") == ret + + +def test_latest_update_changes(): + """ + Test to make sure we don't update even if we have changes + """ + ret = {"changes": {}, "comment": "", "name": "salt", "result": True} + revision_mock = MagicMock(return_value="abcdef") + pull_mock = 
MagicMock(return_value="Blah.") + update_mock = MagicMock() + + with patch.dict( + hg.__salt__, + { + "hg.revision": revision_mock, + "hg.pull": pull_mock, + "hg.update": update_mock, + }, + ): + mock = MagicMock(side_effect=[True, True]) + with patch.object(os.path, "isdir", mock): + mock = MagicMock(return_value=True) + with patch.dict(hg.__opts__, {"test": False}): + with patch.object(hg, "_clone_repo", mock): + assert hg.latest("salt", target="c:\\salt", update_head=True) == ret + assert update_mock.called + + +def test_latest_no_update_changes(): + """ + Test to make sure we don't update even if we have changes + """ + ret = { + "changes": {}, + "comment": ( + "Update is probably required but update_head=False so we will skip" + " updating." + ), + "name": "salt", + "result": True, + } + revision_mock = MagicMock(return_value="abcdef") + pull_mock = MagicMock(return_value="Blah.") + update_mock = MagicMock() + + with patch.dict( + hg.__salt__, + { + "hg.revision": revision_mock, + "hg.pull": pull_mock, + "hg.update": update_mock, + }, + ): + mock = MagicMock(side_effect=[True, True]) + with patch.object(os.path, "isdir", mock): + mock = MagicMock(return_value=True) + with patch.dict(hg.__opts__, {"test": False}): + with patch.object(hg, "_clone_repo", mock): + assert ( + hg.latest("salt", target="c:\\salt", update_head=False) == ret + ) + assert not update_mock.called + + +def test_latest_no_update_no_changes(): + """ + Test to Make sure the repository is cloned to + the given directory and is up to date + """ + ret = { + "changes": {}, + "comment": "No changes found and update_head=False so will skip updating.", + "name": "salt", + "result": True, + } + revision_mock = MagicMock(return_value="abcdef") + pull_mock = MagicMock(return_value="Blah no changes found.") + update_mock = MagicMock() + + with patch.dict( + hg.__salt__, + { + "hg.revision": revision_mock, + "hg.pull": pull_mock, + "hg.update": update_mock, + }, + ): + mock = MagicMock(side_effect=[True, 
True]) + with patch.object(os.path, "isdir", mock): + mock = MagicMock(return_value=True) + with patch.dict(hg.__opts__, {"test": False}): + with patch.object(hg, "_clone_repo", mock): + assert ( + hg.latest("salt", target="c:\\salt", update_head=False) == ret + ) + assert not update_mock.called diff --git a/tests/pytests/unit/states/test_http.py b/tests/pytests/unit/states/test_http.py index a672845e5c32..85150b4a2a81 100644 --- a/tests/pytests/unit/states/test_http.py +++ b/tests/pytests/unit/states/test_http.py @@ -42,26 +42,124 @@ def test_query(): with patch.dict(http.__salt__, {"http.query": mock}): assert http.query("salt", "Dude", "stack") == ret[1] + with patch.dict(http.__opts__, {"test": False}): + mock = MagicMock(return_value={"body": "http body", "status": 200}) + expected = { + "name": "http://example.com/", + "result": True, + "comment": "Status 200 was found.", + "changes": {}, + "data": {"body": "http body", "status": 200}, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query(name="http://example.com/", status=200, decode=False) + == expected + ) + + with patch.dict(http.__opts__, {"test": False}): + mock = MagicMock(return_value={"body": "http body", "status": 200}) + expected = { + "name": "http://example.com/", + "result": True, + "comment": "Status 200 was found.", + "changes": {}, + "data": {"body": "http body", "status": 200}, + } + + with patch.dict(http.__salt__, {"http.wait_for_successful_query": mock}): + assert ( + http.query(name="http://example.com/", status=200, wait_for=300) + == expected + ) + + with patch.dict(http.__opts__, {"test": True}): + mock = MagicMock(return_value={"body": "http body", "status": 200}) + expected = { + "name": "http://example.com/", + "result": None, + "comment": "Status 200 was found. 
(TEST MODE, TEST URL WAS: http://status.example.com)", + "changes": {}, + "data": {"body": "http body", "status": 200}, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + name="http://example.com/", + status=200, + test_url="http://status.example.com", + ) + == expected + ) + def test_query_pcre_statustype(): """ Test to perform an HTTP query with a regex used to match the status code and statefully return the result """ testurl = "salturl" - http_result = {"text": "This page returned a 201 status code", "status": "201"} - state_return = { - "changes": {}, - "comment": ( - 'Match text "This page returned" was found. Status pattern "200|201" was' - " found." - ), - "data": {"status": "201", "text": "This page returned a 201 status code"}, - "name": testurl, - "result": True, - } with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ( + 'Match text "This page returned" was found. Status pattern "200|201" was' + " found." 
+ ), + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned", + status="200|201", + status_type="pcre", + ) + == state_return + ) + + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ('Status pattern "200|201" was found.'), + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + status="200|201", + status_type="pcre", + ) + == state_return + ) + + http_result = {"text": "This page returned a 403 status code", "status": "403"} + mock = MagicMock(return_value=http_result) + + state_return = { + "name": "salturl", + "result": False, + "comment": 'Match text "This page returned" was found. 
Status pattern "200|201" was not found.', + "changes": {}, + "data": {"text": "This page returned a 403 status code", "status": "403"}, + } + with patch.dict(http.__salt__, {"http.query": mock}): assert ( http.query( @@ -74,23 +172,84 @@ def test_query_pcre_statustype(): ) +def test_query_pcre_matchtype(): + """ + Test to perform an HTTP query with a regex used to match the returned text and statefully return the result + """ + testurl = "salturl" + + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ('Match pattern "This page returned" was found.'), + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned", + match_type="pcre", + ) + == state_return + ) + + http_result = { + "text": "This page did not return a 201 status code", + "status": "403", + } + mock = MagicMock(return_value=http_result) + + state_return = { + "changes": {}, + "comment": ('Match pattern "This page returned" was not found.'), + "data": { + "status": "403", + "text": "This page did not return a 201 status code", + }, + "name": testurl, + "result": False, + } + + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned", + match_type="pcre", + ) + == state_return + ) + + def test_query_stringstatustype(): """ Test to perform an HTTP query with a string status code and statefully return the result """ testurl = "salturl" - http_result = {"text": "This page returned a 201 status code", "status": "201"} - state_return = { - "changes": {}, - "comment": 'Match text "This page returned" was found. 
Status 201 was found.', - "data": {"status": "201", "text": "This page returned a 201 status code"}, - "name": testurl, - "result": True, - } with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} mock = MagicMock(return_value=http_result) + with patch.dict(http.__salt__, {"http.query": mock}): + state_return = { + "changes": {}, + "comment": 'Match text "This page returned" was found. Status 201 was found.', + "data": { + "status": "201", + "text": "This page returned a 201 status code", + }, + "name": testurl, + "result": True, + } + assert ( http.query( testurl, @@ -101,22 +260,80 @@ def test_query_stringstatustype(): == state_return ) + http_result = {"text": "This page returned a 403 status code", "status": "403"} + mock = MagicMock(return_value=http_result) + + with patch.dict(http.__salt__, {"http.query": mock}): + state_return = { + "name": "salturl", + "result": False, + "comment": 'Match text "This page returned" was found. 
Status 201 was not found.', + "changes": {}, + "data": { + "text": "This page returned a 403 status code", + "status": "403", + }, + } + + assert ( + http.query( + testurl, + match="This page returned", + status="201", + status_type="string", + ) + == state_return + ) + + +def test_query_invalidstatustype(): + """ + Test to perform an HTTP query with a string status code and statefully return the result + """ + testurl = "salturl" + + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + mock = MagicMock(return_value=http_result) + + with patch.dict(http.__salt__, {"http.query": mock}): + state_return = { + "name": "salturl", + "result": None, + "comment": "", + "changes": {}, + "data": { + "text": "This page returned a 201 status code", + "status": "201", + }, + } + + assert ( + http.query( + testurl, + status="201", + status_type="invalid", + ) + == state_return + ) + def test_query_liststatustype(): """ Test to perform an HTTP query with a list of status codes and statefully return the result """ testurl = "salturl" - http_result = {"text": "This page returned a 201 status code", "status": "201"} - state_return = { - "changes": {}, - "comment": 'Match text "This page returned" was found. Status 201 was found.', - "data": {"status": "201", "text": "This page returned a 201 status code"}, - "name": testurl, - "result": True, - } with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + state_return = { + "changes": {}, + "comment": 'Match text "This page returned" was found. 
Status 201 was found.', + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + mock = MagicMock(return_value=http_result) with patch.dict(http.__salt__, {"http.query": mock}): assert ( @@ -129,6 +346,48 @@ def test_query_liststatustype(): == state_return ) + with patch.dict(http.__opts__, {"test": False}): + http_result = {"text": "This page returned a 201 status code", "status": "201"} + state_return = { + "changes": {}, + "comment": "Status 201 was found.", + "data": {"status": "201", "text": "This page returned a 201 status code"}, + "name": testurl, + "result": True, + } + + mock = MagicMock(return_value=http_result) + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + status=["200", "201"], + status_type="list", + ) + == state_return + ) + + http_result = {"text": "This page returned a 403 status code", "status": "403"} + state_return = { + "name": "salturl", + "result": False, + "comment": "Match text \"This page returned a 200\" was not found. 
Statuses ['200', '201'] were not found.", + "changes": {}, + "data": {"text": "This page returned a 403 status code", "status": "403"}, + } + + mock = MagicMock(return_value=http_result) + with patch.dict(http.__salt__, {"http.query": mock}): + assert ( + http.query( + testurl, + match="This page returned a 200", + status=["200", "201"], + status_type="list", + ) + == state_return + ) + def test_wait_for_with_interval(): """ @@ -156,3 +415,22 @@ def test_wait_for_without_interval(): with patch("time.sleep", MagicMock()) as sleep_mock: assert http.wait_for_successful_query("url", status=200) == {"result": True} sleep_mock.assert_not_called() + + query_mock = MagicMock(return_value={"result": False}) + + with patch.object(http, "query", query_mock): + with patch( + "time.time", MagicMock(side_effect=[1697564521.9640958, 1697564822.9640958]) + ): + assert http.wait_for_successful_query("url", status=200) == { + "result": False + } + + query_mock = MagicMock(side_effect=Exception()) + + with patch.object(http, "query", query_mock): + with patch( + "time.time", MagicMock(side_effect=[1697564521.9640958, 1697564822.9640958]) + ): + with pytest.raises(Exception): + http.wait_for_successful_query("url", status=200) diff --git a/tests/pytests/unit/states/test_ini_manage.py b/tests/pytests/unit/states/test_ini_manage.py index 009e006f240a..9f0217b5b407 100644 --- a/tests/pytests/unit/states/test_ini_manage.py +++ b/tests/pytests/unit/states/test_ini_manage.py @@ -1,3 +1,8 @@ +""" + Test cases for salt.states.ini_manage +""" + + import copy import os @@ -6,7 +11,7 @@ import salt.modules.ini_manage as mod_ini_manage import salt.states.ini_manage as ini_manage from salt.utils.odict import OrderedDict -from tests.support.mock import patch +from tests.support.mock import MagicMock, patch @pytest.fixture @@ -104,3 +109,84 @@ def test_options_present_true_file(tmp_path, sections): assert os.path.exists(name) assert mod_ini_manage.get_ini(name) == sections + + +def 
test_options_absent(): + """ + Test to verify options absent in file. + """ + name = "salt" + + ret = {"name": name, "result": None, "comment": "", "changes": {}} + + with patch.dict(ini_manage.__opts__, {"test": True}): + comt = "No changes detected." + ret.update({"comment": comt, "result": True}) + assert ini_manage.options_absent(name) == ret + + with patch.dict(ini_manage.__opts__, {"test": False}): + comt = "No anomaly detected" + ret.update({"comment": comt, "result": True}) + assert ini_manage.options_absent(name) == ret + sections = {"Tables": ["key2", "key3"]} + changes = {"Tables": {"key2": "2", "key3": "3"}} + with patch.dict( + ini_manage.__salt__, + {"ini.remove_option": MagicMock(side_effect=["2", "3"])}, + ): + with patch.dict(ini_manage.__opts__, {"test": False}): + comt = "Changes take effect" + ret.update({"comment": comt, "result": True, "changes": changes}) + assert ini_manage.options_absent(name, sections) == ret + + +def test_sections_present(): + """ + Test to verify sections present in file. + """ + name = "salt" + + ret = {"name": name, "result": None, "comment": "", "changes": {}} + + with patch.dict(ini_manage.__opts__, {"test": True}): + with patch.dict( + ini_manage.__salt__, {"ini.get_ini": MagicMock(return_value=None)} + ): + comt = "No changes detected." + ret.update({"comment": comt, "result": True}) + assert ini_manage.sections_present(name) == ret + + changes = { + "first": "who is on", + "second": "what is on", + "third": "I don't know", + } + with patch.dict( + ini_manage.__salt__, {"ini.set_option": MagicMock(return_value=changes)} + ): + with patch.dict(ini_manage.__opts__, {"test": False}): + comt = "Changes take effect" + ret.update({"comment": comt, "result": True, "changes": changes}) + assert ini_manage.sections_present(name) == ret + + +def test_sections_absent(): + """ + Test to verify sections absent in file. 
+ """ + name = "salt" + + ret = {"name": name, "result": None, "comment": "", "changes": {}} + + with patch.dict(ini_manage.__opts__, {"test": True}): + with patch.dict( + ini_manage.__salt__, {"ini.get_ini": MagicMock(return_value=None)} + ): + comt = "No changes detected." + ret.update({"comment": comt, "result": True}) + assert ini_manage.sections_absent(name) == ret + + with patch.dict(ini_manage.__opts__, {"test": False}): + comt = "No anomaly detected" + ret.update({"comment": comt, "result": True}) + assert ini_manage.sections_absent(name) == ret diff --git a/tests/pytests/unit/states/test_ipmi.py b/tests/pytests/unit/states/test_ipmi.py new file mode 100644 index 000000000000..a1c00345a7cc --- /dev/null +++ b/tests/pytests/unit/states/test_ipmi.py @@ -0,0 +1,166 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.states.ipmi +""" + +import pytest + +import salt.states.ipmi as ipmi +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {ipmi: {}} + + +def test_boot_device(): + """ + Test to request power state change. 
+ """ + name = "salt" + + ret = {"name": name, "result": True, "comment": "", "changes": {}} + + mock = MagicMock(return_value=name) + with patch.dict( + ipmi.__salt__, {"ipmi.get_bootdev": mock, "ipmi.set_bootdev": mock} + ): + comt = "system already in this state" + ret.update({"comment": comt}) + assert ipmi.boot_device(name) == ret + + with patch.dict(ipmi.__opts__, {"test": False}): + comt = "changed boot device" + ret.update( + { + "name": "default", + "comment": comt, + "result": True, + "changes": {"new": "default", "old": "salt"}, + } + ) + assert ipmi.boot_device() == ret + + with patch.dict(ipmi.__opts__, {"test": True}): + comt = "would change boot device" + ret.update({"comment": comt, "result": None}) + assert ipmi.boot_device() == ret + + +def test_power(): + """ + Test to request power state change + """ + ret = {"name": "power_on", "result": True, "comment": "", "changes": {}} + + mock = MagicMock(return_value="on") + with patch.dict(ipmi.__salt__, {"ipmi.get_power": mock, "ipmi.set_power": mock}): + comt = "system already in this state" + ret.update({"comment": comt}) + assert ipmi.power() == ret + + with patch.dict(ipmi.__opts__, {"test": False}): + comt = "changed system power" + ret.update( + { + "name": "off", + "comment": comt, + "result": True, + "changes": {"new": "off", "old": "on"}, + } + ) + assert ipmi.power("off") == ret + + with patch.dict(ipmi.__opts__, {"test": True}): + comt = "would power: off system" + ret.update({"comment": comt, "result": None}) + assert ipmi.power("off") == ret + + +def test_user_present(): + """ + Test to ensure IPMI user and user privileges. 
+ """ + name = "salt" + + ret = {"name": name, "result": True, "comment": "", "changes": {}} + + mock_ret = { + "access": { + "callback": False, + "link_auth": True, + "ipmi_msg": True, + "privilege_level": "administrator", + } + } + mock = MagicMock(return_value=mock_ret) + mock_bool = MagicMock(side_effect=[True, False, False, False]) + with patch.dict( + ipmi.__salt__, + { + "ipmi.get_user": mock, + "ipmi.set_user_password": mock_bool, + "ipmi.ensure_user": mock_bool, + }, + ): + comt = "user already present" + ret.update({"comment": comt}) + assert ipmi.user_present(name, 5, "salt@123") == ret + + with patch.dict(ipmi.__opts__, {"test": True}): + comt = "would (re)create user" + ret.update( + { + "comment": comt, + "result": None, + "changes": {"new": "salt", "old": mock_ret}, + } + ) + assert ipmi.user_present(name, 5, "pw@123") == ret + + with patch.dict(ipmi.__opts__, {"test": False}): + comt = "(re)created user" + ret.update( + { + "comment": comt, + "result": True, + "changes": {"new": mock_ret, "old": mock_ret}, + } + ) + assert ipmi.user_present(name, 5, "pw@123") == ret + + +def test_user_absent(): + """ + Test to delete all user (uid) records having the matching name. 
+ """ + name = "salt" + + ret = {"name": name, "result": True, "comment": "", "changes": {}} + + mock = MagicMock(side_effect=[[], [5], [5]]) + mock_bool = MagicMock(return_value=True) + with patch.dict( + ipmi.__salt__, {"ipmi.get_name_uids": mock, "ipmi.delete_user": mock_bool} + ): + comt = "user already absent" + ret.update({"comment": comt}) + assert ipmi.user_absent(name) == ret + + with patch.dict(ipmi.__opts__, {"test": True}): + comt = "would delete user(s)" + ret.update({"comment": comt, "result": None, "changes": {"delete": [5]}}) + assert ipmi.user_absent(name) == ret + + with patch.dict(ipmi.__opts__, {"test": False}): + comt = "user(s) removed" + ret.update( + { + "comment": comt, + "result": False, + "changes": {"new": "None", "old": [5]}, + } + ) + assert ipmi.user_absent(name) == ret diff --git a/tests/pytests/unit/states/test_jboss7.py b/tests/pytests/unit/states/test_jboss7.py new file mode 100644 index 000000000000..aab5a1434857 --- /dev/null +++ b/tests/pytests/unit/states/test_jboss7.py @@ -0,0 +1,752 @@ +# pylint: disable=unused-argument + + +import pytest + +import salt.states.jboss7 as jboss7 +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return { + jboss7: { + "__salt__": { + "jboss7.read_datasource": MagicMock(), + "jboss7.create_datasource": MagicMock(), + "jboss7.update_datasource": MagicMock(), + "jboss7.remove_datasource": MagicMock(), + "jboss7.read_simple_binding": MagicMock(), + "jboss7.create_simple_binding": MagicMock(), + "jboss7.update_simple_binding": MagicMock(), + "jboss7.undeploy": MagicMock(), + "jboss7.deploy": MagicMock, + "file.get_managed": MagicMock, + "file.manage_file": MagicMock, + "jboss7.list_deployments": MagicMock, + }, + "__env__": "base", + } + } + + +def test_should_not_redeploy_unchanged(): + # given + parameters = { + "target_file": "some_artifact", + "undeploy_force": False, + "undeploy": 
"some_artifact", + "source": "some_artifact_on_master", + } + jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"} + + def list_deployments(jboss_config): + return ["some_artifact"] + + def file_get_managed( + name, + template, + source, + source_hash, + source_hash_name, + user, + group, + mode, + attrs, + saltenv, + context, + defaults, + skip_verify, + kwargs, + ): + return "sfn", "hash", "" + + def file_manage_file( + name, + sfn, + ret, + source, + source_sum, + user, + group, + mode, + attrs, + saltenv, + backup, + makedirs, + template, + show_diff, + contents, + dir_mode, + ): + return {"result": True, "changes": False} + + jboss7_undeploy_mock = MagicMock() + jboss7_deploy_mock = MagicMock() + file_get_managed = MagicMock(side_effect=file_get_managed) + file_manage_file = MagicMock(side_effect=file_manage_file) + list_deployments_mock = MagicMock(side_effect=list_deployments) + with patch.dict( + jboss7.__salt__, + { + "jboss7.undeploy": jboss7_undeploy_mock, + "jboss7.deploy": jboss7_deploy_mock, + "file.get_managed": file_get_managed, + "file.manage_file": file_manage_file, + "jboss7.list_deployments": list_deployments_mock, + }, + ): + # when + result = jboss7.deployed( + name="unchanged", jboss_config=jboss_conf, salt_source=parameters + ) + + # then + assert not jboss7_undeploy_mock.called + assert not jboss7_deploy_mock.called + + +def test_should_redeploy_changed(): + # given + parameters = { + "target_file": "some_artifact", + "undeploy_force": False, + "undeploy": "some_artifact", + "source": "some_artifact_on_master", + } + jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"} + + def list_deployments(jboss_config): + return ["some_artifact"] + + def file_get_managed( + name, + template, + source, + source_hash, + source_hash_name, + user, + group, + mode, + attrs, + saltenv, + context, + defaults, + skip_verify, + kwargs, + ): + return "sfn", "hash", "" + + def file_manage_file( + name, + sfn, + ret, + source, 
+ source_sum, + user, + group, + mode, + attrs, + saltenv, + backup, + makedirs, + template, + show_diff, + contents, + dir_mode, + ): + return {"result": True, "changes": True} + + jboss7_undeploy_mock = MagicMock() + jboss7_deploy_mock = MagicMock() + file_get_managed = MagicMock(side_effect=file_get_managed) + file_manage_file = MagicMock(side_effect=file_manage_file) + list_deployments_mock = MagicMock(side_effect=list_deployments) + with patch.dict( + jboss7.__salt__, + { + "jboss7.undeploy": jboss7_undeploy_mock, + "jboss7.deploy": jboss7_deploy_mock, + "file.get_managed": file_get_managed, + "file.manage_file": file_manage_file, + "jboss7.list_deployments": list_deployments_mock, + }, + ): + # when + result = jboss7.deployed( + name="unchanged", jboss_config=jboss_conf, salt_source=parameters + ) + + # then + assert jboss7_undeploy_mock.called + assert jboss7_deploy_mock.called + + +def test_should_deploy_different_artifact(): + # given + parameters = { + "target_file": "some_artifact", + "undeploy_force": False, + "undeploy": "some_artifact", + "source": "some_artifact_on_master", + } + jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"} + + def list_deployments(jboss_config): + return ["some_other_artifact"] + + def file_get_managed( + name, + template, + source, + source_hash, + source_hash_name, + user, + group, + mode, + attrs, + saltenv, + context, + defaults, + skip_verify, + kwargs, + ): + return "sfn", "hash", "" + + def file_manage_file( + name, + sfn, + ret, + source, + source_sum, + user, + group, + mode, + attrs, + saltenv, + backup, + makedirs, + template, + show_diff, + contents, + dir_mode, + ): + return {"result": True, "changes": False} + + jboss7_undeploy_mock = MagicMock() + jboss7_deploy_mock = MagicMock() + file_get_managed = MagicMock(side_effect=file_get_managed) + file_manage_file = MagicMock(side_effect=file_manage_file) + list_deployments_mock = MagicMock(side_effect=list_deployments) + with patch.dict( + 
jboss7.__salt__, + { + "jboss7.undeploy": jboss7_undeploy_mock, + "jboss7.deploy": jboss7_deploy_mock, + "file.get_managed": file_get_managed, + "file.manage_file": file_manage_file, + "jboss7.list_deployments": list_deployments_mock, + }, + ): + # when + result = jboss7.deployed( + name="unchanged", jboss_config=jboss_conf, salt_source=parameters + ) + + # then + assert not jboss7_undeploy_mock.called + assert jboss7_deploy_mock.called + + +def test_should_redploy_undeploy_force(): + # given + parameters = { + "target_file": "some_artifact", + "undeploy_force": True, + "undeploy": "some_artifact", + "source": "some_artifact_on_master", + } + jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"} + + def list_deployments(jboss_config): + return ["some_artifact"] + + def file_get_managed( + name, + template, + source, + source_hash, + source_hash_name, + user, + group, + mode, + attrs, + saltenv, + context, + defaults, + skip_verify, + kwargs, + ): + return "sfn", "hash", "" + + def file_manage_file( + name, + sfn, + ret, + source, + source_sum, + user, + group, + mode, + attrs, + saltenv, + backup, + makedirs, + template, + show_diff, + contents, + dir_mode, + ): + return {"result": True, "changes": False} + + jboss7_undeploy_mock = MagicMock() + jboss7_deploy_mock = MagicMock() + file_get_managed = MagicMock(side_effect=file_get_managed) + file_manage_file = MagicMock(side_effect=file_manage_file) + list_deployments_mock = MagicMock(side_effect=list_deployments) + with patch.dict( + jboss7.__salt__, + { + "jboss7.undeploy": jboss7_undeploy_mock, + "jboss7.deploy": jboss7_deploy_mock, + "file.get_managed": file_get_managed, + "file.manage_file": file_manage_file, + "jboss7.list_deployments": list_deployments_mock, + }, + ): + # when + result = jboss7.deployed( + name="unchanged", jboss_config=jboss_conf, salt_source=parameters + ) + + # then + assert jboss7_undeploy_mock.called + assert jboss7_deploy_mock.called + + +def 
test_should_create_new_datasource_if_not_exists(): + # given + datasource_properties = {"connection-url": "jdbc:/old-connection-url"} + ds_status = {"created": False} + + def read_func(jboss_config, name, profile): + if ds_status["created"]: + return {"success": True, "result": datasource_properties} + else: + return {"success": False, "err_code": "JBAS014807"} + + def create_func(jboss_config, name, datasource_properties, profile): + ds_status["created"] = True + return {"success": True} + + read_mock = MagicMock(side_effect=read_func) + create_mock = MagicMock(side_effect=create_func) + update_mock = MagicMock() + with patch.dict( + jboss7.__salt__, + { + "jboss7.read_datasource": read_mock, + "jboss7.create_datasource": create_mock, + "jboss7.update_datasource": update_mock, + }, + ): + + # when + result = jboss7.datasource_exists( + name="appDS", + jboss_config={}, + datasource_properties=datasource_properties, + profile=None, + ) + + # then + create_mock.assert_called_with( + name="appDS", + jboss_config={}, + datasource_properties=datasource_properties, + profile=None, + ) + + assert not update_mock.called + assert result["comment"] == "Datasource created." 
+ + +def test_should_update_the_datasource_if_exists(): + ds_status = {"updated": False} + + def read_func(jboss_config, name, profile): + if ds_status["updated"]: + return { + "success": True, + "result": {"connection-url": "jdbc:/new-connection-url"}, + } + else: + return { + "success": True, + "result": {"connection-url": "jdbc:/old-connection-url"}, + } + + def update_func(jboss_config, name, new_properties, profile): + ds_status["updated"] = True + return {"success": True} + + read_mock = MagicMock(side_effect=read_func) + create_mock = MagicMock() + update_mock = MagicMock(side_effect=update_func) + with patch.dict( + jboss7.__salt__, + { + "jboss7.read_datasource": read_mock, + "jboss7.create_datasource": create_mock, + "jboss7.update_datasource": update_mock, + }, + ): + result = jboss7.datasource_exists( + name="appDS", + jboss_config={}, + datasource_properties={"connection-url": "jdbc:/new-connection-url"}, + profile=None, + ) + + update_mock.assert_called_with( + name="appDS", + jboss_config={}, + new_properties={"connection-url": "jdbc:/new-connection-url"}, + profile=None, + ) + assert read_mock.called + assert result["comment"] == "Datasource updated." 
+ + +def test_should_recreate_the_datasource_if_specified(): + read_mock = MagicMock( + return_value={ + "success": True, + "result": {"connection-url": "jdbc:/same-connection-url"}, + } + ) + create_mock = MagicMock(return_value={"success": True}) + remove_mock = MagicMock(return_value={"success": True}) + update_mock = MagicMock() + with patch.dict( + jboss7.__salt__, + { + "jboss7.read_datasource": read_mock, + "jboss7.create_datasource": create_mock, + "jboss7.remove_datasource": remove_mock, + "jboss7.update_datasource": update_mock, + }, + ): + result = jboss7.datasource_exists( + name="appDS", + jboss_config={}, + datasource_properties={"connection-url": "jdbc:/same-connection-url"}, + recreate=True, + ) + + remove_mock.assert_called_with(name="appDS", jboss_config={}, profile=None) + create_mock.assert_called_with( + name="appDS", + jboss_config={}, + datasource_properties={"connection-url": "jdbc:/same-connection-url"}, + profile=None, + ) + assert result["changes"]["removed"] == "appDS" + assert result["changes"]["created"] == "appDS" + + +def test_should_inform_if_the_datasource_has_not_changed(): + read_mock = MagicMock( + return_value={ + "success": True, + "result": {"connection-url": "jdbc:/same-connection-url"}, + } + ) + create_mock = MagicMock() + remove_mock = MagicMock() + update_mock = MagicMock(return_value={"success": True}) + + with patch.dict( + jboss7.__salt__, + { + "jboss7.read_datasource": read_mock, + "jboss7.create_datasource": create_mock, + "jboss7.remove_datasource": remove_mock, + "jboss7.update_datasource": update_mock, + }, + ): + result = jboss7.datasource_exists( + name="appDS", + jboss_config={}, + datasource_properties={"connection-url": "jdbc:/old-connection-url"}, + ) + + update_mock.assert_called_with( + name="appDS", + jboss_config={}, + new_properties={"connection-url": "jdbc:/old-connection-url"}, + profile=None, + ) + assert not create_mock.called + assert result["comment"] == "Datasource not changed." 
+ + +def test_should_create_binding_if_not_exists(): + # given + binding_status = {"created": False} + + def read_func(jboss_config, binding_name, profile): + if binding_status["created"]: + return {"success": True, "result": {"value": "DEV"}} + else: + return {"success": False, "err_code": "JBAS014807"} + + def create_func(jboss_config, binding_name, value, profile): + binding_status["created"] = True + return {"success": True} + + read_mock = MagicMock(side_effect=read_func) + create_mock = MagicMock(side_effect=create_func) + update_mock = MagicMock() + + with patch.dict( + jboss7.__salt__, + { + "jboss7.read_simple_binding": read_mock, + "jboss7.create_simple_binding": create_mock, + "jboss7.update_simple_binding": update_mock, + }, + ): + + # when + result = jboss7.bindings_exist( + name="bindings", jboss_config={}, bindings={"env": "DEV"}, profile=None + ) + + # then + create_mock.assert_called_with( + jboss_config={}, binding_name="env", value="DEV", profile=None + ) + assert update_mock.call_count == 0 + assert result["changes"] == {"added": "env:DEV\n"} + assert result["comment"] == "Bindings changed." 
+ + +def test_should_update_bindings_if_exists_and_different(): + # given + binding_status = {"updated": False} + + def read_func(jboss_config, binding_name, profile): + if binding_status["updated"]: + return {"success": True, "result": {"value": "DEV2"}} + else: + return {"success": True, "result": {"value": "DEV"}} + + def update_func(jboss_config, binding_name, value, profile): + binding_status["updated"] = True + return {"success": True} + + read_mock = MagicMock(side_effect=read_func) + create_mock = MagicMock() + update_mock = MagicMock(side_effect=update_func) + + with patch.dict( + jboss7.__salt__, + { + "jboss7.read_simple_binding": read_mock, + "jboss7.create_simple_binding": create_mock, + "jboss7.update_simple_binding": update_mock, + }, + ): + # when + result = jboss7.bindings_exist( + name="bindings", jboss_config={}, bindings={"env": "DEV2"}, profile=None + ) + + # then + update_mock.assert_called_with( + jboss_config={}, binding_name="env", value="DEV2", profile=None + ) + assert create_mock.call_count == 0 + assert result["changes"] == {"changed": "env:DEV->DEV2\n"} + assert result["comment"] == "Bindings changed." + + +def test_should_not_update_bindings_if_same(): + # given + read_mock = MagicMock(return_value={"success": True, "result": {"value": "DEV2"}}) + create_mock = MagicMock() + update_mock = MagicMock() + + with patch.dict( + jboss7.__salt__, + { + "jboss7.read_simple_binding": read_mock, + "jboss7.create_simple_binding": create_mock, + "jboss7.update_simple_binding": update_mock, + }, + ): + # when + result = jboss7.bindings_exist( + name="bindings", jboss_config={}, bindings={"env": "DEV2"} + ) + + # then + assert create_mock.call_count == 0 + assert update_mock.call_count == 0 + assert result["changes"] == {} + assert result["comment"] == "Bindings not changed." 
+ + +def test_should_raise_exception_if_cannot_create_binding(): + def read_func(jboss_config, binding_name, profile): + return {"success": False, "err_code": "JBAS014807"} + + def create_func(jboss_config, binding_name, value, profile): + return {"success": False, "failure-description": "Incorrect binding name."} + + read_mock = MagicMock(side_effect=read_func) + create_mock = MagicMock(side_effect=create_func) + update_mock = MagicMock() + + with patch.dict( + jboss7.__salt__, + { + "jboss7.read_simple_binding": read_mock, + "jboss7.create_simple_binding": create_mock, + "jboss7.update_simple_binding": update_mock, + }, + ): + # when + with pytest.raises(CommandExecutionError) as exc: + jboss7.bindings_exist( + name="bindings", + jboss_config={}, + bindings={"env": "DEV2"}, + profile=None, + ) + assert str(exc.value) == "Incorrect binding name." + + +def test_should_raise_exception_if_cannot_update_binding(): + def read_func(jboss_config, binding_name, profile): + return {"success": True, "result": {"value": "DEV"}} + + def update_func(jboss_config, binding_name, value, profile): + return {"success": False, "failure-description": "Incorrect binding name."} + + read_mock = MagicMock(side_effect=read_func) + create_mock = MagicMock() + update_mock = MagicMock(side_effect=update_func) + + with patch.dict( + jboss7.__salt__, + { + "jboss7.read_simple_binding": read_mock, + "jboss7.create_simple_binding": create_mock, + "jboss7.update_simple_binding": update_mock, + }, + ): + + # when + with pytest.raises(CommandExecutionError) as exc: + jboss7.bindings_exist( + name="bindings", + jboss_config={}, + bindings={"env": "DEV2"}, + profile=None, + ) + assert str(exc.value) == "Incorrect binding name." 
+ + +def test_datasource_exist_create_datasource_good_code(): + jboss_config = { + "cli_path": "/home/ch44d/Desktop/wildfly-18.0.0.Final/bin/jboss-cli.sh", + "controller": "127.0.0.1: 9990", + "cli_user": "user", + "cli_password": "user", + } + + datasource_properties = { + "driver - name": "h2", + "connection - url": "jdbc:sqlserver://127.0.0.1:1433;DatabaseName=test_s2", + "jndi - name": ( + "java:/home/ch44d/Desktop/sqljdbc_7.4/enu/mssql-jdbc-7.4.1.jre8.jar" + ), + "user - name": "user", + "password": "user", + "use - java - context": True, + } + + read_datasource = MagicMock( + return_value={"success": False, "err_code": "WFLYCTL0216"} + ) + + error_msg = "Error: -1" + create_datasource = MagicMock(return_value={"success": False, "stdout": error_msg}) + + with patch.dict( + jboss7.__salt__, + { + "jboss7.read_datasource": read_datasource, + "jboss7.create_datasource": create_datasource, + }, + ): + ret = jboss7.datasource_exists("SQL", jboss_config, datasource_properties) + + assert "result" in ret + assert not ret["result"] + assert "comment" in ret + assert error_msg in ret["comment"] + + read_datasource.assert_called_once() + create_datasource.assert_called_once() + + +def test_datasource_exist_create_datasource_bad_code(): + jboss_config = { + "cli_path": "/home/ch44d/Desktop/wildfly-18.0.0.Final/bin/jboss-cli.sh", + "controller": "127.0.0.1: 9990", + "cli_user": "user", + "cli_password": "user", + } + + datasource_properties = { + "driver - name": "h2", + "connection - url": "jdbc:sqlserver://127.0.0.1:1433;DatabaseName=test_s2", + "jndi - name": ( + "java:/home/ch44d/Desktop/sqljdbc_7.4/enu/mssql-jdbc-7.4.1.jre8.jar" + ), + "user - name": "user", + "password": "user", + "use - java - context": True, + } + + read_datasource = MagicMock( + return_value={ + "success": False, + "err_code": "WFLYCTL0217", + "failure-description": "Something happened", + } + ) + + with patch.dict(jboss7.__salt__, {"jboss7.read_datasource": read_datasource}): + pytest.raises( + 
CommandExecutionError, + jboss7.datasource_exists, + "SQL", + jboss_config, + datasource_properties, + ) + read_datasource.assert_called_once() diff --git a/tests/pytests/unit/states/test_kapacitor.py b/tests/pytests/unit/states/test_kapacitor.py new file mode 100644 index 000000000000..df7484555413 --- /dev/null +++ b/tests/pytests/unit/states/test_kapacitor.py @@ -0,0 +1,135 @@ +""" + Test cases for salt.states.kapacitor +""" + + +import pytest + +import salt.states.kapacitor as kapacitor +from tests.support.mock import Mock, mock_open, patch + + +@pytest.fixture +def configure_loader_modules(): + return {kapacitor: {"__opts__": {"test": False}, "__env__": "test"}} + + +def _present( + name="testname", + tick_script="/tmp/script.tick", + task_type="stream", + database="testdb", + retention_policy="default", + dbrps=None, + enable=True, + task=None, + define_result=True, + enable_result=True, + disable_result=True, + script="testscript", +): + """ + Run a "kapacitor.present" state after setting up mocks, and return the + state return value as well as the mocks to make assertions. 
+ """ + get_mock = Mock(return_value=task) + + if isinstance(define_result, bool): + define_result = {"success": define_result} + define_mock = Mock(return_value=define_result) + + if isinstance(enable_result, bool): + enable_result = {"success": enable_result} + enable_mock = Mock(return_value=enable_result) + + if isinstance(disable_result, bool): + disable_result = {"success": disable_result} + disable_mock = Mock(return_value=disable_result) + + with patch.dict( + kapacitor.__salt__, + { + "kapacitor.get_task": get_mock, + "kapacitor.define_task": define_mock, + "kapacitor.enable_task": enable_mock, + "kapacitor.disable_task": disable_mock, + }, + ): + with patch("salt.utils.files.fopen", mock_open(read_data=script)) as open_mock: + retval = kapacitor.task_present( + name, + tick_script, + task_type=task_type, + database=database, + retention_policy=retention_policy, + enable=enable, + dbrps=dbrps, + ) + + return retval, get_mock, define_mock, enable_mock, disable_mock + + +def _task( + script="testscript", enabled=True, task_type="stream", db="testdb", rp="default" +): + return { + "script": script, + "enabled": enabled, + "type": task_type, + "dbrps": [{"db": db, "rp": rp}], + } + + +def test_task_present_new_task(): + ret, get_mock, define_mock, enable_mock, _ = _present(dbrps=["testdb2.default_rp"]) + get_mock.assert_called_once_with("testname") + define_mock.assert_called_once_with( + "testname", + "/tmp/script.tick", + database="testdb", + retention_policy="default", + task_type="stream", + dbrps=["testdb2.default_rp", "testdb.default"], + ) + enable_mock.assert_called_once_with("testname") + assert "TICKscript diff" in ret["changes"] + assert "enabled" in ret["changes"] + assert ret["changes"]["enabled"]["new"] is True + + +def test_task_present_existing_task_updated_script(): + ret, get_mock, define_mock, enable_mock, _ = _present( + task=_task(script="oldscript") + ) + get_mock.assert_called_once_with("testname") + define_mock.assert_called_once_with( 
+ "testname", + "/tmp/script.tick", + database="testdb", + retention_policy="default", + task_type="stream", + dbrps=["testdb.default"], + ) + assert enable_mock.called is False + assert "TICKscript diff" in ret["changes"] + assert "enabled" not in ret["changes"] + + +def test_task_present_existing_task_not_enabled(): + ret, get_mock, define_mock, enable_mock, _ = _present(task=_task(enabled=False)) + get_mock.assert_called_once_with("testname") + assert define_mock.called is False + enable_mock.assert_called_once_with("testname") + assert "diff" not in ret["changes"] + assert "enabled" in ret["changes"] + assert ret["changes"]["enabled"]["new"] is True + + +def test_task_present_disable_existing_task(): + ret, get_mock, define_mock, _, disable_mock = _present(task=_task(), enable=False) + get_mock.assert_called_once_with("testname") + assert define_mock.called is False + disable_mock.assert_called_once_with("testname") + assert "diff" not in ret["changes"] + assert "enabled" in ret["changes"] + assert ret["changes"]["enabled"]["new"] is False diff --git a/tests/pytests/unit/states/test_kernelpkg.py b/tests/pytests/unit/states/test_kernelpkg.py new file mode 100644 index 000000000000..22b3cbf2db01 --- /dev/null +++ b/tests/pytests/unit/states/test_kernelpkg.py @@ -0,0 +1,153 @@ +""" + Test cases for salt.states.kernelpkg +""" + + +import pytest + +import salt.states.kernelpkg as kernelpkg +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def kernel_list(): + return ["4.4.0-70-generic", "4.4.0-71-generic", "4.5.1-14-generic"] + + +@pytest.fixture +def state_name(): + return "kernelpkg-test" + + +@pytest.fixture +def configure_loader_modules(): + return { + kernelpkg: { + "__salt__": { + "system.reboot": MagicMock(return_value=None), + "kernelpkg.upgrade": MagicMock( + return_value={ + "upgrades": {"kernel": {"old": "1.0.0", "new": "2.0.0"}} + } + ), + "kernelpkg.active": MagicMock(return_value=0), + "kernelpkg.latest_installed": 
MagicMock(return_value=0), + } + } + } + + +def test_latest_installed_with_changes(kernel_list, state_name): + """ + Test - latest_installed when an upgrade is available + """ + installed = MagicMock(return_value=kernel_list[:-1]) + upgrade = MagicMock(return_value=kernel_list[-1]) + with patch.dict(kernelpkg.__salt__, {"kernelpkg.list_installed": installed}): + with patch.dict(kernelpkg.__salt__, {"kernelpkg.latest_available": upgrade}): + with patch.dict(kernelpkg.__opts__, {"test": False}): + kernelpkg.__salt__["kernelpkg.upgrade"].reset_mock() + ret = kernelpkg.latest_installed(name=state_name) + assert ret["name"] == state_name + assert ret["result"] + assert isinstance(ret["changes"], dict) + assert isinstance(ret["comment"], str) + kernelpkg.__salt__["kernelpkg.upgrade"].assert_called_once() + + with patch.dict(kernelpkg.__opts__, {"test": True}): + kernelpkg.__salt__["kernelpkg.upgrade"].reset_mock() + ret = kernelpkg.latest_installed(name=state_name) + assert ret["name"] == state_name + assert ret["result"] is None + assert ret["changes"] == {} + assert isinstance(ret["comment"], str) + kernelpkg.__salt__["kernelpkg.upgrade"].assert_not_called() + + +def test_latest_installed_at_latest(kernel_list, state_name): + """ + Test - latest_installed when no upgrade is available + """ + installed = MagicMock(return_value=kernel_list) + upgrade = MagicMock(return_value=kernel_list[-1]) + with patch.dict(kernelpkg.__salt__, {"kernelpkg.list_installed": installed}): + with patch.dict(kernelpkg.__salt__, {"kernelpkg.latest_available": upgrade}): + with patch.dict(kernelpkg.__opts__, {"test": False}): + ret = kernelpkg.latest_installed(name=state_name) + assert ret["name"] == state_name + assert ret["result"] + assert ret["changes"] == {} + assert isinstance(ret["comment"], str) + kernelpkg.__salt__["kernelpkg.upgrade"].assert_not_called() + + with patch.dict(kernelpkg.__opts__, {"test": True}): + ret = kernelpkg.latest_installed(name=state_name) + assert ret["name"] 
== state_name + assert ret["result"] + assert ret["changes"] == {} + assert isinstance(ret["comment"], str) + kernelpkg.__salt__["kernelpkg.upgrade"].assert_not_called() + + +def test_latest_active_with_changes(state_name): + """ + Test - latest_active when a new kernel is available + """ + reboot = MagicMock(return_value=True) + latest = MagicMock(return_value=1) + with patch.dict( + kernelpkg.__salt__, + {"kernelpkg.needs_reboot": reboot, "kernelpkg.latest_installed": latest}, + ), patch.dict(kernelpkg.__opts__, {"test": False}): + kernelpkg.__salt__["system.reboot"].reset_mock() + ret = kernelpkg.latest_active(name=state_name) + assert ret["name"] == state_name + assert ret["result"] + assert isinstance(ret["changes"], dict) + assert isinstance(ret["comment"], str) + kernelpkg.__salt__["system.reboot"].assert_called_once() + + with patch.dict(kernelpkg.__opts__, {"test": True}): + kernelpkg.__salt__["system.reboot"].reset_mock() + ret = kernelpkg.latest_active(name=state_name) + assert ret["name"] == state_name + assert ret["result"] is None + assert ret["changes"] == {"kernel": {"new": 1, "old": 0}} + assert isinstance(ret["comment"], str) + kernelpkg.__salt__["system.reboot"].assert_not_called() + + +def test_latest_active_at_latest(state_name): + """ + Test - latest_active when the newest kernel is already active + """ + reboot = MagicMock(return_value=False) + with patch.dict(kernelpkg.__salt__, {"kernelpkg.needs_reboot": reboot}): + with patch.dict(kernelpkg.__opts__, {"test": False}): + kernelpkg.__salt__["system.reboot"].reset_mock() + ret = kernelpkg.latest_active(name=state_name) + assert ret["name"] == state_name + assert ret["result"] + assert ret["changes"] == {} + assert isinstance(ret["comment"], str) + kernelpkg.__salt__["system.reboot"].assert_not_called() + + with patch.dict(kernelpkg.__opts__, {"test": True}): + kernelpkg.__salt__["system.reboot"].reset_mock() + ret = kernelpkg.latest_active(name=state_name) + assert ret["name"] == state_name + 
assert ret["result"] + assert ret["changes"] == {} + assert isinstance(ret["comment"], str) + kernelpkg.__salt__["system.reboot"].assert_not_called() + + +def test_latest_wait(state_name): + """ + Test - latest_wait static results + """ + ret = kernelpkg.latest_wait(name=state_name) + assert ret["name"] == state_name + assert ret["result"] + assert ret["changes"] == {} + assert isinstance(ret["comment"], str) diff --git a/tests/pytests/unit/states/test_keystone.py b/tests/pytests/unit/states/test_keystone.py new file mode 100644 index 000000000000..d0461180433c --- /dev/null +++ b/tests/pytests/unit/states/test_keystone.py @@ -0,0 +1,397 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.states.keystone +""" + +import pytest + +import salt.states.keystone as keystone +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {keystone: {}} + + +def test_user_present(): + """ + Test to ensure that the keystone user is present + with the specified properties. 
+ """ + name = "nova" + password = "$up3rn0v4" + email = "nova@domain.com" + tenant = "demo" + + ret = {"name": name, "result": False, "changes": {}, "comment": ""} + + mock_f = MagicMock(return_value=False) + mock_lst = MagicMock(return_value=["Error"]) + with patch.dict(keystone.__salt__, {"keystone.tenant_get": mock_lst}): + comt = 'Tenant / project "{}" does not exist'.format(tenant) + ret.update({"comment": comt}) + assert keystone.user_present(name, password, email, tenant) == ret + + mock_dict = MagicMock( + side_effect=[ + {name: {"email": "a@a.com"}}, + {name: {"email": email, "enabled": False}}, + {name: {"email": email, "enabled": True}}, + {name: {"email": email, "enabled": True}}, + {"Error": "error"}, + {"Error": "error"}, + ] + ) + mock_l = MagicMock(return_value={tenant: {"id": "abc"}}) + with patch.dict( + keystone.__salt__, + { + "keystone.user_get": mock_dict, + "keystone.tenant_get": mock_l, + "keystone.user_verify_password": mock_f, + "keystone.user_create": mock_f, + }, + ): + with patch.dict(keystone.__opts__, {"test": True}): + comt = 'User "{}" will be updated'.format(name) + ret.update( + { + "comment": comt, + "result": None, + "changes": { + "Email": "Will be updated", + "Enabled": "Will be True", + "Password": "Will be updated", + }, + } + ) + assert keystone.user_present(name, password, email) == ret + + ret.update( + { + "comment": comt, + "result": None, + "changes": { + "Enabled": "Will be True", + "Password": "Will be updated", + }, + } + ) + assert keystone.user_present(name, password, email) == ret + + ret.update( + { + "comment": comt, + "result": None, + "changes": { + "Tenant": 'Will be added to "demo" tenant', + "Password": "Will be updated", + }, + } + ) + assert keystone.user_present(name, password, email, tenant) == ret + + ret.update( + { + "comment": comt, + "result": None, + "changes": {"Password": "Will be updated"}, + } + ) + assert keystone.user_present(name, password, email) == ret + + comt = 'Keystone user "nova" 
will be added' + ret.update( + { + "comment": comt, + "result": None, + "changes": {"User": "Will be created"}, + } + ) + assert keystone.user_present(name, password, email) == ret + + with patch.dict(keystone.__opts__, {"test": False}): + comt = "Keystone user {} has been added".format(name) + ret.update( + {"comment": comt, "result": True, "changes": {"User": "Created"}} + ) + assert keystone.user_present(name, password, email) == ret + + +def test_user_absent(): + """ + Test to ensure that the keystone user is absent. + """ + name = "nova" + + ret = { + "name": name, + "changes": {}, + "result": True, + "comment": 'User "{}" is already absent'.format(name), + } + + mock_lst = MagicMock(side_effect=[["Error"], []]) + with patch.dict(keystone.__salt__, {"keystone.user_get": mock_lst}): + assert keystone.user_absent(name) == ret + + with patch.dict(keystone.__opts__, {"test": True}): + comt = 'User "{}" will be deleted'.format(name) + ret.update({"comment": comt, "result": None}) + assert keystone.user_absent(name) == ret + + +def test_tenant_present(): + """ + Test to ensures that the keystone tenant exists + """ + name = "nova" + description = "OpenStack Compute Service" + + ret = { + "name": name, + "changes": {}, + "result": True, + "comment": 'Tenant / project "{}" already exists'.format(name), + } + + mock_dict = MagicMock( + side_effect=[ + {name: {"description": "desc"}}, + {name: {"description": description, "enabled": False}}, + {"Error": "error"}, + {"Error": "error"}, + ] + ) + mock_t = MagicMock(return_value=True) + with patch.dict( + keystone.__salt__, + {"keystone.tenant_get": mock_dict, "keystone.tenant_create": mock_t}, + ): + with patch.dict(keystone.__opts__, {"test": True}): + comt = 'Tenant / project "{}" will be updated'.format(name) + ret.update( + { + "comment": comt, + "result": None, + "changes": {"Description": "Will be updated"}, + } + ) + assert keystone.tenant_present(name) == ret + + comt = 'Tenant / project "{}" will be 
updated'.format(name) + ret.update( + { + "comment": comt, + "result": None, + "changes": {"Enabled": "Will be True"}, + } + ) + assert keystone.tenant_present(name, description) == ret + + comt = 'Tenant / project "{}" will be added'.format(name) + ret.update( + { + "comment": comt, + "result": None, + "changes": {"Tenant": "Will be created"}, + } + ) + assert keystone.tenant_present(name) == ret + + with patch.dict(keystone.__opts__, {"test": False}): + comt = 'Tenant / project "{}" has been added'.format(name) + ret.update( + {"comment": comt, "result": True, "changes": {"Tenant": "Created"}} + ) + assert keystone.tenant_present(name) == ret + + +def test_tenant_absent(): + """ + Test to ensure that the keystone tenant is absent. + """ + name = "nova" + + ret = { + "name": name, + "changes": {}, + "result": True, + "comment": 'Tenant / project "{}" is already absent'.format(name), + } + + mock_lst = MagicMock(side_effect=[["Error"], []]) + with patch.dict(keystone.__salt__, {"keystone.tenant_get": mock_lst}): + assert keystone.tenant_absent(name) == ret + + with patch.dict(keystone.__opts__, {"test": True}): + comt = 'Tenant / project "{}" will be deleted'.format(name) + ret.update({"comment": comt, "result": None}) + assert keystone.tenant_absent(name) == ret + + +def test_role_present(): + """ + Test to ensures that the keystone role exists + """ + name = "nova" + + ret = { + "name": name, + "changes": {}, + "result": True, + "comment": 'Role "{}" already exists'.format(name), + } + + mock_lst = MagicMock(side_effect=[[], ["Error"]]) + with patch.dict(keystone.__salt__, {"keystone.role_get": mock_lst}): + assert keystone.role_present(name) == ret + + with patch.dict(keystone.__opts__, {"test": True}): + comt = 'Role "{}" will be added'.format(name) + ret.update({"comment": comt, "result": None}) + assert keystone.role_present(name) == ret + + +def test_role_absent(): + """ + Test to ensure that the keystone role is absent. 
+ """ + name = "nova" + + ret = { + "name": name, + "changes": {}, + "result": True, + "comment": 'Role "{}" is already absent'.format(name), + } + + mock_lst = MagicMock(side_effect=[["Error"], []]) + with patch.dict(keystone.__salt__, {"keystone.role_get": mock_lst}): + assert keystone.role_absent(name) == ret + + with patch.dict(keystone.__opts__, {"test": True}): + comt = 'Role "{}" will be deleted'.format(name) + ret.update({"comment": comt, "result": None}) + assert keystone.role_absent(name) == ret + + +def test_service_present(): + """ + Test to ensure service present in Keystone catalog + """ + name = "nova" + service_type = "compute" + + ret = { + "name": name, + "changes": {}, + "result": True, + "comment": 'Service "{}" already exists'.format(name), + } + + mock_lst = MagicMock(side_effect=[[], ["Error"]]) + with patch.dict(keystone.__salt__, {"keystone.service_get": mock_lst}): + assert keystone.service_present(name, service_type) == ret + + with patch.dict(keystone.__opts__, {"test": True}): + comt = 'Service "{}" will be added'.format(name) + ret.update({"comment": comt, "result": None}) + assert keystone.service_present(name, service_type) == ret + + +def test_service_absent(): + """ + Test to ensure that the service doesn't exist in Keystone catalog + """ + name = "nova" + + ret = { + "name": name, + "changes": {}, + "result": True, + "comment": 'Service "{}" is already absent'.format(name), + } + + mock_lst = MagicMock(side_effect=[["Error"], []]) + with patch.dict(keystone.__salt__, {"keystone.service_get": mock_lst}): + assert keystone.service_absent(name) == ret + + with patch.dict(keystone.__opts__, {"test": True}): + comt = 'Service "{}" will be deleted'.format(name) + ret.update({"comment": comt, "result": None}) + assert keystone.service_absent(name) == ret + + +def test_endpoint_present(): + """ + Test to ensure the specified endpoints exists for service + """ + name = "nova" + region = "RegionOne" + + ret = {"name": name, "changes": {}, 
"result": True, "comment": ""} + + endpoint = { + "adminurl": None, + "region": None, + "internalurl": None, + "publicurl": None, + "id": 1, + "service_id": None, + } + + mock_lst = MagicMock( + side_effect=[endpoint, ["Error"], {"id": 1, "service_id": None}, []] + ) + mock = MagicMock(return_value=True) + with patch.dict( + keystone.__salt__, + {"keystone.endpoint_get": mock_lst, "keystone.endpoint_create": mock}, + ): + + comt = 'Endpoint for service "{}" already exists'.format(name) + ret.update({"comment": comt, "result": True, "changes": {}}) + assert keystone.endpoint_present(name) == ret + + with patch.dict(keystone.__opts__, {"test": True}): + comt = 'Endpoint for service "{}" will be added'.format(name) + ret.update( + { + "comment": comt, + "result": None, + "changes": {"Endpoint": "Will be created"}, + } + ) + assert keystone.endpoint_present(name) == ret + + comt = 'Endpoint for service "{}" already exists'.format(name) + ret.update({"comment": comt, "result": True, "changes": {}}) + assert keystone.endpoint_present(name) == ret + + with patch.dict(keystone.__opts__, {"test": False}): + comt = 'Endpoint for service "{}" has been added'.format(name) + ret.update({"comment": comt, "result": True, "changes": True}) + assert keystone.endpoint_present(name) == ret + + +def test_endpoint_absent(): + """ + Test to ensure that the endpoint for a service doesn't + exist in Keystone catalog + """ + name = "nova" + region = "RegionOne" + comment = 'Endpoint for service "{}" is already absent'.format(name) + ret = {"name": name, "changes": {}, "result": True, "comment": comment} + + mock_lst = MagicMock(side_effect=[[], ["Error"]]) + with patch.dict(keystone.__salt__, {"keystone.endpoint_get": mock_lst}): + assert keystone.endpoint_absent(name, region) == ret + + with patch.dict(keystone.__opts__, {"test": True}): + comt = 'Endpoint for service "{}" will be deleted'.format(name) + ret.update({"comment": comt, "result": None}) + assert keystone.endpoint_absent(name, 
region) == ret diff --git a/tests/pytests/unit/states/test_keystore.py b/tests/pytests/unit/states/test_keystore.py new file mode 100644 index 000000000000..8e6bfdabf250 --- /dev/null +++ b/tests/pytests/unit/states/test_keystore.py @@ -0,0 +1,497 @@ +""" + Test cases for salt.states.keystore +""" + +import pytest + +import salt.states.keystore as keystore +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {keystore: {"__opts__": {"test": False}}} + + +def test_cert_already_present(): + """ + Test for existing value_present + """ + + cert_return = [ + { + "valid_until": "August 21 2017", + "sha1": "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5".encode( + __salt_system_encoding__ + ), + "valid_start": "August 22 2012", + "type": "TrustedCertEntry", + "alias": "stringhost", + "expired": True, + } + ] + x509_return = { + "Not After": "2017-08-21 05:26:54", + "Subject Hash": "97:95:14:4F", + "Serial Number": "0D:FA", + "SHA1 Finger Print": ( + "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5".encode( + __salt_system_encoding__ + ) + ), + "SHA-256 Finger Print": "5F:0F:B5:16:65:81:AA:E6:4A:10:1C:15:83:B1:BE:BE:74:E8:14:A9:1E:7A:8A:14:BA:1E:83:5D:78:F6:E9:E7".encode( + __salt_system_encoding__ + ), + "MD5 Finger Print": "80:E6:17:AF:78:D8:E4:B8:FB:5F:41:3A:27:1D:CC:F2".encode( + __salt_system_encoding__ + ), + "Version": 1, + "Key Size": 512, + "Public Key": ( + "-----BEGIN PUBLIC" + " KEY-----\nMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJv8ZpB5hEK7qxP9K3v43hUS5fGT4waK\ne7ix4Z4mu5UBv+cw7WSFAt0Vaag0sAbsPzU8Hhsrj/qPABvfB8asUwcCAwEAAQ==\n-----END" + " PUBLIC KEY-----\n" + ), + "Issuer": { + "C": "JP", + "organizationName": "Frank4DD", + "CN": "Frank4DD Web CA", + "SP": "Tokyo", + "L": "Chuo-ku", + "emailAddress": "support@frank4dd.com", + "OU": "WebCert Support", + }, + "Issuer Hash": "92:DA:45:6B", + "Not Before": "2012-08-22 05:26:54", + "Subject": { + "C": "JP", + "SP": "Tokyo", + 
"organizationName": "Frank4DD", + "CN": "www.example.com", + }, + } + + name = "keystore.jks" + passphrase = "changeit" + entries = [ + { + "alias": "stringhost", + "certificate": """-----BEGIN CERTIFICATE----- + MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG + A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxETAPBgNVBAoTCEZyYW5rNERE + MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWBgNVBAMTD0ZyYW5rNEREIFdl + YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmcmFuazRkZC5jb20wHhcNMTIw + ODIyMDUyNjU0WhcNMTcwODIxMDUyNjU0WjBKMQswCQYDVQQGEwJKUDEOMAwGA1UE + CAwFVG9reW8xETAPBgNVBAoMCEZyYW5rNEREMRgwFgYDVQQDDA93d3cuZXhhbXBs + ZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEAm/xmkHmEQrurE/0re/jeFRLl + 8ZPjBop7uLHhnia7lQG/5zDtZIUC3RVpqDSwBuw/NTweGyuP+o8AG98HxqxTBwID + AQABMA0GCSqGSIb3DQEBBQUAA4GBABS2TLuBeTPmcaTaUW/LCB2NYOy8GMdzR1mx + 8iBIu2H6/E2tiY3RIevV2OW61qY2/XRQg7YPxx3ffeUugX9F4J/iPnnu1zAxxyBy + 2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0 + Hn+GmxZA\n-----END CERTIFICATE-----""", + } + ] + + state_return = { + "name": name, + "changes": {}, + "result": True, + "comment": "No changes made.\n", + } + + # with patch.dict(keystore.__opts__, {'test': False}): + with patch("os.path.exists", return_value=True): + with patch.dict( + keystore.__salt__, + { + "keystore.list": MagicMock(return_value=cert_return), + "x509.read_certificate": MagicMock(return_value=x509_return), + }, + ): + assert keystore.managed(name, passphrase, entries) == state_return + + with patch("os.path.exists", return_value=True): + with patch.dict(keystore.__opts__, {"test": True}): + with patch.dict( + keystore.__salt__, + { + "keystore.list": MagicMock(return_value=cert_return), + "x509.read_certificate": MagicMock(return_value=x509_return), + }, + ): + assert keystore.managed(name, passphrase, entries) == state_return + + +def test_cert_update(): + """ + Test for existing value_present + """ + + cert_return = [ + { + "valid_until": "August 21 2017", + "sha1": 
"07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5".encode( + __salt_system_encoding__ + ), + "valid_start": "August 22 2012", + "type": "TrustedCertEntry", + "alias": "stringhost", + "expired": True, + } + ] + x509_return = { + "Not After": "2017-08-21 05:26:54", + "Subject Hash": "97:95:14:4F", + "Serial Number": "0D:FA", + "SHA1 Finger Print": ( + "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D6".encode( + __salt_system_encoding__ + ) + ), + "SHA-256 Finger Print": "5F:0F:B5:16:65:81:AA:E6:4A:10:1C:15:83:B1:BE:BE:74:E8:14:A9:1E:7A:8A:14:BA:1E:83:5D:78:F6:E9:E7", + "MD5 Finger Print": "80:E6:17:AF:78:D8:E4:B8:FB:5F:41:3A:27:1D:CC:F2", + "Version": 1, + "Key Size": 512, + "Public Key": ( + "-----BEGIN PUBLIC" + " KEY-----\nMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJv8ZpB5hEK7qxP9K3v43hUS5fGT4waK\ne7ix4Z4mu5UBv+cw7WSFAt0Vaag0sAbsPzU8Hhsrj/qPABvfB8asUwcCAwEAAQ==\n-----END" + " PUBLIC KEY-----\n" + ), + "Issuer": { + "C": "JP", + "organizationName": "Frank4DD", + "CN": "Frank4DD Web CA", + "SP": "Tokyo", + "L": "Chuo-ku", + "emailAddress": "support@frank4dd.com", + "OU": "WebCert Support", + }, + "Issuer Hash": "92:DA:45:6B", + "Not Before": "2012-08-22 05:26:54", + "Subject": { + "C": "JP", + "SP": "Tokyo", + "organizationName": "Frank4DD", + "CN": "www.example.com", + }, + } + + name = "keystore.jks" + passphrase = "changeit" + entries = [ + { + "alias": "stringhost", + "certificate": """-----BEGIN CERTIFICATE----- + MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG + A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxETAPBgNVBAoTCEZyYW5rNERE + MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWBgNVBAMTD0ZyYW5rNEREIFdl + YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmcmFuazRkZC5jb20wHhcNMTIw + ODIyMDUyNjU0WhcNMTcwODIxMDUyNjU0WjBKMQswCQYDVQQGEwJKUDEOMAwGA1UE + CAwFVG9reW8xETAPBgNVBAoMCEZyYW5rNEREMRgwFgYDVQQDDA93d3cuZXhhbXBs + ZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEAm/xmkHmEQrurE/0re/jeFRLl + 8ZPjBop7uLHhnia7lQG/5zDtZIUC3RVpqDSwBuw/NTweGyuP+o8AG98HxqxTBwID + 
AQABMA0GCSqGSIb3DQEBBQUAA4GBABS2TLuBeTPmcaTaUW/LCB2NYOy8GMdzR1mx + 8iBIu2H6/E2tiY3RIevV2OW61qY2/XRQg7YPxx3ffeUugX9F4J/iPnnu1zAxxyBy + 2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0 + Hn+GmxZA\n-----END CERTIFICATE-----""", + } + ] + + test_return = { + "name": name, + "changes": {}, + "result": None, + "comment": "Alias stringhost would have been updated\n", + } + state_return = { + "name": name, + "changes": {"stringhost": "Updated"}, + "result": True, + "comment": "Alias stringhost updated.\n", + } + + with patch.dict(keystore.__opts__, {"test": True}): + with patch("os.path.exists", return_value=True): + with patch.dict( + keystore.__salt__, + { + "keystore.list": MagicMock(return_value=cert_return), + "x509.read_certificate": MagicMock(return_value=x509_return), + }, + ): + assert keystore.managed(name, passphrase, entries) == test_return + + with patch("os.path.exists", return_value=True): + with patch.dict( + keystore.__salt__, + { + "keystore.list": MagicMock(return_value=cert_return), + "x509.read_certificate": MagicMock(return_value=x509_return), + "keystore.remove": MagicMock(return_value=True), + "keystore.add": MagicMock(return_value=True), + }, + ): + assert keystore.managed(name, passphrase, entries) == state_return + + +def test_cert_update_no_sha1_fingerprint_in_x509(): + """ + Test for existing value_present + """ + + cert_return = [ + { + "valid_until": "August 21 2017", + "sha1": "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5".encode( + __salt_system_encoding__ + ), + "valid_start": "August 22 2012", + "type": "TrustedCertEntry", + "alias": "stringhost", + "expired": True, + } + ] + sha1_return = b"07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5" + x509_return = { + "Not After": "2017-08-21 05:26:54", + "Subject Hash": "97:95:14:4F", + "Serial Number": "0D:FA", + "SHA-256 Finger Print": "5F:0F:B5:16:65:81:AA:E6:4A:10:1C:15:83:B1:BE:BE:74:E8:14:A9:1E:7A:8A:14:BA:1E:83:5D:78:F6:E9:E7", + "MD5 Finger 
Print": "80:E6:17:AF:78:D8:E4:B8:FB:5F:41:3A:27:1D:CC:F2", + "Version": 1, + "Key Size": 512, + "Public Key": ( + "-----BEGIN PUBLIC" + " KEY-----\nMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJv8ZpB5hEK7qxP9K3v43hUS5fGT4waK\ne7ix4Z4mu5UBv+cw7WSFAt0Vaag0sAbsPzU8Hhsrj/qPABvfB8asUwcCAwEAAQ==\n-----END" + " PUBLIC KEY-----\n" + ), + "Issuer": { + "C": "JP", + "organizationName": "Frank4DD", + "CN": "Frank4DD Web CA", + "SP": "Tokyo", + "L": "Chuo-ku", + "emailAddress": "support@frank4dd.com", + "OU": "WebCert Support", + }, + "Issuer Hash": "92:DA:45:6B", + "Not Before": "2012-08-22 05:26:54", + "Subject": { + "C": "JP", + "SP": "Tokyo", + "organizationName": "Frank4DD", + "CN": "www.example.com", + }, + } + + name = "keystore.jks" + passphrase = "changeit" + entries = [ + { + "alias": "stringhost", + "certificate": """-----BEGIN CERTIFICATE----- + MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG + A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxETAPBgNVBAoTCEZyYW5rNERE + MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWBgNVBAMTD0ZyYW5rNEREIFdl + YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmcmFuazRkZC5jb20wHhcNMTIw + ODIyMDUyNjU0WhcNMTcwODIxMDUyNjU0WjBKMQswCQYDVQQGEwJKUDEOMAwGA1UE + CAwFVG9reW8xETAPBgNVBAoMCEZyYW5rNEREMRgwFgYDVQQDDA93d3cuZXhhbXBs + ZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEAm/xmkHmEQrurE/0re/jeFRLl + 8ZPjBop7uLHhnia7lQG/5zDtZIUC3RVpqDSwBuw/NTweGyuP+o8AG98HxqxTBwID + AQABMA0GCSqGSIb3DQEBBQUAA4GBABS2TLuBeTPmcaTaUW/LCB2NYOy8GMdzR1mx + 8iBIu2H6/E2tiY3RIevV2OW61qY2/XRQg7YPxx3ffeUugX9F4J/iPnnu1zAxxyBy + 2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0 + Hn+GmxZA\n-----END CERTIFICATE-----""", + } + ] + + test_return = { + "name": name, + "changes": {}, + "result": True, + "comment": "No changes made.\n", + } + with patch("os.path.exists", return_value=True): + with patch.dict(keystore.__opts__, {"test": True}): + with patch.dict( + keystore.__salt__, + { + "keystore.list": MagicMock(return_value=cert_return), + "x509.read_certificate": 
MagicMock(return_value=x509_return), + "keystore.get_sha1": MagicMock(return_value=sha1_return), + }, + ): + assert keystore.managed(name, passphrase, entries) == test_return + + +def test_new_file(): + """ + Test for existing value_present + """ + name = "keystore.jks" + passphrase = "changeit" + entries = [ + { + "alias": "stringhost", + "certificate": """-----BEGIN CERTIFICATE----- + MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG + A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxETAPBgNVBAoTCEZyYW5rNERE + MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWBgNVBAMTD0ZyYW5rNEREIFdl + YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmcmFuazRkZC5jb20wHhcNMTIw + ODIyMDUyNjU0WhcNMTcwODIxMDUyNjU0WjBKMQswCQYDVQQGEwJKUDEOMAwGA1UE + CAwFVG9reW8xETAPBgNVBAoMCEZyYW5rNEREMRgwFgYDVQQDDA93d3cuZXhhbXBs + ZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEAm/xmkHmEQrurE/0re/jeFRLl + 8ZPjBop7uLHhnia7lQG/5zDtZIUC3RVpqDSwBuw/NTweGyuP+o8AG98HxqxTBwID + AQABMA0GCSqGSIb3DQEBBQUAA4GBABS2TLuBeTPmcaTaUW/LCB2NYOy8GMdzR1mx + 8iBIu2H6/E2tiY3RIevV2OW61qY2/XRQg7YPxx3ffeUugX9F4J/iPnnu1zAxxyBy + 2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0 + Hn+GmxZA\n-----END CERTIFICATE-----""", + } + ] + + test_return = { + "name": name, + "changes": {}, + "result": None, + "comment": "Alias stringhost would have been added\n", + } + state_return = { + "name": name, + "changes": {"stringhost": "Added"}, + "result": True, + "comment": "Alias stringhost added.\n", + } + + with patch.dict(keystore.__opts__, {"test": True}): + assert keystore.managed(name, passphrase, entries) == test_return + + with patch("os.path.exists", return_value=False): + with patch.dict( + keystore.__salt__, + { + "keystore.remove": MagicMock(return_value=True), + "keystore.add": MagicMock(return_value=True), + }, + ): + assert keystore.managed(name, passphrase, entries) == state_return + + +def test_force_remove(): + """ + Test for existing value_present + """ + + cert_return = [ + { + "valid_until": "August 21 2017", + "sha1": 
"07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5".encode( + __salt_system_encoding__ + ), + "valid_start": "August 22 2012", + "type": "TrustedCertEntry", + "alias": "oldhost", + "expired": True, + } + ] + x509_return = { + "Not After": "2017-08-21 05:26:54", + "Subject Hash": "97:95:14:4F", + "Serial Number": "0D:FA", + "SHA1 Finger Print": ( + "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D6".encode( + __salt_system_encoding__ + ) + ), + "SHA-256 Finger Print": "5F:0F:B5:16:65:81:AA:E6:4A:10:1C:15:83:B1:BE:BE:74:E8:14:A9:1E:7A:8A:14:BA:1E:83:5D:78:F6:E9:E7", + "MD5 Finger Print": "80:E6:17:AF:78:D8:E4:B8:FB:5F:41:3A:27:1D:CC:F2", + "Version": 1, + "Key Size": 512, + "Public Key": ( + "-----BEGIN PUBLIC" + " KEY-----\nMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJv8ZpB5hEK7qxP9K3v43hUS5fGT4waK\ne7ix4Z4mu5UBv+cw7WSFAt0Vaag0sAbsPzU8Hhsrj/qPABvfB8asUwcCAwEAAQ==\n-----END" + " PUBLIC KEY-----\n" + ), + "Issuer": { + "C": "JP", + "organizationName": "Frank4DD", + "CN": "Frank4DD Web CA", + "SP": "Tokyo", + "L": "Chuo-ku", + "emailAddress": "support@frank4dd.com", + "OU": "WebCert Support", + }, + "Issuer Hash": "92:DA:45:6B", + "Not Before": "2012-08-22 05:26:54", + "Subject": { + "C": "JP", + "SP": "Tokyo", + "organizationName": "Frank4DD", + "CN": "www.example.com", + }, + } + + name = "keystore.jks" + passphrase = "changeit" + entries = [ + { + "alias": "stringhost", + "certificate": """-----BEGIN CERTIFICATE----- + MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG + A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxETAPBgNVBAoTCEZyYW5rNERE + MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWBgNVBAMTD0ZyYW5rNEREIFdl + YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmcmFuazRkZC5jb20wHhcNMTIw + ODIyMDUyNjU0WhcNMTcwODIxMDUyNjU0WjBKMQswCQYDVQQGEwJKUDEOMAwGA1UE + CAwFVG9reW8xETAPBgNVBAoMCEZyYW5rNEREMRgwFgYDVQQDDA93d3cuZXhhbXBs + ZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEAm/xmkHmEQrurE/0re/jeFRLl + 8ZPjBop7uLHhnia7lQG/5zDtZIUC3RVpqDSwBuw/NTweGyuP+o8AG98HxqxTBwID + 
AQABMA0GCSqGSIb3DQEBBQUAA4GBABS2TLuBeTPmcaTaUW/LCB2NYOy8GMdzR1mx + 8iBIu2H6/E2tiY3RIevV2OW61qY2/XRQg7YPxx3ffeUugX9F4J/iPnnu1zAxxyBy + 2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0 + Hn+GmxZA\n-----END CERTIFICATE-----""", + } + ] + + test_return = { + "name": name, + "changes": {}, + "result": None, + "comment": ( + "Alias stringhost would have been updated\nAlias oldhost would have" + " been removed" + ), + } + state_return = { + "name": name, + "changes": {"oldhost": "Removed", "stringhost": "Updated"}, + "result": True, + "comment": "Alias stringhost updated.\nAlias oldhost removed.\n", + } + + with patch.dict(keystore.__opts__, {"test": True}): + with patch("os.path.exists", return_value=True): + with patch.dict( + keystore.__salt__, + { + "keystore.list": MagicMock(return_value=cert_return), + "x509.read_certificate": MagicMock(return_value=x509_return), + }, + ): + assert ( + keystore.managed(name, passphrase, entries, force_remove=True) + == test_return + ) + + with patch("os.path.exists", return_value=True): + with patch.dict( + keystore.__salt__, + { + "keystore.list": MagicMock(return_value=cert_return), + "x509.read_certificate": MagicMock(return_value=x509_return), + "keystore.remove": MagicMock(return_value=True), + "keystore.add": MagicMock(return_value=True), + }, + ): + assert ( + keystore.managed(name, passphrase, entries, force_remove=True) + == state_return + ) diff --git a/tests/pytests/unit/states/test_kubernetes.py b/tests/pytests/unit/states/test_kubernetes.py new file mode 100644 index 000000000000..88641ae87f7c --- /dev/null +++ b/tests/pytests/unit/states/test_kubernetes.py @@ -0,0 +1,811 @@ +""" + :codeauthor: :email:`Jeff Schroeder ` + + Test cases for salt.states.kubernetes +""" + +import base64 +from contextlib import contextmanager + +import pytest + +import salt.modules.kubernetesmod as kubernetesmod +import salt.states.kubernetes as kubernetes +import salt.utils.stringutils +from tests.support.mock import 
MagicMock, patch + +pytestmark = [ + pytest.mark.skipif( + kubernetesmod.HAS_LIBS is False, + reason="Kubernetes client lib is not installed.", + ) +] + + +@pytest.fixture +def configure_loader_modules(): + return {kubernetes: {"__env__": "base"}} + + +@contextmanager +def mock_func(func_name, return_value, test=False): + """ + Mock any of the kubernetes state function return values and set + the test options. + """ + name = "kubernetes.{}".format(func_name) + mocked = {name: MagicMock(return_value=return_value)} + with patch.dict(kubernetes.__salt__, mocked) as patched: + with patch.dict(kubernetes.__opts__, {"test": test}): + yield patched + + +def make_configmap(name, namespace="default", data=None): + return make_ret_dict( + kind="ConfigMap", + name=name, + namespace=namespace, + data=data, + ) + + +def make_secret(name, namespace="default", data=None): + secret_data = make_ret_dict( + kind="Secret", + name=name, + namespace=namespace, + data=data, + ) + # Base64 all of the values just like kubectl does + for key, value in secret_data["data"].items(): + secret_data["data"][key] = base64.b64encode( + salt.utils.stringutils.to_bytes(value) + ) + + return secret_data + + +def make_node_labels(name="minikube"): + return { + "kubernetes.io/hostname": name, + "beta.kubernetes.io/os": "linux", + "beta.kubernetes.io/arch": "amd64", + "failure-domain.beta.kubernetes.io/region": "us-west-1", + } + + +def make_node(name="minikube"): + node_data = make_ret_dict(kind="Node", name="minikube") + node_data.update( + { + "api_version": "v1", + "kind": "Node", + "metadata": { + "annotations": {"node.alpha.kubernetes.io/ttl": "0"}, + "labels": make_node_labels(name=name), + "name": name, + "namespace": None, + "link": "/api/v1/nodes/{name}".format(name=name), + "uid": "7811b8ae-c1a1-11e7-a55a-0800279fb61e", + }, + "spec": {"external_id": name}, + "status": {}, + } + ) + return node_data + + +def make_namespace(name="default"): + namespace_data = make_ret_dict(kind="Namespace", 
name=name) + del namespace_data["data"] + namespace_data.update( + { + "status": {"phase": "Active"}, + "spec": {"finalizers": ["kubernetes"]}, + "metadata": { + "name": name, + "namespace": None, + "labels": None, + "link": "/api/v1/namespaces/{namespace}".format( + namespace=name, + ), + "annotations": None, + "uid": "752fceeb-c1a1-11e7-a55a-0800279fb61e", + }, + } + ) + return namespace_data + + +def make_ret_dict(kind, name, namespace=None, data=None): + """ + Make a minimal example configmap or secret for using in mocks + """ + + assert kind in ("Secret", "ConfigMap", "Namespace", "Node") + + if data is None: + data = {} + + link = "/api/v1/namespaces/{namespace}/{kind}s/{name}".format( + namespace=namespace, + kind=kind.lower(), + name=name, + ) + + return_data = { + "kind": kind, + "data": data, + "api_version": "v1", + "metadata": { + "name": name, + "labels": None, + "namespace": namespace, + "link": link, + "annotations": {"kubernetes.io/change-cause": "salt-call state.apply"}, + }, + } + return return_data + + +def test_configmap_present__fail(): + error = kubernetes.configmap_present( + name="testme", + data={1: 1}, + source="salt://beyond/oblivion.jinja", + ) + assert error == { + "changes": {}, + "result": False, + "name": "testme", + "comment": "'source' cannot be used in combination with 'data'", + } + + +def test_configmap_present__create_test_true(): + # Create a new configmap with test=True + with mock_func("show_configmap", return_value=None, test=True): + ret = kubernetes.configmap_present( + name="example", + data={"example.conf": "# empty config file"}, + ) + assert ret == { + "comment": "The configmap is going to be created", + "changes": {}, + "name": "example", + "result": None, + } + + +def test_configmap_present__create(): + # Create a new configmap + with mock_func("show_configmap", return_value=None): + cm = make_configmap( + name="test", + namespace="default", + data={"foo": "bar"}, + ) + with mock_func("create_configmap", 
return_value=cm): + actual = kubernetes.configmap_present( + name="test", + data={"foo": "bar"}, + ) + assert actual == { + "comment": "", + "changes": {"data": {"foo": "bar"}}, + "name": "test", + "result": True, + } + + +def test_configmap_present__create_no_data(): + # Create a new configmap with no 'data' attribute + with mock_func("show_configmap", return_value=None): + cm = make_configmap( + name="test", + namespace="default", + ) + with mock_func("create_configmap", return_value=cm): + actual = kubernetes.configmap_present(name="test") + assert actual == { + "comment": "", + "changes": {"data": {}}, + "name": "test", + "result": True, + } + + +def test_configmap_present__replace_test_true(): + cm = make_configmap( + name="settings", + namespace="saltstack", + data={"foobar.conf": "# Example configuration"}, + ) + with mock_func("show_configmap", return_value=cm, test=True): + ret = kubernetes.configmap_present( + name="settings", + namespace="saltstack", + data={"foobar.conf": "# Example configuration"}, + ) + assert ret == { + "comment": "The configmap is going to be replaced", + "changes": {}, + "name": "settings", + "result": None, + } + + +def test_configmap_present__replace(): + cm = make_configmap(name="settings", data={"action": "make=war"}) + # Replace an existing configmap + with mock_func("show_configmap", return_value=cm): + new_cm = cm.copy() + new_cm.update({"data": {"action": "make=peace"}}) + with mock_func("replace_configmap", return_value=new_cm): + actual = kubernetes.configmap_present( + name="settings", + data={"action": "make=peace"}, + ) + assert actual == { + "comment": ("The configmap is already present. 
Forcing recreation"), + "changes": {"data": {"action": "make=peace"}}, + "name": "settings", + "result": True, + } + + +def test_configmap_absent__noop_test_true(): + # Nothing to delete with test=True + with mock_func("show_configmap", return_value=None, test=True): + actual = kubernetes.configmap_absent(name="NOT_FOUND") + assert actual == { + "comment": "The configmap does not exist", + "changes": {}, + "name": "NOT_FOUND", + "result": None, + } + + +def test_configmap_absent__test_true(): + # Configmap exists with test=True + cm = make_configmap(name="deleteme", namespace="default") + with mock_func("show_configmap", return_value=cm, test=True): + actual = kubernetes.configmap_absent(name="deleteme") + assert actual == { + "comment": "The configmap is going to be deleted", + "changes": {}, + "name": "deleteme", + "result": None, + } + + +def test_configmap_absent__noop(): + # Nothing to delete + with mock_func("show_configmap", return_value=None): + actual = kubernetes.configmap_absent(name="NOT_FOUND") + assert actual == { + "comment": "The configmap does not exist", + "changes": {}, + "name": "NOT_FOUND", + "result": True, + } + + +def test_configmap_absent(): + # Configmap exists, delete it! 
+ cm = make_configmap(name="deleteme", namespace="default") + with mock_func("show_configmap", return_value=cm): + # The return from this module isn't used in the state + with mock_func("delete_configmap", return_value={}): + actual = kubernetes.configmap_absent(name="deleteme") + assert actual == { + "comment": "ConfigMap deleted", + "changes": { + "kubernetes.configmap": { + "new": "absent", + "old": "present", + }, + }, + "name": "deleteme", + "result": True, + } + + +def test_secret_present__fail(): + actual = kubernetes.secret_present( + name="sekret", + data={"password": "monk3y"}, + source="salt://nope.jinja", + ) + assert actual == { + "changes": {}, + "result": False, + "name": "sekret", + "comment": "'source' cannot be used in combination with 'data'", + } + + +def test_secret_present__exists_test_true(): + secret = make_secret(name="sekret") + new_secret = secret.copy() + new_secret.update({"data": {"password": "uncle"}}) + # Secret exists already and needs replacing with test=True + with mock_func("show_secret", return_value=secret): + with mock_func("replace_secret", return_value=new_secret, test=True): + actual = kubernetes.secret_present( + name="sekret", + data={"password": "uncle"}, + ) + assert actual == { + "changes": {}, + "result": None, + "name": "sekret", + "comment": "The secret is going to be replaced", + } + + +def test_secret_present__exists(): + # Secret exists and gets replaced + secret = make_secret(name="sekret", data={"password": "booyah"}) + with mock_func("show_secret", return_value=secret): + with mock_func("replace_secret", return_value=secret): + actual = kubernetes.secret_present( + name="sekret", + data={"password": "booyah"}, + ) + assert actual == { + "changes": {"data": ["password"]}, + "result": True, + "name": "sekret", + "comment": "The secret is already present. 
Forcing recreation", + } + + +def test_secret_present__create(): + # Secret exists and gets replaced + secret = make_secret(name="sekret", data={"password": "booyah"}) + with mock_func("show_secret", return_value=None): + with mock_func("create_secret", return_value=secret): + actual = kubernetes.secret_present( + name="sekret", + data={"password": "booyah"}, + ) + assert actual == { + "changes": {"data": ["password"]}, + "result": True, + "name": "sekret", + "comment": "", + } + + +def test_secret_present__create_no_data(): + # Secret exists and gets replaced + secret = make_secret(name="sekret") + with mock_func("show_secret", return_value=None): + with mock_func("create_secret", return_value=secret): + actual = kubernetes.secret_present(name="sekret") + assert actual == { + "changes": {"data": []}, + "result": True, + "name": "sekret", + "comment": "", + } + + +def test_secret_present__create_test_true(): + # Secret exists and gets replaced with test=True + secret = make_secret(name="sekret") + with mock_func("show_secret", return_value=None): + with mock_func("create_secret", return_value=secret, test=True): + actual = kubernetes.secret_present(name="sekret") + assert actual == { + "changes": {}, + "result": None, + "name": "sekret", + "comment": "The secret is going to be created", + } + + +def test_secret_absent__noop_test_true(): + with mock_func("show_secret", return_value=None, test=True): + actual = kubernetes.secret_absent(name="sekret") + assert actual == { + "changes": {}, + "result": None, + "name": "sekret", + "comment": "The secret does not exist", + } + + +def test_secret_absent__noop(): + with mock_func("show_secret", return_value=None): + actual = kubernetes.secret_absent(name="passwords") + assert actual == { + "changes": {}, + "result": True, + "name": "passwords", + "comment": "The secret does not exist", + } + + +def test_secret_absent__delete_test_true(): + secret = make_secret(name="credentials", data={"redis": "letmein"}) + with 
mock_func("show_secret", return_value=secret): + with mock_func("delete_secret", return_value=secret, test=True): + actual = kubernetes.secret_absent(name="credentials") + assert actual == { + "changes": {}, + "result": None, + "name": "credentials", + "comment": "The secret is going to be deleted", + } + + +def test_secret_absent__delete(): + secret = make_secret(name="foobar", data={"redis": "letmein"}) + deleted = { + "status": None, + "kind": "Secret", + "code": None, + "reason": None, + "details": None, + "message": None, + "api_version": "v1", + "metadata": { + "link": "/api/v1/namespaces/default/secrets/foobar", + "resource_version": "30292", + }, + } + with mock_func("show_secret", return_value=secret): + with mock_func("delete_secret", return_value=deleted): + actual = kubernetes.secret_absent(name="foobar") + assert actual == { + "changes": { + "kubernetes.secret": {"new": "absent", "old": "present"}, + }, + "result": True, + "name": "foobar", + "comment": "Secret deleted", + } + + +def test_node_label_present__add_test_true(): + labels = make_node_labels() + with mock_func("node_labels", return_value=labels, test=True): + actual = kubernetes.node_label_present( + name="com.zoo-animal", + node="minikube", + value="monkey", + ) + assert actual == { + "changes": {}, + "result": None, + "name": "com.zoo-animal", + "comment": "The label is going to be set", + } + + +def test_node_label_present__add(): + node_data = make_node() + # Remove some of the defaults to make it simpler + node_data["metadata"]["labels"] = { + "beta.kubernetes.io/os": "linux", + } + labels = node_data["metadata"]["labels"] + + with mock_func("node_labels", return_value=labels): + with mock_func("node_add_label", return_value=node_data): + actual = kubernetes.node_label_present( + name="failure-domain.beta.kubernetes.io/zone", + node="minikube", + value="us-central1-a", + ) + assert actual == { + "comment": "", + "changes": { + "minikube.failure-domain.beta.kubernetes.io/zone": { + 
"new": { + "failure-domain.beta.kubernetes.io/zone": ("us-central1-a"), + "beta.kubernetes.io/os": "linux", + }, + "old": {"beta.kubernetes.io/os": "linux"}, + }, + }, + "name": "failure-domain.beta.kubernetes.io/zone", + "result": True, + } + + +def test_node_label_present__already_set(): + node_data = make_node() + labels = node_data["metadata"]["labels"] + with mock_func("node_labels", return_value=labels): + with mock_func("node_add_label", return_value=node_data): + actual = kubernetes.node_label_present( + name="failure-domain.beta.kubernetes.io/region", + node="minikube", + value="us-west-1", + ) + assert actual == { + "changes": {}, + "result": True, + "name": "failure-domain.beta.kubernetes.io/region", + "comment": ("The label is already set and has the specified value"), + } + + +def test_node_label_present__update_test_true(): + node_data = make_node() + labels = node_data["metadata"]["labels"] + with mock_func("node_labels", return_value=labels): + with mock_func("node_add_label", return_value=node_data, test=True): + actual = kubernetes.node_label_present( + name="failure-domain.beta.kubernetes.io/region", + node="minikube", + value="us-east-1", + ) + assert actual == { + "changes": {}, + "result": None, + "name": "failure-domain.beta.kubernetes.io/region", + "comment": "The label is going to be updated", + } + + +def test_node_label_present__update(): + node_data = make_node() + # Remove some of the defaults to make it simpler + node_data["metadata"]["labels"] = { + "failure-domain.beta.kubernetes.io/region": "us-west-1", + } + labels = node_data["metadata"]["labels"] + with mock_func("node_labels", return_value=labels): + with mock_func("node_add_label", return_value=node_data): + actual = kubernetes.node_label_present( + name="failure-domain.beta.kubernetes.io/region", + node="minikube", + value="us-east-1", + ) + assert actual == { + "changes": { + "minikube.failure-domain.beta.kubernetes.io/region": { + "new": { + 
"failure-domain.beta.kubernetes.io/region": ("us-east-1") + }, + "old": { + "failure-domain.beta.kubernetes.io/region": ("us-west-1") + }, + } + }, + "result": True, + "name": "failure-domain.beta.kubernetes.io/region", + "comment": "The label is already set, changing the value", + } + + +def test_node_label_absent__noop_test_true(): + labels = make_node_labels() + with mock_func("node_labels", return_value=labels, test=True): + actual = kubernetes.node_label_absent( + name="non-existent-label", + node="minikube", + ) + assert actual == { + "changes": {}, + "result": None, + "name": "non-existent-label", + "comment": "The label does not exist", + } + + +def test_node_label_absent__noop(): + labels = make_node_labels() + with mock_func("node_labels", return_value=labels): + actual = kubernetes.node_label_absent( + name="non-existent-label", + node="minikube", + ) + assert actual == { + "changes": {}, + "result": True, + "name": "non-existent-label", + "comment": "The label does not exist", + } + + +def test_node_label_absent__delete_test_true(): + labels = make_node_labels() + with mock_func("node_labels", return_value=labels, test=True): + actual = kubernetes.node_label_absent( + name="failure-domain.beta.kubernetes.io/region", + node="minikube", + ) + assert actual == { + "changes": {}, + "result": None, + "name": "failure-domain.beta.kubernetes.io/region", + "comment": "The label is going to be deleted", + } + + +def test_node_label_absent__delete(): + node_data = make_node() + labels = node_data["metadata"]["labels"].copy() + + node_data["metadata"]["labels"].pop("failure-domain.beta.kubernetes.io/region") + + with mock_func("node_labels", return_value=labels): + with mock_func("node_remove_label", return_value=node_data): + actual = kubernetes.node_label_absent( + name="failure-domain.beta.kubernetes.io/region", + node="minikube", + ) + assert actual == { + "result": True, + "changes": { + "kubernetes.node_label": { + "new": "absent", + "old": "present", + } + 
}, + "comment": "Label removed from node", + "name": "failure-domain.beta.kubernetes.io/region", + } + + +def test_namespace_present__create_test_true(): + with mock_func("show_namespace", return_value=None, test=True): + actual = kubernetes.namespace_present(name="saltstack") + assert actual == { + "changes": {}, + "result": None, + "name": "saltstack", + "comment": "The namespace is going to be created", + } + + +def test_namespace_present__create(): + namespace_data = make_namespace(name="saltstack") + with mock_func("show_namespace", return_value=None): + with mock_func("create_namespace", return_value=namespace_data): + actual = kubernetes.namespace_present(name="saltstack") + assert actual == { + "changes": {"namespace": {"new": namespace_data, "old": {}}}, + "result": True, + "name": "saltstack", + "comment": "", + } + + +def test_namespace_present__noop_test_true(): + namespace_data = make_namespace(name="saltstack") + with mock_func("show_namespace", return_value=namespace_data, test=True): + actual = kubernetes.namespace_present(name="saltstack") + assert actual == { + "changes": {}, + "result": None, + "name": "saltstack", + "comment": "The namespace already exists", + } + + +def test_namespace_present__noop(): + namespace_data = make_namespace(name="saltstack") + with mock_func("show_namespace", return_value=namespace_data): + actual = kubernetes.namespace_present(name="saltstack") + assert actual == { + "changes": {}, + "result": True, + "name": "saltstack", + "comment": "The namespace already exists", + } + + +def test_namespace_absent__noop_test_true(): + with mock_func("show_namespace", return_value=None, test=True): + actual = kubernetes.namespace_absent(name="salt") + assert actual == { + "changes": {}, + "result": None, + "name": "salt", + "comment": "The namespace does not exist", + } + + +def test_namespace_absent__noop(): + with mock_func("show_namespace", return_value=None): + actual = kubernetes.namespace_absent(name="salt") + assert actual 
== { + "changes": {}, + "result": True, + "name": "salt", + "comment": "The namespace does not exist", + } + + +def test_namespace_absent__delete_test_true(): + namespace_data = make_namespace(name="salt") + with mock_func("show_namespace", return_value=namespace_data, test=True): + actual = kubernetes.namespace_absent(name="salt") + assert actual == { + "changes": {}, + "result": None, + "name": "salt", + "comment": "The namespace is going to be deleted", + } + + +def test_namespace_absent__delete_code_200(): + namespace_data = make_namespace(name="salt") + deleted = namespace_data.copy() + deleted["code"] = 200 + deleted.update({"code": 200, "message": None}) + with mock_func("show_namespace", return_value=namespace_data): + with mock_func("delete_namespace", return_value=deleted): + actual = kubernetes.namespace_absent(name="salt") + assert actual == { + "changes": { + "kubernetes.namespace": {"new": "absent", "old": "present"} + }, + "result": True, + "name": "salt", + "comment": "Terminating", + } + + +def test_namespace_absent__delete_status_terminating(): + namespace_data = make_namespace(name="salt") + deleted = namespace_data.copy() + deleted.update( + { + "code": None, + "status": "Terminating namespace", + "message": "Terminating this shizzzle yo", + } + ) + with mock_func("show_namespace", return_value=namespace_data): + with mock_func("delete_namespace", return_value=deleted): + actual = kubernetes.namespace_absent(name="salt") + assert actual == { + "changes": { + "kubernetes.namespace": {"new": "absent", "old": "present"} + }, + "result": True, + "name": "salt", + "comment": "Terminating this shizzzle yo", + } + + +def test_namespace_absent__delete_status_phase_terminating(): + # This is what kubernetes 1.8.0 looks like when deleting namespaces + namespace_data = make_namespace(name="salt") + deleted = namespace_data.copy() + deleted.update({"code": None, "message": None, "status": {"phase": "Terminating"}}) + with mock_func("show_namespace", 
return_value=namespace_data): + with mock_func("delete_namespace", return_value=deleted): + actual = kubernetes.namespace_absent(name="salt") + assert actual == { + "changes": { + "kubernetes.namespace": {"new": "absent", "old": "present"} + }, + "result": True, + "name": "salt", + "comment": "Terminating", + } + + +def test_namespace_absent__delete_error(): + namespace_data = make_namespace(name="salt") + deleted = namespace_data.copy() + deleted.update({"code": 418, "message": "I' a teapot!", "status": None}) + with mock_func("show_namespace", return_value=namespace_data): + with mock_func("delete_namespace", return_value=deleted): + actual = kubernetes.namespace_absent(name="salt") + assert actual == { + "changes": {}, + "result": False, + "name": "salt", + "comment": "Something went wrong, response: {}".format( + deleted, + ), + } diff --git a/tests/pytests/unit/states/test_linux_acl.py b/tests/pytests/unit/states/test_linux_acl.py new file mode 100644 index 000000000000..60bbe55f51c0 --- /dev/null +++ b/tests/pytests/unit/states/test_linux_acl.py @@ -0,0 +1,538 @@ +""" + :codeauthor: Jayesh Kariya + + Test cases for salt.states.linux_acl +""" + +import pytest + +import salt.states.linux_acl as linux_acl +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + +pytestmark = [ + pytest.mark.skip_unless_on_linux( + reason="Only run on Linux", + ) +] + + +@pytest.fixture +def configure_loader_modules(): + return {linux_acl: {}} + + +def test_present(): + """ + Test to ensure a Linux ACL is present + """ + maxDiff = None + name = "/root" + acl_type = "users" + acl_name = "damian" + perms = "rwx" + + mock = MagicMock( + side_effect=[ + {name: {acl_type: [{acl_name: {"octal": 5}}]}}, + {name: {acl_type: [{acl_name: {"octal": 5}}]}}, + {name: {acl_type: [{acl_name: {"octal": 5}}]}}, + {name: {acl_type: [{}]}}, + {name: {acl_type: [{}]}}, + {name: {acl_type: [{}]}}, + { + name: {acl_type: [{acl_name: {"octal": 7}}]}, + name 
+ "/foo": {acl_type: [{acl_name: {"octal": 5}}]}, + }, + { + name: {acl_type: [{acl_name: {"octal": 7}}]}, + name + "/foo": {acl_type: [{acl_name: {"octal": 7}}]}, + }, + {name: {acl_type: ""}}, + { + name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + }, + { + name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + }, + { + name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, + }, + ] + ) + mock_modfacl = MagicMock(return_value=True) + + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Updated permissions will be applied for {}: r-x -> {}".format( + acl_name, perms + ) + ret = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": perms, + }, + "old": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": "r-x", + }, + }, + "result": None, + } + + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + # Update - test=False + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Updated permissions for {}".format(acl_name) + ret = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": perms, + }, + "old": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": "r-x", + }, + }, + "result": True, + } + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + # Update - modfacl error + with patch.dict( + linux_acl.__salt__, + {"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))}, + ): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Error updating permissions for 
{}: Custom err".format(acl_name) + ret = { + "name": name, + "comment": comt, + "changes": {}, + "result": False, + } + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + # New - test=True + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "New permissions will be applied for {}: {}".format( + acl_name, perms + ) + ret = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": perms, + } + }, + "result": None, + } + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + # New - test=False + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Applied new permissions for {}".format(acl_name) + ret = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": perms, + } + }, + "result": True, + } + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + # New - modfacl error + with patch.dict( + linux_acl.__salt__, + {"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))}, + ): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Error updating permissions for {}: Custom err".format(acl_name) + ret = { + "name": name, + "comment": comt, + "changes": {}, + "result": False, + } + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + + # New - recurse true + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Updated permissions will be applied for {}: rwx -> {}".format( + acl_name, perms + ) + ret = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": perms, + }, + "old": { + "acl_name": acl_name, + "acl_type": acl_type, + "perms": "rwx", 
+ }, + }, + "result": None, + } + + assert ( + linux_acl.present(name, acl_type, acl_name, perms, recurse=True) + == ret + ) + + # New - recurse true - nothing to do + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Permissions are in the desired state" + ret = {"name": name, "comment": comt, "changes": {}, "result": True} + + assert ( + linux_acl.present(name, acl_type, acl_name, perms, recurse=True) + == ret + ) + + # No acl type + comt = "ACL Type does not exist" + ret = {"name": name, "comment": comt, "result": False, "changes": {}} + assert linux_acl.present(name, acl_type, acl_name, perms) == ret + + # default recurse false - nothing to do + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Permissions are in the desired state" + ret = {"name": name, "comment": comt, "changes": {}, "result": True} + + assert ( + linux_acl.present( + name, "d:" + acl_type, acl_name, perms, recurse=False + ) + == ret + ) + + # default recurse false - nothing to do + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Permissions are in the desired state" + ret = {"name": name, "comment": comt, "changes": {}, "result": True} + + assert ( + linux_acl.present( + name, "d:" + acl_type, acl_name, perms, recurse=False + ) + == ret + ) + + # default recurse true - nothing to do + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Permissions are in the desired state" + ret = {"name": name, "comment": comt, "changes": {}, "result": True} + + assert ( + linux_acl.present( + name, "d:" + acl_type, acl_name, perms, recurse=True + ) + == ret + ) + + +def test_absent(): + """ + Test to ensure a Linux ACL 
does not exist + """ + name = "/root" + acl_type = "users" + acl_name = "damian" + perms = "rwx" + + ret = {"name": name, "result": None, "comment": "", "changes": {}} + + mock = MagicMock( + side_effect=[ + {name: {acl_type: [{acl_name: {"octal": "A"}}]}}, + {name: {acl_type: ""}}, + ] + ) + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Removing permissions" + ret.update({"comment": comt}) + assert linux_acl.absent(name, acl_type, acl_name, perms) == ret + + comt = "ACL Type does not exist" + ret.update({"comment": comt, "result": False}) + assert linux_acl.absent(name, acl_type, acl_name, perms) == ret + + +def test_list_present(): + """ + Test to ensure a Linux ACL is present + """ + name = "/root" + acl_type = "user" + acl_names = ["root", "damian", "homer"] + acl_comment = {"owner": "root", "group": "root", "file": "/root"} + perms = "rwx" + + mock = MagicMock( + side_effect=[ + { + name: { + acl_type: [ + {acl_names[0]: {"octal": "A"}}, + {acl_names[1]: {"octal": "A"}}, + {acl_names[2]: {"octal": "A"}}, + ], + "comment": acl_comment, + } + }, + { + name: { + acl_type: [ + {acl_names[0]: {"octal": "A"}}, + {acl_names[1]: {"octal": "A"}}, + ], + "comment": acl_comment, + } + }, + { + name: { + acl_type: [ + {acl_names[0]: {"octal": "A"}}, + {acl_names[1]: {"octal": "A"}}, + ] + } + }, + {name: {acl_type: [{}]}}, + {name: {acl_type: [{}]}}, + {name: {acl_type: [{}]}}, + {name: {acl_type: ""}}, + ] + ) + mock_modfacl = MagicMock(return_value=True) + + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + # Update - test=True + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Updated permissions will be applied for {}: A -> {}".format( + acl_names, perms + ) + expected = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": ", ".join(acl_names), + "acl_type": acl_type, + "perms": 7, + }, + "old": { + "acl_name": ", ".join(acl_names), + 
"acl_type": acl_type, + "perms": "A", + }, + }, + "result": None, + } + + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert ret == expected + + # Update - test=False + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Applied new permissions for {}".format(", ".join(acl_names)) + expected = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": ", ".join(acl_names), + "acl_type": acl_type, + "perms": "rwx", + } + }, + "result": True, + } + + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + # Update - modfacl error + with patch.dict( + linux_acl.__salt__, + {"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))}, + ): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Error updating permissions for {}: Custom err".format(acl_names) + expected = { + "name": name, + "comment": comt, + "changes": {}, + "result": False, + } + + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + # New - test=True + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "New permissions will be applied for {}: {}".format( + acl_names, perms + ) + expected = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": ", ".join(acl_names), + "acl_type": acl_type, + "perms": perms, + } + }, + "result": None, + } + + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + # New - test=False + with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Applied new permissions for {}".format(", ".join(acl_names)) + expected = { + "name": name, + "comment": comt, + "changes": { + "new": { + "acl_name": ", ".join(acl_names), + "acl_type": acl_type, + 
"perms": perms, + } + }, + "result": True, + } + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + # New - modfacl error + with patch.dict( + linux_acl.__salt__, + {"acl.modfacl": MagicMock(side_effect=CommandExecutionError("Custom err"))}, + ): + with patch.dict(linux_acl.__opts__, {"test": False}): + comt = "Error updating permissions for {}: Custom err".format(acl_names) + expected = { + "name": name, + "comment": comt, + "changes": {}, + "result": False, + } + + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + # No acl type + comt = "ACL Type does not exist" + expected = { + "name": name, + "comment": comt, + "result": False, + "changes": {}, + } + ret = linux_acl.list_present(name, acl_type, acl_names, perms) + assert expected == ret + + +def test_list_absent(): + """ + Test to ensure a Linux ACL does not exist + """ + name = "/root" + acl_type = "users" + acl_names = ["damian", "homer"] + perms = "rwx" + + ret = {"name": name, "result": None, "comment": "", "changes": {}} + + mock = MagicMock( + side_effect=[ + { + name: { + acl_type: [ + {acl_names[0]: {"octal": "A"}, acl_names[1]: {"octal": "A"}} + ] + } + }, + {name: {acl_type: ""}}, + ] + ) + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Removing permissions" + ret.update({"comment": comt}) + assert linux_acl.list_absent(name, acl_type, acl_names, perms) == ret + + comt = "ACL Type does not exist" + ret.update({"comment": comt, "result": False}) + assert linux_acl.list_absent(name, acl_type, acl_names) == ret + + +def test_absent_recursive(): + """ + Test to ensure a Linux ACL does not exist + """ + name = "/root" + acl_type = "users" + acl_name = "damian" + perms = "rwx" + + ret = {"name": name, "result": None, "comment": "", "changes": {}} + + mock = MagicMock( + side_effect=[ + { + name: {acl_type: [{acl_name: {"octal": 7}}]}, + name + 
"/foo": {acl_type: [{acl_name: {"octal": "A"}}]}, + } + ] + ) + with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): + with patch.dict(linux_acl.__opts__, {"test": True}): + comt = "Removing permissions" + ret.update({"comment": comt}) + assert ( + linux_acl.absent(name, acl_type, acl_name, perms, recurse=True) == ret + ) diff --git a/tests/pytests/unit/states/test_net_napalm_yang.py b/tests/pytests/unit/states/test_net_napalm_yang.py new file mode 100644 index 000000000000..99c99e06a7f5 --- /dev/null +++ b/tests/pytests/unit/states/test_net_napalm_yang.py @@ -0,0 +1,55 @@ +""" + :codeauthor: Anthony Shaw + + Test cases for salt.states.net_napalm_yang +""" + +import pytest + +import salt.states.net_napalm_yang as netyang +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {netyang: {}} + + +def test_managed(): + ret = {"changes": {}, "comment": "Loaded.", "name": "test", "result": False} + parse = MagicMock(return_value="abcdef") + temp_file = MagicMock(return_value="") + compliance_report = MagicMock(return_value={"complies": False}) + load_config = MagicMock(return_value={"comment": "Loaded."}) + file_remove = MagicMock() + + with patch("salt.utils.files.fopen"): + with patch.dict( + netyang.__salt__, + { + "temp.file": temp_file, + "napalm_yang.parse": parse, + "napalm_yang.load_config": load_config, + "napalm_yang.compliance_report": compliance_report, + "file.remove": file_remove, + }, + ): + with patch.dict(netyang.__opts__, {"test": False}): + assert netyang.managed("test", "test", models=("model1",)) == ret + assert parse.called + assert temp_file.called + assert compliance_report.called + assert load_config.called + assert file_remove.called + + +def test_configured(): + ret = {"changes": {}, "comment": "Loaded.", "name": "test", "result": False} + load_config = MagicMock(return_value={"comment": "Loaded."}) + + with patch("salt.utils.files.fopen"): + with patch.dict(netyang.__salt__, 
{"napalm_yang.load_config": load_config}): + with patch.dict(netyang.__opts__, {"test": False}): + assert netyang.configured("test", "test", models=("model1",)) == ret + + assert load_config.called diff --git a/tests/pytests/unit/states/test_netconfig.py b/tests/pytests/unit/states/test_netconfig.py new file mode 100644 index 000000000000..ac39f4736cdc --- /dev/null +++ b/tests/pytests/unit/states/test_netconfig.py @@ -0,0 +1,110 @@ +""" + :codeauthor: Gareth J. Greenaway + + Test cases for salt.states.netconfig +""" + +import pytest + +import salt.modules.napalm_network as net_mod +import salt.states.netconfig as netconfig +import salt.utils.files +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + state_loader_globals = { + "__env__": "base", + "__salt__": {"net.replace_pattern": net_mod.replace_pattern}, + } + module_loader_globals = { + "__env__": "base", + "__salt__": { + "net.replace_pattern": net_mod.replace_pattern, + "net.load_config": net_mod.load_config, + }, + } + return {netconfig: state_loader_globals, net_mod: module_loader_globals} + + +def test_replace_pattern_test_is_true(): + """ + Test to replace_pattern to ensure that test=True + is being passed correctly. 
+ """ + name = "name" + pattern = "OLD-POLICY-NAME" + repl = "new-policy-name" + + mock = MagicMock() + mock_net_replace_pattern = MagicMock() + mock_loaded_ret = MagicMock() + + with patch.dict(netconfig.__salt__, {"config.merge": mock}): + with patch.dict( + netconfig.__salt__, {"net.replace_pattern": mock_net_replace_pattern} + ): + with patch.object(salt.utils.napalm, "loaded_ret", mock_loaded_ret): + # Test if test=True is passed as argument to state.apply + with patch.dict(netconfig.__opts__, {"test": True}): + netconfig.replace_pattern(name, pattern, repl) + + # Get the args and kwargs from the mocked call net.replace_pattern + args, kwargs = mock_net_replace_pattern.call_args_list[0] + + # Verify that the keyword argument is True + assert kwargs["test"] + + # Get the args and kwargs from the mocked call to salt.utils.napalm.loaded_ret + args, kwargs = mock_loaded_ret.call_args_list[0] + + # Verify that the third positional argument is True + assert args[2] + + # Test if test=True is passed as argument to state directly + netconfig.replace_pattern(name, pattern, repl, test=True) + + # Get the args and kwargs from the mocked call net.replace_pattern + args, kwargs = mock_net_replace_pattern.call_args_list[0] + + # Verify that the keyword argument is True + assert kwargs["test"] + + # Get the args and kwargs from the mocked call to salt.utils.napalm.loaded_ret + args, kwargs = mock_loaded_ret.call_args_list[0] + + # Verify that the third positional argument is True + assert args[2] + + +def test_managed_test_is_true(): + """ + Test to managed to ensure that test=True + is being passed correctly. 
+ """ + name = "name" + + mock = MagicMock() + mock_update_config = MagicMock() + + with patch.dict(netconfig.__salt__, {"config.merge": mock}): + with patch.object(netconfig, "_update_config", mock_update_config): + # Test if test=True is passed as argument to state.apply + with patch.dict(netconfig.__opts__, {"test": True}): + netconfig.managed(name) + + # Get the args and kwargs from the mocked call net.replace_pattern + args, kwargs = mock_update_config.call_args_list[0] + + # Verify that the keyword argument is True + assert kwargs["test"] + + # Test if test=True is passed as argument to state directly + netconfig.managed(name, test=True) + + # Get the args and kwargs from the mocked call net.replace_pattern + args, kwargs = mock_update_config.call_args_list[0] + + # Verify that the keyword argument is True + assert kwargs["test"] diff --git a/tests/pytests/unit/states/test_network.py b/tests/pytests/unit/states/test_network.py new file mode 100644 index 000000000000..5141ffbba3fe --- /dev/null +++ b/tests/pytests/unit/states/test_network.py @@ -0,0 +1,269 @@ +""" + :codeauthor: Rahul Handay + + Test cases for salt.states.network +""" + +import logging + +import pytest + +import salt.states.network as network +from tests.support.mock import MagicMock, patch + +log = logging.getLogger(__name__) + + +@pytest.fixture +def configure_loader_modules(): + return {network: {}} + + +class MockNetwork: + """ + Mock network class + """ + + def __init__(self): + pass + + @staticmethod + def interfaces(): + """ + Mock interface method + """ + ifaces = { + "salt": {"up": 1}, + "lo": {"up": 1, "inet": [{"label": "lo"}, {"label": "lo:alias1"}]}, + } + return ifaces + + +class MockGrains: + """ + Mock Grains class + """ + + def __init__(self): + pass + + @staticmethod + def grains(lis, bol): + """ + Mock grains method + """ + return {"A": "B"} + + +def test_managed(): + """ + Test to ensure that the named interface is configured properly + """ + with 
patch("salt.states.network.salt.utils.network", MockNetwork()), patch( + "salt.states.network.salt.loader", MockGrains() + ): + ret = {"name": "salt", "changes": {}, "result": False, "comment": ""} + + change = { + "interface": "--- \n+++ \n@@ -1 +1 @@\n-A\n+B", + "status": "Interface salt restart to validate", + } + + dunder_salt = { + "ip.get_interface": MagicMock( + side_effect=[AttributeError, "A", "A", "A", "A", "A"] + ), + "ip.build_interface": MagicMock(return_value="B"), + "saltutil.refresh_grains": MagicMock(return_value=True), + } + + with patch.dict(network.__salt__, dunder_salt): + with patch.dict( + network.__salt__, + {"ip.get_bond": MagicMock(side_effect=AttributeError)}, + ): + assert network.managed("salt", type="bond", test=True) == ret + + ret.update( + { + "comment": ( + "Interface salt is set to be" + " updated:\n--- \n+++ \n@@ -1 +1 @@\n-A\n+B" + ), + "result": None, + } + ) + assert network.managed("salt", type="stack", test=True) == ret + + ipupdown = MagicMock(return_value=True) + with patch.dict(network.__salt__, {"ip.down": ipupdown, "ip.up": ipupdown}): + ret.update( + { + "comment": "Interface salt updated.", + "result": True, + "changes": change, + } + ) + assert network.managed("salt", type="stack") == ret + + with patch.dict(network.__grains__, {"A": True}): + ret.update( + { + "result": True, + "changes": { + "interface": "--- \n+++ \n@@ -1 +1 @@\n-A\n+B", + "status": "Interface salt down", + }, + } + ) + assert network.managed("salt", type="stack", enabled=False) == ret + + mock = MagicMock(return_value=True) + with patch.dict(network.__salt__, {"ip.down": mock}): + with patch.dict( + network.__salt__, {"saltutil.refresh_modules": mock} + ): + change = { + "interface": "--- \n+++ \n@@ -1 +1 @@\n-A\n+B", + "status": "Interface lo:alias1 down", + } + ret.update( + { + "name": "lo:alias1", + "comment": "Interface lo:alias1 updated.", + "result": True, + "changes": change, + } + ) + assert ( + network.managed("lo:alias1", type="eth", 
enabled=False) + == ret + ) + + +def test_routes(): + """ + Test to manage network interface static routes. + """ + ret = {"name": "salt", "changes": {}, "result": False, "comment": ""} + + mock = MagicMock(side_effect=[AttributeError, False, False, "True", False, False]) + with patch.dict(network.__salt__, {"ip.get_routes": mock}): + assert network.routes("salt") == ret + + mock = MagicMock(side_effect=[False, True, "", True, True]) + with patch.dict(network.__salt__, {"ip.build_routes": mock}): + ret.update( + {"result": True, "comment": "Interface salt routes are up to date."} + ) + assert network.routes("salt", test="a") == ret + + ret.update( + { + "comment": "Interface salt routes are set to be added.", + "result": None, + } + ) + assert network.routes("salt", test="a") == ret + + ret.update( + { + "comment": ( + "Interface salt routes are set to be" + " updated:\n--- \n+++ \n@@ -1,4 +0,0 @@\n-T\n-r" + "\n-u\n-e" + ) + } + ) + assert network.routes("salt", test="a") == ret + + mock = MagicMock(side_effect=[AttributeError, True]) + with patch.dict(network.__salt__, {"ip.apply_network_settings": mock}): + ret.update( + { + "changes": {"network_routes": "Added interface salt routes."}, + "comment": "", + "result": False, + } + ) + assert network.routes("salt") == ret + + ret.update( + { + "changes": {"network_routes": "Added interface salt routes."}, + "comment": "Interface salt routes added.", + "result": True, + } + ) + assert network.routes("salt") == ret + + +def test_system(): + """ + Test to ensure that global network settings + are configured properly + """ + ret = {"name": "salt", "changes": {}, "result": False, "comment": ""} + + with patch.dict(network.__opts__, {"test": True}): + mock = MagicMock(side_effect=[AttributeError, False, False, "As"]) + with patch.dict(network.__salt__, {"ip.get_network_settings": mock}): + assert network.system("salt") == ret + + mock = MagicMock(side_effect=[False, True, ""]) + with patch.dict(network.__salt__, 
{"ip.build_network_settings": mock}): + ret.update( + { + "comment": "Global network settings are up to date.", + "result": True, + } + ) + assert network.system("salt") == ret + + ret.update( + { + "comment": "Global network settings are set to be added.", + "result": None, + } + ) + assert network.system("salt") == ret + + ret.update( + { + "comment": ( + "Global network settings are set to" + " be updated:\n--- \n+++ \n@@ -1,2 +0,0" + " @@\n-A\n-s" + ) + } + ) + assert network.system("salt") == ret + + with patch.dict(network.__opts__, {"test": False}): + mock = MagicMock(side_effect=[False, False]) + with patch.dict(network.__salt__, {"ip.get_network_settings": mock}): + mock = MagicMock(side_effect=[True, True]) + with patch.dict(network.__salt__, {"ip.build_network_settings": mock}): + mock = MagicMock(side_effect=[AttributeError, True]) + with patch.dict(network.__salt__, {"ip.apply_network_settings": mock}): + ret.update( + { + "changes": { + "network_settings": "Added global network settings." + }, + "comment": "", + "result": False, + } + ) + assert network.system("salt") == ret + + ret.update( + { + "changes": { + "network_settings": "Added global network settings." 
+ }, + "comment": "Global network settings are up to date.", + "result": True, + } + ) + assert network.system("salt") == ret diff --git a/tests/pytests/unit/states/test_pip.py b/tests/pytests/unit/states/test_pip.py new file mode 100644 index 000000000000..1a71be86ac10 --- /dev/null +++ b/tests/pytests/unit/states/test_pip.py @@ -0,0 +1,70 @@ +""" + :codeauthor: Eric Graham +""" +import logging + +import pytest + +import salt.states.pip_state as pip_state +from salt.exceptions import CommandExecutionError +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def configure_loader_modules(): + return {pip_state: {"__env__": "base", "__opts__": {"test": False}}} + + +def test_issue_64169(caplog): + pkg_to_install = "nonexistent_package" + exception_message = "Invalid JSON (test_issue_64169)" + + mock_pip_list = MagicMock( + side_effect=[ + CommandExecutionError( + exception_message + ), # pre-cache the pip list (preinstall) + {}, # Checking if the pkg is already installed + {pkg_to_install: "100.10.1"}, # Confirming successful installation + ] + ) + mock_pip_version = MagicMock(return_value="100.10.1") + mock_pip_install = MagicMock(return_value={"retcode": 0, "stdout": ""}) + + with patch.dict( + pip_state.__salt__, + { + "pip.list": mock_pip_list, + "pip.version": mock_pip_version, + "pip.install": mock_pip_install, + }, + ): + with caplog.at_level(logging.WARNING): + # Call pip.installed with a specifically 'broken' pip.list. + # pip.installed should continue, but log the exception from pip.list. + # pip.installed should NOT raise an exception itself. 
+ # noinspection PyBroadException + try: + pip_state.installed( + name=pkg_to_install, + use_wheel=False, # Set False to simplify testing + no_use_wheel=False, # ' + no_binary=False, # ' + log=None, # Regression will cause this function call to throw an AttributeError + ) + except AttributeError as exc: + # Observed behavior in #64169 + pytest.fail( + "Regression on #64169: pip_state.installed seems to be throwing an unexpected AttributeError: " + f"{exc}" + ) + + # Take 64169 further and actually confirm that the exception from pip.list got logged. + assert ( + "Pre-caching of PIP packages during states.pip.installed failed by exception " + f"from pip.list: {exception_message}" in caplog.messages + ) + + # Confirm that the state continued to install the package as expected. + # Only check the 'pkgs' parameter of pip.install + assert mock_pip_install.call_args.kwargs["pkgs"] == pkg_to_install diff --git a/tests/pytests/unit/states/test_pkg.py b/tests/pytests/unit/states/test_pkg.py index b852f27b008b..00acceaa4142 100644 --- a/tests/pytests/unit/states/test_pkg.py +++ b/tests/pytests/unit/states/test_pkg.py @@ -1,8 +1,11 @@ import logging +import textwrap import pytest import salt.modules.beacons as beaconmod +import salt.modules.cp as cp +import salt.modules.pacmanpkg as pacmanpkg import salt.modules.pkg_resource as pkg_resource import salt.modules.yumpkg as yumpkg import salt.states.beacon as beaconstate @@ -15,19 +18,32 @@ @pytest.fixture -def configure_loader_modules(): +def configure_loader_modules(minion_opts): return { + cp: { + "__opts__": minion_opts, + }, pkg: { "__env__": "base", "__salt__": {}, "__grains__": {"os": "CentOS", "os_family": "RedHat"}, - "__opts__": {"test": False, "cachedir": ""}, + "__opts__": minion_opts, "__instance_id__": "", "__low__": {}, "__utils__": {"state.gen_tag": state_utils.gen_tag}, }, - beaconstate: {"__salt__": {}, "__opts__": {}}, - beaconmod: {"__salt__": {}, "__opts__": {}}, + beaconstate: { + "__salt__": {},
"__opts__": minion_opts, + }, + beaconmod: { + "__salt__": {}, + "__opts__": minion_opts, + }, + pacmanpkg: { + "__salt__": {}, + "__opts__": minion_opts, + }, pkg_resource: { "__salt__": {}, "__grains__": {"os": "CentOS", "os_family": "RedHat"}, @@ -35,7 +51,7 @@ def configure_loader_modules(): yumpkg: { "__salt__": {}, "__grains__": {"osarch": "x86_64", "osmajorrelease": 7}, - "__opts__": {}, + "__opts__": minion_opts, }, } @@ -77,7 +93,6 @@ def test_uptodate_with_changes(pkgs): "pkg.version": version, }, ): - # Run state with test=false with patch.dict(pkg.__opts__, {"test": False}): ret = pkg.uptodate("dummy", test=True) @@ -141,10 +156,8 @@ def test_uptodate_no_changes(): with patch.dict( pkg.__salt__, {"pkg.list_upgrades": list_upgrades, "pkg.upgrade": upgrade} ): - # Run state with test=false with patch.dict(pkg.__opts__, {"test": False}): - ret = pkg.uptodate("dummy", test=True) assert ret["result"] assert ret["changes"] == {} @@ -554,7 +567,6 @@ def test_installed_with_changes_test_true(list_pkgs): "pkg.list_pkgs": list_pkgs, }, ): - expected = {"dummy": {"new": "installed", "old": ""}} # Run state with test=true with patch.dict(pkg.__opts__, {"test": True}): @@ -563,6 +575,32 @@ def test_installed_with_changes_test_true(list_pkgs): assert ret["changes"] == expected +def test_installed_with_sources(list_pkgs, tmp_path): + """ + Test pkg.installed with passing `sources` + """ + + list_pkgs = MagicMock(return_value=list_pkgs) + pkg_source = tmp_path / "pkga-package-0.3.0.deb" + + with patch.dict( + pkg.__salt__, + { + "cp.cache_file": cp.cache_file, + "pkg.list_pkgs": list_pkgs, + "pkg_resource.pack_sources": pkg_resource.pack_sources, + "lowpkg.bin_pkg_info": MagicMock(), + }, + ), patch("salt.fileclient.get_file_client", return_value=MagicMock()): + try: + ret = pkg.installed("install-pkgd", sources=[{"pkga": str(pkg_source)}]) + assert ret["result"] is False + except TypeError as exc: + if "got multiple values for keyword argument 'saltenv'" in str(exc): 
+ pytest.fail(f"TypeError should have not been raised: {exc}") + raise exc from None + + @pytest.mark.parametrize("action", ["removed", "purged"]) def test_removed_purged_with_changes_test_true(list_pkgs, action): """ @@ -581,7 +619,6 @@ def test_removed_purged_with_changes_test_true(list_pkgs, action): "pkg_resource.version_clean": MagicMock(return_value=None), }, ): - expected = {"pkga": {"new": "{}".format(action), "old": ""}} pkg_actions = {"removed": pkg.removed, "purged": pkg.purged} @@ -809,7 +846,6 @@ def test_installed_with_single_normalize(): ), patch.object( yumpkg, "list_holds", MagicMock() ): - expected = { "weird-name-1.2.3-1234.5.6.test7tst.x86_64": { "old": "", @@ -904,7 +940,6 @@ def test_removed_with_single_normalize(): ), patch.dict( yumpkg.__salt__, salt_dict ): - expected = { "weird-name-1.2.3-1234.5.6.test7tst.x86_64": { "old": "20220214-2.1", @@ -998,7 +1033,6 @@ def test_installed_with_single_normalize_32bit(): ), patch.dict( yumpkg.__grains__, {"os": "CentOS", "osarch": "x86_64", "osmajorrelease": 7} ): - expected = { "xz-devel.i686": { "old": "", @@ -1013,3 +1047,173 @@ def test_installed_with_single_normalize_32bit(): assert "xz-devel.i686" in call_yum_mock.mock_calls[0].args[0] assert ret["result"] assert ret["changes"] == expected + + +@pytest.mark.parametrize( + "kwargs, expected_cli_options", + ( + ( + ( + "fromrepo=foo,bar", + "someotherkwarg=test", + "disablerepo=ignored", + "enablerepo=otherignored", + "disableexcludes=this_argument_is_also_ignored", + ), + ("--disablerepo=*", "--enablerepo=foo,bar"), + ), + ( + ("enablerepo=foo", "disablerepo=bar"), + ("--disablerepo=bar", "--enablerepo=foo"), + ), + ( + ("disablerepo=foo",), + ("--disablerepo=foo",), + ), + ( + ("enablerepo=bar",), + ("--enablerepo=bar",), + ), + ), +) +def test_yumpkg_group_installed_with_repo_options( + list_pkgs, kwargs, expected_cli_options +): + """ + Test that running a pkg.group_installed with repo options on RPM-based + systems results in the correct 
yum/dnf groupinfo command being run by + pkg.group_info. + """ + kwargs = dict(item.split("=", 1) for item in kwargs) + run_stdout = MagicMock( + return_value=textwrap.dedent( + """\ + Group: MyGroup + Group-Id: my-group + Description: A test group + Mandatory Packages: + pkga + pkgb + """ + ) + ) + + salt_dict = { + "cmd.run_stdout": run_stdout, + "pkg.group_diff": yumpkg.group_diff, + "pkg.group_info": yumpkg.group_info, + } + + name = "MyGroup" + with patch.dict(pkg.__salt__, salt_dict), patch.dict( + yumpkg.__salt__, salt_dict + ), patch.object( + yumpkg, + "list_pkgs", + MagicMock(return_value=list_pkgs), + ): + ret = pkg.group_installed(name, **kwargs) + assert ret["result"] + assert not ret["changes"] + expected = [yumpkg._yum(), "--quiet"] + expected.extend(expected_cli_options) + expected.extend(("groupinfo", name)) + run_stdout.assert_called_once_with( + expected, + output_loglevel="trace", + python_shell=False, + ) + + +def test_pacmanpkg_group_installed_with_repo_options(list_pkgs): + """ + Test that running a pkg.group_installed with additional arguments on + platforms which use pacman does not result in a traceback, but is instead + cleanly handled and a useful comment included in the state return. 
+ """ + salt_dict = { + "pkg.group_diff": pacmanpkg.group_diff, + } + + with patch.dict(pkg.__salt__, salt_dict), patch.dict(pacmanpkg.__salt__, salt_dict): + ret = pkg.group_installed("foo", fromrepo="bar") + assert not ret["result"] + assert not ret["changes"] + assert ret["comment"] == "Repo options are not supported on this platform" + + +def test_latest(): + """ + Test pkg.latest + """ + pkg_name = "fake_pkg" + old_version = "1.2.2" + new_version = "1.2.3" + latest_version_mock = MagicMock(return_value={pkg_name: new_version}) + current_version_mock = MagicMock(return_value={pkg_name: old_version}) + install_mock = MagicMock( + return_value={ + pkg_name: { + "new": new_version, + "old": old_version, + }, + } + ) + salt_dict = { + "pkg.latest_version": latest_version_mock, + "pkg.version": current_version_mock, + "pkg.install": install_mock, + } + with patch.dict(pkg.__salt__, salt_dict): + ret = pkg.latest(pkg_name) + assert ret.get("result", False) is True + + +def test_latest_multiple_versions(): + """ + This case arises most often when updating the kernel, where multiple versions are now installed. 
+ + See: https://github.com/saltstack/salt/issues/60931 + """ + pkg_name = "fake_pkg" + old_version = "1.2.2" + new_version = "1.2.3" + latest_version_mock = MagicMock(return_value={pkg_name: new_version}) + current_version_mock = MagicMock(return_value={pkg_name: old_version}) + install_mock = MagicMock( + return_value={ + pkg_name: { + "new": f"{old_version},{new_version}", + "old": old_version, + }, + } + ) + salt_dict = { + "pkg.latest_version": latest_version_mock, + "pkg.version": current_version_mock, + "pkg.install": install_mock, + } + with patch.dict(pkg.__salt__, salt_dict): + ret = pkg.latest(pkg_name) + assert ret.get("result", False) is True + + +def test_latest_no_change_windows(): + """ + Test pkg.latest with no change to the package version for winrepo packages + + See: https://github.com/saltstack/salt/issues/65165 + """ + pkg_name = "fake_pkg" + version = "1.2.2" + latest_version_mock = MagicMock(return_value={pkg_name: version}) + current_version_mock = MagicMock(return_value={pkg_name: version}) + install_mock = MagicMock(return_value={pkg_name: {"install status": "success"}}) + salt_dict = { + "pkg.latest_version": latest_version_mock, + "pkg.version": current_version_mock, + "pkg.install": install_mock, + } + with patch.dict(pkg.__salt__, salt_dict): + ret = pkg.latest(pkg_name) + assert ret.get("result", False) is True diff --git a/tests/pytests/unit/states/test_saltmod.py b/tests/pytests/unit/states/test_saltmod.py deleted file mode 100644 index 073ea5fa589a..000000000000 --- a/tests/pytests/unit/states/test_saltmod.py +++ /dev/null @@ -1,55 +0,0 @@ -import pytest - -import salt.modules.saltutil as saltutil -import salt.states.saltmod as saltmod -from tests.support.mock import create_autospec, patch - - -@pytest.fixture -def configure_loader_modules(): - return {saltmod: {"__opts__": {"__role": "testsuite"}}} - - -@pytest.fixture -def fake_cmd(): - fake_cmd = create_autospec(saltutil.cmd) - with patch.dict(saltmod.__salt__, {"saltutil.cmd": 
fake_cmd}): - yield fake_cmd - - -@pytest.mark.parametrize( - "exclude", - [True, False], -) -def test_exclude_parameter_gets_passed(exclude, fake_cmd): - """ - Smoke test for for salt.states.statemod.state(). Ensures that we - don't take an exception if optional parameters are not specified in - __opts__ or __env__. - """ - args = ("webserver_setup", "webserver2") - expected_exclude = exclude - kwargs = { - "tgt_type": "glob", - "exclude": expected_exclude, - "highstate": True, - } - - saltmod.state(*args, **kwargs) - - call = fake_cmd.call_args[1] - assert call["kwarg"]["exclude"] == expected_exclude - - -def test_exclude_parameter_is_not_passed_if_not_provided(fake_cmd): - # Make sure we don't barf on existing behavior - args = ("webserver_setup", "webserver2") - kwargs_without_exclude = { - "tgt_type": "glob", - "highstate": True, - } - - saltmod.state(*args, **kwargs_without_exclude) - - call = fake_cmd.call_args[1] - assert "exclude" not in call["kwarg"] diff --git a/tests/pytests/unit/states/test_schedule.py b/tests/pytests/unit/states/test_schedule.py index f58a51a22a73..505bb3b5eb87 100644 --- a/tests/pytests/unit/states/test_schedule.py +++ b/tests/pytests/unit/states/test_schedule.py @@ -1,11 +1,14 @@ """ :codeauthor: Jayesh Kariya + :codeauthor: Gareth J. 
Greenaway """ import pytest +import salt.modules.schedule as schedule_mod import salt.states.schedule as schedule -from tests.support.mock import MagicMock, patch +from salt.utils.odict import OrderedDict +from tests.support.mock import MagicMock, mock_open, patch @pytest.fixture @@ -19,49 +22,527 @@ def test_present(): """ name = "job1" - ret = {"name": name, "changes": {}, "result": False, "comment": ""} + job1 = { + "function": "test.ping", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + "when": "4:00am", + } + mock_lst = MagicMock(side_effect=[{}, {"job1": job1}]) + + mock_build_schedule = OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "job1"), + ("enabled", True), + ("jid_include", True), + ("when", "4:00am"), + ] + ) + + mock_add = { + "comment": "Added job: test-schedule to schedule.", + "result": True, + "changes": {"test-schedule": "added"}, + } + + with patch.dict( + schedule.__salt__, + { + "schedule.list": mock_lst, + "schedule.build_schedule_item": MagicMock(return_value=mock_build_schedule), + "schedule.add": MagicMock(return_value=mock_add), + }, + ): + ret = { + "name": "job1", + "result": True, + "changes": {"test-schedule": "added"}, + "comment": "Adding new job job1 to schedule", + } + _res = schedule.present(name) + assert _res == ret + + ret = { + "name": "job1", + "result": True, + "changes": {}, + "comment": "Job job1 in correct state", + } + _res = schedule.present(name) + assert _res == ret + + job1 = { + "function": "test.ping", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + "when": "4:00am", + } + job1_update = { + "function": "test.ping", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + "when": "6:00am", + } + mock_lst = MagicMock(side_effect=[{"job1": job1}, {"job1": job1_update}]) + + mock_build_schedule = OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "job1"), + ("enabled", 
True), + ("jid_include", True), + ("when", "6:00am"), + ] + ) + + mock_modify = { + "comment": "Modified job: test-schedule in schedule.", + "changes": { + "test-schedule": { + "old": OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "test-schedule"), + ("enabled", True), + ("jid_include", True), + ("when", "4:00am"), + ] + ), + "new": OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "test-schedule"), + ("enabled", True), + ("jid_include", True), + ("when", "6:00am"), + ] + ), + } + }, + "result": True, + } - mock_dict = MagicMock(side_effect=[ret, []]) - mock_mod = MagicMock(return_value=ret) - mock_lst = MagicMock(side_effect=[{name: {}}, {name: {}}, {}, {}]) with patch.dict( schedule.__salt__, { "schedule.list": mock_lst, - "schedule.build_schedule_item": mock_dict, - "schedule.modify": mock_mod, - "schedule.add": mock_mod, + "schedule.build_schedule_item": MagicMock(return_value=mock_build_schedule), + "schedule.modify": MagicMock(return_value=mock_modify), }, ): - assert schedule.present(name) == ret + ret = { + "name": "job1", + "result": True, + "changes": { + "test-schedule": { + "old": OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "test-schedule"), + ("enabled", True), + ("jid_include", True), + ("when", "4:00am"), + ] + ), + "new": OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "test-schedule"), + ("enabled", True), + ("jid_include", True), + ("when", "6:00am"), + ] + ), + } + }, + "comment": "Modifying job job1 in schedule", + } + _res = schedule.present(name) + assert _res == ret - with patch.dict(schedule.__opts__, {"test": False}): - assert schedule.present(name) == ret + ret = { + "name": "job1", + "result": True, + "changes": {}, + "comment": "Job job1 in correct state", + } + _res = schedule.present(name) + assert _res == ret - assert schedule.present(name) == ret + job1 = { + "function": "test.ping", + "maxrunning": 1, + "name": 
"job1", + "enabled": True, + "jid_include": True, + "when": "4:00am", + } + mock_lst = MagicMock(side_effect=[{}]) + mock_build_schedule = OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "job1"), + ("enabled", True), + ("jid_include", True), + ("when", "4:00am"), + ] + ) + + mock_add = { + "comment": "Job: test-schedule would be added to schedule.", + "result": True, + "changes": {}, + } + + with patch.dict( + schedule.__salt__, + { + "schedule.list": mock_lst, + "schedule.build_schedule_item": MagicMock(return_value=mock_build_schedule), + "schedule.add": MagicMock(return_value=mock_add), + }, + ): + ret = { + "name": "job1", + "result": True, + "changes": {}, + "comment": "Job: test-schedule would be added to schedule.", + } with patch.dict(schedule.__opts__, {"test": True}): - ret.update({"result": True}) - assert schedule.present(name) == ret + _res = schedule.present(name) + assert _res == ret + + job1 = { + "function": "test.ping", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + "when": "4:00am", + } + job1_update = { + "function": "test.ping", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + "when": "6:00am", + } + mock_lst = MagicMock(side_effect=[{"job1": job1}, {"job1": job1_update}]) + + mock_build_schedule = OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "job1"), + ("enabled", True), + ("jid_include", True), + ("when", "6:00am"), + ] + ) + + mock_modify = { + "comment": "Job: test-schedule would be modified in schedule.", + "changes": { + "test-schedule": { + "old": OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "test-schedule"), + ("enabled", True), + ("jid_include", True), + ("when", "4:00am"), + ] + ), + "new": OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "test-schedule"), + ("enabled", True), + ("jid_include", True), + ("when", "6:00am"), + ] + ), + } + }, + 
"result": True, + } + + with patch.dict( + schedule.__salt__, + { + "schedule.list": mock_lst, + "schedule.build_schedule_item": MagicMock(return_value=mock_build_schedule), + "schedule.modify": MagicMock(return_value=mock_modify), + }, + ): + ret = { + "name": "job1", + "result": True, + "changes": { + "test-schedule": { + "old": OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "test-schedule"), + ("enabled", True), + ("jid_include", True), + ("when", "4:00am"), + ] + ), + "new": OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "test-schedule"), + ("enabled", True), + ("jid_include", True), + ("when", "6:00am"), + ] + ), + } + }, + "comment": "Job: test-schedule would be modified in schedule.", + } + with patch.dict(schedule.__opts__, {"test": True}): + _res = schedule.present(name) + assert _res == ret + + # Add job to schedule when offline=True + job1 = { + "function": "test.ping", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + "when": "4:00am", + "offline": True, + } + mock_lst = MagicMock(return_value={}) + + mock_build_schedule = OrderedDict( + [ + ("function", "test.ping"), + ("maxrunning", 1), + ("name", "job1"), + ("enabled", True), + ("jid_include", True), + ("when", "4:00am"), + ] + ) + + mock_add = { + "comment": "Adding new job test-schedule to schedule.", + "result": True, + "changes": {"test-schedule": "added"}, + } + + event_enter = MagicMock() + event_enter.send.side_effect = (lambda data, tag, cb=None, timeout=60: True,) + event = MagicMock() + event.__enter__.return_value = event_enter + + with patch("salt.utils.event.get_event", return_value=event): + with patch.dict( + schedule.__salt__, + { + "schedule.list": mock_lst, + "schedule.build_schedule_item": MagicMock( + return_value=mock_build_schedule + ), + "schedule.add": MagicMock(return_value=mock_add), + }, + ): + with patch.object(schedule_mod, "list_", mock_lst): + with patch.object( + schedule_mod, 
+ "_get_schedule_config_file", + MagicMock(return_value="/etc/salt/minion.d/_schedule.conf"), + ): + with patch("salt.utils.files.fopen", mock_open()): + ret = { + "comment": "Adding new job job1 to schedule", + "result": True, + "name": "job1", + "changes": {"test-schedule": "added"}, + } + + _res = schedule.present(name, offline=True) + assert _res == ret + assert event.call_count == 0 def test_absent(): """ Test to ensure a job is absent from the schedule. """ + + # Delete job from schedule name = "job1" - ret = {"name": name, "changes": {}, "result": False, "comment": ""} + job1 = { + "function": "test.ping", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + "when": "4:00am", + } + mock_lst = MagicMock(side_effect=[{"job1": job1}]) + + mock_delete = { + "comment": "Deleted job test-schedule from schedule.", + "result": True, + "changes": {"test-schedule": "removed"}, + } + + with patch.dict( + schedule.__salt__, + { + "schedule.list": mock_lst, + "schedule.delete": MagicMock(return_value=mock_delete), + }, + ): + ret = { + "name": "job1", + "result": True, + "changes": {"test-schedule": "removed"}, + "comment": "Removed job job1 from schedule", + } + _res = schedule.absent(name) + assert _res == ret + + # Delete job from schedule when job does not exist + job1 = { + "function": "test.ping", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + "when": "4:00am", + } + mock_lst = MagicMock(side_effect=[{}]) + + mock_delete = { + "comment": "Job test-schedule does not exist.", + "result": True, + "changes": {}, + } + + with patch.dict( + schedule.__salt__, + { + "schedule.list": mock_lst, + "schedule.delete": MagicMock(return_value=mock_delete), + }, + ): + ret = { + "name": "job1", + "result": True, + "changes": {}, + "comment": "Job job1 not present in schedule", + } + _res = schedule.absent(name) + assert _res == ret + + # Delete job from schedule when test=True + job1 = { + "function": "test.ping", + 
"maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + "when": "4:00am", + } + mock_lst = MagicMock(side_effect=[{"job1": job1}]) + + mock_delete = { + "comment": "Job: job1 would be deleted from schedule.", + "result": True, + "changes": {}, + } - mock_mod = MagicMock(return_value=ret) - mock_lst = MagicMock(side_effect=[{name: {}}, {}]) with patch.dict( - schedule.__salt__, {"schedule.list": mock_lst, "schedule.delete": mock_mod} + schedule.__salt__, + { + "schedule.list": mock_lst, + "schedule.delete": MagicMock(return_value=mock_delete), + }, ): - with patch.dict(schedule.__opts__, {"test": False}): - assert schedule.absent(name) == ret + ret = { + "name": "job1", + "result": True, + "changes": {}, + "comment": "Job: job1 would be deleted from schedule.", + } with patch.dict(schedule.__opts__, {"test": True}): - comt = "Job job1 not present in schedule" - ret.update({"comment": comt, "result": True}) - assert schedule.absent(name) == ret + _res = schedule.absent(name) + assert _res == ret + + # Delete job from schedule when offline=True + job1 = { + "function": "test.ping", + "maxrunning": 1, + "name": "job1", + "enabled": True, + "jid_include": True, + "when": "4:00am", + "offline": True, + } + mock_lst = MagicMock(return_value={"job1": job1}) + + mock_delete = { + "comment": "Deleted Job job1 from schedule.", + "result": True, + "changes": {"job1": "removed"}, + } + + event_enter = MagicMock() + event_enter.send.side_effect = (lambda data, tag, cb=None, timeout=60: True,) + event = MagicMock() + event.__enter__.return_value = event_enter + + with patch("salt.utils.event.get_event", return_value=event): + with patch.dict( + schedule.__salt__, + { + "schedule.list": mock_lst, + "schedule.delete": schedule_mod.delete, + }, + ): + with patch.object(schedule_mod, "list_", mock_lst): + with patch.object( + schedule_mod, + "_get_schedule_config_file", + MagicMock(return_value="/etc/salt/minion.d/_schedule.conf"), + ): + with 
patch("salt.utils.files.fopen", mock_open()): + ret = { + "comment": "Removed job job1 from schedule", + "result": True, + "name": "job1", + "changes": {"job1": "removed"}, + } + + _res = schedule.absent(name, offline=True) + assert _res == ret + assert event.call_count == 0 diff --git a/tests/pytests/unit/states/test_selinux.py b/tests/pytests/unit/states/test_selinux.py index 8b30aa2e2826..20be015763c3 100644 --- a/tests/pytests/unit/states/test_selinux.py +++ b/tests/pytests/unit/states/test_selinux.py @@ -7,6 +7,8 @@ import salt.states.selinux as selinux from tests.support.mock import MagicMock, patch +pytestmark = [pytest.mark.skip_unless_on_linux] + @pytest.fixture def configure_loader_modules(): diff --git a/tests/pytests/unit/states/test_user.py b/tests/pytests/unit/states/test_user.py index 94e69d70ed02..ffbd2d7d4d16 100644 --- a/tests/pytests/unit/states/test_user.py +++ b/tests/pytests/unit/states/test_user.py @@ -123,8 +123,8 @@ def test_present_invalid_gid_change(): ) dunder_salt = { "user.info": mock_info, - "file.group_to_gid": MagicMock(side_effect=["foo"]), - "file.gid_to_group": MagicMock(side_effect=[5000, 5000]), + "file.group_to_gid": MagicMock(return_value="foo"), + "file.gid_to_group": MagicMock(return_value=5000), } with patch.dict(user.__grains__, {"kernel": "Linux"}), patch.dict( user.__salt__, dunder_salt @@ -148,8 +148,8 @@ def test_present_invalid_uid_gid_change(): ) dunder_salt = { "user.info": mock_info, - "file.group_to_gid": MagicMock(side_effect=["foo"]), - "file.gid_to_group": MagicMock(side_effect=[5000, 5000]), + "file.group_to_gid": MagicMock(return_value="foo"), + "file.gid_to_group": MagicMock(return_value=5000), } with patch.dict(user.__grains__, {"kernel": "Linux"}), patch.dict( user.__salt__, dunder_salt @@ -179,7 +179,7 @@ def test_present_uid_gid_change(): # get the before/after for the changes dict, and one last time to # confirm that no changes still need to be made. 
mock_info = MagicMock(side_effect=[before, before, after, after]) - mock_group_to_gid = MagicMock(side_effect=[5000, 5001]) + mock_group_to_gid = MagicMock(side_effect=[5000, 5000, 5001, 5001]) mock_gid_to_group = MagicMock( side_effect=["othergroup", "foo", "othergroup", "othergroup"] ) @@ -189,6 +189,8 @@ def test_present_uid_gid_change(): "user.chgid": Mock(), "file.group_to_gid": mock_group_to_gid, "file.gid_to_group": mock_gid_to_group, + "group.info": MagicMock(return_value=after), + "user.chgroups": MagicMock(return_value=True), } with patch.dict(user.__grains__, {"kernel": "Linux"}), patch.dict( user.__salt__, dunder_salt @@ -254,12 +256,11 @@ def test_changes(): "file.gid_to_group": MagicMock(side_effect=[5000, 5000]), } - def mock_exists(*args): - return True - with patch.dict(user.__grains__, {"kernel": "Linux"}), patch.dict( user.__salt__, dunder_salt - ), patch.dict(user.__opts__, {"test": False}), patch("os.path.isdir", mock_exists): + ), patch.dict(user.__opts__, {"test": False}), patch( + "os.path.isdir", MagicMock(return_value=True) + ): ret = user._changes("foo", maxdays=999999, inactdays=0, warndays=7) assert ret == { "maxdays": 999999, @@ -459,3 +460,43 @@ def test_present_password_unlock(): else: unlock_password.assert_called_once() unlock_account.assert_not_called() + + +@pytest.mark.parametrize( + "current,wanted,remove,return_value,expected", + [ + (["grp1"], ["grp1"], False, MagicMock(return_value={"gid": 100}), False), + ( + ["grp1"], + ["grp1", "grp2"], + False, + MagicMock(side_effect=[{"gid": 100}, {"gid": 200}]), + True, + ), + ( + ["grp1"], + ["grp1", "grp2"], + False, + MagicMock(side_effect=[{"gid": 100}, {"gid": 100}]), + False, + ), + ( + ["grp1", "grp2"], + ["grp1"], + True, + MagicMock(side_effect=[{"gid": 100}, {"gid": 200}]), + True, + ), + ( + ["grp1", "grp2"], + ["grp1"], + True, + MagicMock(side_effect=[{"gid": 100}, {"gid": 100}]), + False, + ), + ], +) +def test__group_changes(current, wanted, remove, return_value, 
expected): + with patch.dict(user.__salt__, {"group.info": return_value}): + ret = user._group_changes(current, wanted, remove) + assert ret == expected diff --git a/tests/pytests/unit/states/test_win_lgpo_reg.py b/tests/pytests/unit/states/test_win_lgpo_reg.py index f57262869f4e..c9e4a2e028a3 100644 --- a/tests/pytests/unit/states/test_win_lgpo_reg.py +++ b/tests/pytests/unit/states/test_win_lgpo_reg.py @@ -1,9 +1,14 @@ +import pathlib + import pytest +import salt.modules.win_file as file import salt.modules.win_lgpo_reg as win_lgpo_reg import salt.states.win_lgpo_reg as lgpo_reg import salt.utils.files +import salt.utils.win_dacl import salt.utils.win_lgpo_reg +import salt.utils.win_reg from tests.support.mock import patch pytestmark = [ @@ -24,23 +29,48 @@ def configure_loader_modules(): "lgpo_reg.disable_value": win_lgpo_reg.disable_value, "lgpo_reg.delete_value": win_lgpo_reg.delete_value, }, + "__utils__": { + "reg.read_value": salt.utils.win_reg.read_value, + }, + }, + file: { + "__utils__": { + "dacl.set_perms": salt.utils.win_dacl.set_perms, + }, }, } @pytest.fixture -def empty_reg_pol(): +def empty_reg_pol_mach(): class_info = salt.utils.win_lgpo_reg.CLASS_INFO - reg_pol_file = class_info["Machine"]["policy_path"] - with salt.utils.files.fopen(reg_pol_file, "wb") as f: - f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + reg_pol_file = pathlib.Path(class_info["Machine"]["policy_path"]) + reg_pol_file.parent.mkdir(parents=True, exist_ok=True) + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") yield - with salt.utils.files.fopen(reg_pol_file, "wb") as f: - f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") + 
salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) @pytest.fixture -def reg_pol(): +def empty_reg_pol_user(): + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = pathlib.Path(class_info["User"]["policy_path"]) + reg_pol_file.parent.mkdir(parents=True, exist_ok=True) + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + yield + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + reg_pol_file.write_bytes(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + + +@pytest.fixture +def reg_pol_mach(): data_to_write = { r"SOFTWARE\MyKey1": { "MyValue1": { @@ -51,6 +81,10 @@ def reg_pol(): "data": " ", "type": "REG_SZ", }, + "MyValue3": { + "data": 0, + "type": "REG_DWORD", + }, }, r"SOFTWARE\MyKey2": { "MyValue3": { @@ -60,47 +94,206 @@ def reg_pol(): }, } win_lgpo_reg.write_reg_pol(data_to_write) + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey1", + vname="MyValue1", + vdata="squidward", + vtype="REG_SZ", + ) + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey1", + vname="MyValue3", + vdata=0, + vtype="REG_DWORD", + ) + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey2", + vname="MyValue3", + vdata=["spongebob", "squarepants"], + vtype="REG_MULTI_SZ", + ) yield + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKLM", key="SOFTWARE\\MyKey2") class_info = salt.utils.win_lgpo_reg.CLASS_INFO reg_pol_file = class_info["Machine"]["policy_path"] with salt.utils.files.fopen(reg_pol_file, "wb") as f: 
f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) +@pytest.fixture +def reg_pol_user(): + data_to_write = { + r"SOFTWARE\MyKey1": { + "MyValue1": { + "data": "squidward", + "type": "REG_SZ", + }, + "**del.MyValue2": { + "data": " ", + "type": "REG_SZ", + }, + "MyValue3": { + "data": 0, + "type": "REG_DWORD", + }, + }, + r"SOFTWARE\MyKey2": { + "MyValue3": { + "data": ["spongebob", "squarepants"], + "type": "REG_MULTI_SZ", + }, + }, + } + win_lgpo_reg.write_reg_pol(data_to_write, policy_class="User") + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey1", + vname="MyValue1", + vdata="squidward", + vtype="REG_SZ", + ) + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey1", + vname="MyValue3", + vdata=0, + vtype="REG_DWORD", + ) + salt.utils.win_reg.set_value( + hive="HKCU", + key="SOFTWARE\\MyKey2", + vname="MyValue3", + vdata=["spongebob", "squarepants"], + vtype="REG_MULTI_SZ", + ) + yield + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey1") + salt.utils.win_reg.delete_key_recursive(hive="HKCU", key="SOFTWARE\\MyKey2") + class_info = salt.utils.win_lgpo_reg.CLASS_INFO + reg_pol_file = class_info["User"]["policy_path"] + with salt.utils.files.fopen(reg_pol_file, "wb") as f: + f.write(salt.utils.win_lgpo_reg.REG_POL_HEADER.encode("utf-16-le")) + + def test_virtual_name(): assert lgpo_reg.__virtual__() == "lgpo_reg" -def test_value_present(empty_reg_pol): +def test_machine_value_present(empty_reg_pol_mach): """ - Test value.present + Test value.present in Machine policy """ result = lgpo_reg.value_present( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", v_data="1", v_type="REG_DWORD", ) expected = { "changes": { "new": { - "name": "MyValue", - "key": "SOFTWARE\\MyKey", + "pol": { + "data": 1, + "type": "REG_DWORD", + }, + "reg": { + "data": 1, + "type": "REG_DWORD", + }, + }, + "old": { + "pol": {}, + "reg": {}, + }, + }, + "comment": "Registry policy value has been set", 
+ "name": "MyValue", + "result": True, + } + assert result == expected + + +def test_machine_value_present_similar_names(empty_reg_pol_mach): + """ + Test value.present in Machine policy + """ + lgpo_reg.value_present( + name="MyValueTwo", + key="SOFTWARE\\MyKey1", + v_data="1", + v_type="REG_DWORD", + ) + lgpo_reg.value_present( + name="MyValue", + key="SOFTWARE\\MyKey1", + v_data="1", + v_type="REG_DWORD", + ) + expected = { + "SOFTWARE\\MyKey1": { + "MyValue": { + "type": "REG_DWORD", "data": 1, + }, + "MyValueTwo": { "type": "REG_DWORD", + "data": 1, }, - "old": {}, }, - "comment": "Registry.pol value has been set", - "name": "MyValue", + } + result = win_lgpo_reg.read_reg_pol(policy_class="Machine") + assert result == expected + + +def test_machine_value_present_enforce(reg_pol_mach): + """ + Issue #64222 + Test value.present in Machine policy when the registry changes after the + state is applied. This would cause a discrepancy between the registry + setting and the value in the registry.pol file + """ + # reg_pol_mach has MyValue3 with REG_DWORD value of 0, let's set it to 1 + salt.utils.win_reg.set_value( + hive="HKLM", + key="SOFTWARE\\MyKey1", + vname="MyValue3", + vdata="1", + vtype="REG_DWORD", + ) + # Now the registry and Registry.pol file are out of sync + result = lgpo_reg.value_present( + name="MyValue3", + key="SOFTWARE\\MyKey1", + v_data="0", + v_type="REG_DWORD", + ) + expected = { + "changes": { + "new": { + "reg": { + "data": 0, + } + }, + "old": { + "reg": { + "data": 1, + } + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue3", "result": True, } assert result == expected -def test_value_present_existing_change(reg_pol): +def test_machine_value_present_existing_change(reg_pol_mach): """ - Test value.present with existing incorrect value + Test value.present with existing incorrect value in Machine policy """ result = lgpo_reg.value_present( name="MyValue1", @@ -111,28 +304,72 @@ def 
test_value_present_existing_change(reg_pol): expected = { "changes": { "new": { - "name": "MyValue1", - "key": "SOFTWARE\\MyKey1", - "data": 2, - "type": "REG_DWORD", + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, }, "old": { - "name": "MyValue1", - "key": "SOFTWARE\\MyKey1", - "data": "squidward", - "type": "REG_SZ", + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue1", "result": True, } assert result == expected -def test_value_present_existing_no_change(reg_pol): +def test_machine_value_present_existing_change_dword(reg_pol_mach): + """ + Test value.present with existing incorrect value in Machine policy + """ + result = lgpo_reg.value_present( + name="MyValue3", + key="SOFTWARE\\MyKey1", + v_data=1, + v_type="REG_DWORD", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": 1, + }, + "reg": { + "data": 1, + }, + }, + "old": { + "pol": { + "data": 0, + }, + "reg": { + "data": 0, + }, + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue3", + "result": True, + } + assert result == expected + + +def test_machine_value_present_existing_no_change(reg_pol_mach): """ - Test value.present with existing correct value + Test value.present with existing correct value in Machine policy """ result = lgpo_reg.value_present( name="MyValue1", @@ -142,36 +379,36 @@ def test_value_present_existing_no_change(reg_pol): ) expected = { "changes": {}, - "comment": "Registry.pol value already present", + "comment": "Policy value already present\nRegistry value already present", "name": "MyValue1", "result": True, } assert result == expected -def test_value_present_test_true(empty_reg_pol): +def test_machine_value_present_test_true(empty_reg_pol_mach): """ - Test value.present with test=True + Test 
value.present with test=True in Machine policy """ with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_present( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", v_data="1", v_type="REG_DWORD", ) expected = { "changes": {}, - "comment": "Registry.pol value will be set", + "comment": "Policy value will be set\nRegistry value will be set", "name": "MyValue", "result": None, } assert result == expected -def test_value_present_existing_disabled(reg_pol): +def test_machine_value_present_existing_disabled(reg_pol_mach): """ - Test value.present with existing value that is disabled + Test value.present with existing value that is disabled in Machine policy """ result = lgpo_reg.value_present( name="MyValue2", @@ -182,28 +419,33 @@ def test_value_present_existing_disabled(reg_pol): expected = { "changes": { "new": { - "data": 2, - "key": "SOFTWARE\\MyKey1", - "name": "MyValue2", - "type": "REG_DWORD", + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, }, "old": { - "data": "**del.MyValue2", - "key": "SOFTWARE\\MyKey1", - "name": "MyValue2", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, + "reg": {}, }, }, - "comment": "Registry.pol value has been set", + "comment": "Registry policy value has been set", "name": "MyValue2", "result": True, } assert result == expected -def test_value_disabled(empty_reg_pol): +def test_machine_value_disabled(empty_reg_pol_mach): """ - Test value.disabled + Test value.disabled in Machine policy """ result = lgpo_reg.value_disabled( name="MyValue1", @@ -212,23 +454,24 @@ def test_value_disabled(empty_reg_pol): expected = { "changes": { "new": { - "data": "**del.MyValue1", - "key": "SOFTWARE\\MyKey1", - "name": "MyValue1", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue1", + "type": "REG_SZ", + }, }, - "old": {}, + "old": {"pol": {}}, }, - "comment": "Registry.pol value enabled", + "comment": "Registry policy 
value disabled", "name": "MyValue1", "result": True, } assert result == expected -def test_value_disabled_existing_change(reg_pol): +def test_machine_value_disabled_existing_change(reg_pol_mach): """ - Test value.disabled with an existing value that is not disabled + Test value.disabled with an existing value that is not disabled in Machine + policy """ result = lgpo_reg.value_disabled( name="MyValue1", @@ -237,26 +480,28 @@ def test_value_disabled_existing_change(reg_pol): expected = { "changes": { "new": { - "data": "**del.MyValue1", - "key": "SOFTWARE\\MyKey1", - "name": "MyValue1", + "pol": { + "data": "**del.MyValue1", + }, + "reg": {}, }, "old": { - "data": "squidward", - "key": "SOFTWARE\\MyKey1", - "name": "MyValue1", + "pol": { + "data": "squidward", + }, + "reg": {"data": "squidward", "type": "REG_SZ"}, }, }, - "comment": "Registry.pol value enabled", + "comment": "Registry policy value disabled", "name": "MyValue1", "result": True, } assert result == expected -def test_value_disabled_existing_no_change(reg_pol): +def test_machine_value_disabled_existing_no_change(reg_pol_mach): """ - Test value.disabled with an existing disabled value + Test value.disabled with an existing disabled value in Machine policy """ result = lgpo_reg.value_disabled( name="MyValue2", @@ -264,98 +509,520 @@ def test_value_disabled_existing_no_change(reg_pol): ) expected = { "changes": {}, - "comment": "Registry.pol value already disabled", + "comment": "Registry policy value already disabled", "name": "MyValue2", "result": True, } assert result == expected -def test_value_disabled_test_true(empty_reg_pol): +def test_machine_value_disabled_test_true(empty_reg_pol_mach): """ - Test value.disabled when test=True + Test value.disabled when test=True in Machine policy """ with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_disabled( name="MyValue", - key="SOFTWARE\\MyKey", + key="SOFTWARE\\MyKey1", ) expected = { "changes": {}, - "comment": "Registry.pol value 
will be disabled", + "comment": "Policy value will be disabled", "name": "MyValue", "result": None, } assert result == expected -def test_value_absent(reg_pol): +def test_machine_value_absent(reg_pol_mach): """ - Test value.absent + Test value.absent in Machine policy """ result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { "changes": { - "new": {}, + "new": {"pol": {}, "reg": {}}, "old": { - "data": "squidward", - "key": "SOFTWARE\\MyKey1", - "name": "MyValue1", - "type": "REG_SZ", + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value deleted", + "comment": "Registry policy value deleted", "name": "MyValue1", "result": True, } assert result == expected -def test_value_absent_no_change(empty_reg_pol): +def test_machine_value_absent_no_change(empty_reg_pol_mach): """ - Test value.absent when the value is already absent + Test value.absent when the value is already absent in Machine policy """ result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { "changes": {}, - "comment": "Registry.pol value already absent", + "comment": "Registry policy value already deleted", "name": "MyValue1", "result": True, } assert result == expected -def test_value_absent_disabled(reg_pol): +def test_machine_value_absent_disabled(reg_pol_mach): """ - Test value.absent when the value is disabled + Test value.absent when the value is disabled in Machine policy """ result = lgpo_reg.value_absent(name="MyValue2", key="SOFTWARE\\MyKey1") expected = { "changes": { - "new": {}, + "new": {"pol": {}}, "old": { - "data": "**del.MyValue2", - "key": "SOFTWARE\\MyKey1", - "name": "MyValue2", - "type": "REG_SZ", + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, }, }, - "comment": "Registry.pol value deleted", + "comment": "Registry policy value deleted", "name": "MyValue2", "result": True, } assert result == expected -def 
test_value_absent_test_true(reg_pol): +def test_machine_value_absent_test_true(reg_pol_mach): """ - Test value.absent with test=True + Test value.absent with test=True in Machine policy """ with patch.dict(lgpo_reg.__opts__, {"test": True}): result = lgpo_reg.value_absent(name="MyValue1", key="SOFTWARE\\MyKey1") expected = { "changes": {}, - "comment": "Registry.pol value will be deleted", + "comment": "Policy value will be deleted\nRegistry value will be deleted", + "name": "MyValue1", + "result": None, + } + assert result == expected + + +def test_user_value_present(empty_reg_pol_user): + """ + Test value.present in User policy + """ + result = lgpo_reg.value_present( + name="MyValue", + key="SOFTWARE\\MyKey1", + v_data="1", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": 1, + "type": "REG_DWORD", + }, + "reg": { + "data": 1, + "type": "REG_DWORD", + }, + }, + "old": { + "pol": {}, + "reg": {}, + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue", + "result": True, + } + assert result == expected + + +def test_user_value_present_similar_names(empty_reg_pol_user): + """ + Test value.present in User policy + """ + lgpo_reg.value_present( + name="MyValueTwo", + key="SOFTWARE\\MyKey1", + v_data="1", + v_type="REG_DWORD", + policy_class="User", + ) + lgpo_reg.value_present( + name="MyValue", + key="SOFTWARE\\MyKey1", + v_data="1", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "SOFTWARE\\MyKey1": { + "MyValue": { + "type": "REG_DWORD", + "data": 1, + }, + "MyValueTwo": { + "type": "REG_DWORD", + "data": 1, + }, + }, + } + result = win_lgpo_reg.read_reg_pol(policy_class="User") + assert result == expected + + +def test_user_value_present_existing_change(reg_pol_user): + """ + Test value.present with existing incorrect value in User policy + """ + result = lgpo_reg.value_present( + name="MyValue1", + key="SOFTWARE\\MyKey1", + v_data="2", + v_type="REG_DWORD", + 
policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, + }, + "old": { + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_present_existing_change_dword(reg_pol_user): + """ + Test value.present with existing incorrect value in User policy + """ + result = lgpo_reg.value_present( + name="MyValue3", + key="SOFTWARE\\MyKey1", + v_data=1, + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": 1, + }, + "reg": { + "data": 1, + }, + }, + "old": { + "pol": { + "data": 0, + }, + "reg": { + "data": 0, + }, + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue3", + "result": True, + } + assert result == expected + + +def test_user_value_present_existing_no_change(reg_pol_user): + """ + Test value.present with existing correct value in User policy + """ + result = lgpo_reg.value_present( + name="MyValue1", + key="SOFTWARE\\MyKey1", + v_data="squidward", + v_type="REG_SZ", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Policy value already present\nRegistry value already present", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_present_test_true(empty_reg_pol_user): + """ + Test value.present with test=True in User policy + """ + with patch.dict(lgpo_reg.__opts__, {"test": True}): + result = lgpo_reg.value_present( + name="MyValue", + key="SOFTWARE\\MyKey1", + v_data="1", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Policy value will be set\nRegistry value will be set", + "name": "MyValue", + "result": None, + } + assert result == expected 
+ + +def test_user_value_present_existing_disabled(reg_pol_user): + """ + Test value.present with existing value that is disabled in User policy + """ + result = lgpo_reg.value_present( + name="MyValue2", + key="SOFTWARE\\MyKey1", + v_data="2", + v_type="REG_DWORD", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": 2, + "type": "REG_DWORD", + }, + "reg": { + "data": 2, + "type": "REG_DWORD", + }, + }, + "old": { + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, + "reg": {}, + }, + }, + "comment": "Registry policy value has been set", + "name": "MyValue2", + "result": True, + } + assert result == expected + + +def test_user_value_disabled(empty_reg_pol_user): + """ + Test value.disabled in User policy + """ + result = lgpo_reg.value_disabled( + name="MyValue1", key="SOFTWARE\\MyKey1", policy_class="User" + ) + expected = { + "changes": { + "new": { + "pol": { + "data": "**del.MyValue1", + "type": "REG_SZ", + }, + }, + "old": {"pol": {}}, + }, + "comment": "Registry policy value disabled", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_disabled_existing_change(reg_pol_user): + """ + Test value.disabled with an existing value that is not disabled in User + policy + """ + result = lgpo_reg.value_disabled( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": { + "data": "**del.MyValue1", + }, + "reg": {}, + }, + "old": { + "pol": { + "data": "squidward", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, + }, + }, + "comment": "Registry policy value disabled", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_disabled_existing_no_change(reg_pol_user): + """ + Test value.disabled with an existing disabled value in User policy + """ + result = lgpo_reg.value_disabled( + name="MyValue2", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + 
expected = { + "changes": {}, + "comment": "Registry policy value already disabled", + "name": "MyValue2", + "result": True, + } + assert result == expected + + +def test_user_value_disabled_test_true(empty_reg_pol_user): + """ + Test value.disabled when test=True in User policy + """ + with patch.dict(lgpo_reg.__opts__, {"test": True}): + result = lgpo_reg.value_disabled( + name="MyValue", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Policy value will be disabled", + "name": "MyValue", + "result": None, + } + assert result == expected + + +def test_user_value_absent(reg_pol_user): + """ + Test value.absent in User policy + """ + result = lgpo_reg.value_absent( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": { + "new": { + "pol": {}, + "reg": {}, + }, + "old": { + "pol": { + "data": "squidward", + "type": "REG_SZ", + }, + "reg": { + "data": "squidward", + "type": "REG_SZ", + }, + }, + }, + "comment": "Registry policy value deleted", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_absent_no_change(empty_reg_pol_user): + """ + Test value.absent when the value is already absent in User policy + """ + result = lgpo_reg.value_absent( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Registry policy value already deleted", + "name": "MyValue1", + "result": True, + } + assert result == expected + + +def test_user_value_absent_disabled(reg_pol_user): + """ + Test value.absent when the value is disabled in User policy + """ + result = lgpo_reg.value_absent( + name="MyValue2", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": { + "new": {"pol": {}}, + "old": { + "pol": { + "data": "**del.MyValue2", + "type": "REG_SZ", + }, + }, + }, + "comment": "Registry policy value deleted", + "name": "MyValue2", + "result": True, + } + 
assert result == expected + + +def test_user_value_absent_test_true(reg_pol_user): + """ + Test value.absent with test=True in User policy + """ + with patch.dict(lgpo_reg.__opts__, {"test": True}): + result = lgpo_reg.value_absent( + name="MyValue1", + key="SOFTWARE\\MyKey1", + policy_class="User", + ) + expected = { + "changes": {}, + "comment": "Policy value will be deleted\nRegistry value will be deleted", "name": "MyValue1", "result": None, } diff --git a/tests/pytests/unit/states/test_zfs.py b/tests/pytests/unit/states/test_zfs.py index 1c25a4c0e99a..ce622cf79516 100644 --- a/tests/pytests/unit/states/test_zfs.py +++ b/tests/pytests/unit/states/test_zfs.py @@ -18,6 +18,10 @@ from tests.support.mock import MagicMock, patch from tests.support.zfs import ZFSMockData +pytestmark = [ + pytest.mark.slow_test, +] + @pytest.fixture def utils_patch(): diff --git a/tests/pytests/unit/states/test_zpool.py b/tests/pytests/unit/states/test_zpool.py index af14b900f0b9..62afec60bea0 100644 --- a/tests/pytests/unit/states/test_zpool.py +++ b/tests/pytests/unit/states/test_zpool.py @@ -18,6 +18,10 @@ from tests.support.mock import MagicMock, patch from tests.support.zfs import ZFSMockData +pytestmark = [ + pytest.mark.slow_test, +] + @pytest.fixture def utils_patch(): diff --git a/tests/pytests/unit/test_acl.py b/tests/pytests/unit/test_acl.py new file mode 100644 index 000000000000..e5788fb846d7 --- /dev/null +++ b/tests/pytests/unit/test_acl.py @@ -0,0 +1,96 @@ +""" + Unit tests for salt.acl.ClientACL +""" + + +import pytest + +from salt import acl + + +@pytest.fixture +def configure_loader_modules(): + return {acl: {}} + + +@pytest.fixture +def blacklist(): + return { + "users": ["joker", "penguin", "*bad_*", "blocked_.*", "^Homer$"], + "modules": ["cmd.run", "test.fib", "rm-rf.*"], + } + + +@pytest.fixture +def whitelist(): + return { + "users": ["testuser", "saltuser"], + "modules": ["test.ping", "grains.items"], + } + + +def test_user_is_blacklisted(blacklist): + """ + 
test user_is_blacklisted + """ + client_acl = acl.PublisherACL(blacklist) + + assert client_acl.user_is_blacklisted("joker") + assert client_acl.user_is_blacklisted("penguin") + assert client_acl.user_is_blacklisted("bad_") + assert client_acl.user_is_blacklisted("bad_user") + assert client_acl.user_is_blacklisted("bad_*") + assert client_acl.user_is_blacklisted("user_bad_") + assert client_acl.user_is_blacklisted("blocked_") + assert client_acl.user_is_blacklisted("blocked_user") + assert client_acl.user_is_blacklisted("blocked_.*") + assert client_acl.user_is_blacklisted("Homer") + + assert not client_acl.user_is_blacklisted("batman") + assert not client_acl.user_is_blacklisted("robin") + assert not client_acl.user_is_blacklisted("bad") + assert not client_acl.user_is_blacklisted("blocked") + assert not client_acl.user_is_blacklisted("NotHomer") + assert not client_acl.user_is_blacklisted("HomerSimpson") + + +def test_cmd_is_blacklisted(blacklist): + """ + test cmd_is_blacklisted + """ + client_acl = acl.PublisherACL(blacklist) + + assert client_acl.cmd_is_blacklisted("cmd.run") + assert client_acl.cmd_is_blacklisted("test.fib") + assert client_acl.cmd_is_blacklisted("rm-rf.root") + + assert not client_acl.cmd_is_blacklisted("cmd.shell") + assert not client_acl.cmd_is_blacklisted("test.versions") + assert not client_acl.cmd_is_blacklisted("arm-rf.root") + + assert client_acl.cmd_is_blacklisted(["cmd.run", "state.sls"]) + assert not client_acl.cmd_is_blacklisted(["state.highstate", "state.sls"]) + + +def test_user_is_whitelisted(whitelist): + """ + test user_is_whitelisted + """ + client_acl = acl.PublisherACL(whitelist) + + assert client_acl.user_is_whitelisted("testuser") + assert client_acl.user_is_whitelisted("saltuser") + assert not client_acl.user_is_whitelisted("three") + assert not client_acl.user_is_whitelisted("hans") + + +def test_cmd_is_whitelisted(whitelist): + """ + test cmd_is_whitelisted + """ + client_acl = acl.PublisherACL(whitelist) + + assert 
client_acl.cmd_is_whitelisted("test.ping") + assert client_acl.cmd_is_whitelisted("grains.items") + assert not client_acl.cmd_is_whitelisted("cmd.run") + assert not client_acl.cmd_is_whitelisted("test.version") diff --git a/tests/pytests/unit/test_log.py b/tests/pytests/unit/test_log.py new file mode 100644 index 000000000000..83d2e9e2cdab --- /dev/null +++ b/tests/pytests/unit/test_log.py @@ -0,0 +1,169 @@ +""" + :codeauthor: Pedro Algarvio (pedro@algarvio.me) + + + tests.unit.log_test + ~~~~~~~~~~~~~~~~~~~ + + Test salt's "hacked" logging +""" + + +import io +import logging + +from salt._logging.handlers import StreamHandler +from salt._logging.impl import SaltLoggingClass +from tests.support.helpers import TstSuiteLoggingHandler + + +def test_issue_2853_regex_TypeError(): + # Now, python's logging logger class is ours. + # Let's make sure we have at least one instance + log = SaltLoggingClass(__name__) + + # Test for a format which includes digits in name formatting. + log_format = "[%(name)-15s] %(message)s" + handler = TstSuiteLoggingHandler(format=log_format) + log.addHandler(handler) + + # Trigger TstSuiteLoggingHandler.__enter__ + with handler: + # Let's create another log instance to trigger salt's logging class + # calculations. + try: + SaltLoggingClass("{}.with_digits".format(__name__)) + except Exception as err: # pylint: disable=broad-except + raise AssertionError("No exception should have been raised: {}".format(err)) + + # Remove the testing handler + log.removeHandler(handler) + + # Test for a format which does not include digits in name formatting. + log_format = "[%(name)s] %(message)s" + handler = TstSuiteLoggingHandler(format=log_format) + log.addHandler(handler) + + # Trigger TstSuiteLoggingHandler.__enter__ + with handler: + # Let's create another log instance to trigger salt's logging class + # calculations. 
+ try: + SaltLoggingClass("{}.without_digits".format(__name__)) + except Exception as err: # pylint: disable=broad-except + raise AssertionError("No exception should have been raised: {}".format(err)) + + # Remove the testing handler + log.removeHandler(handler) + + +def test_exc_info_on_loglevel(): + def raise_exception_on_purpose(): + 1 / 0 # pylint: disable=pointless-statement + + log = SaltLoggingClass(__name__) + + # Only stream2 should contain the traceback + stream1 = io.StringIO() + stream2 = io.StringIO() + handler1 = StreamHandler(stream1) + handler2 = StreamHandler(stream2) + + handler1.setLevel(logging.INFO) + handler2.setLevel(logging.DEBUG) + + log.addHandler(handler1) + log.addHandler(handler2) + + try: + raise_exception_on_purpose() + except ZeroDivisionError as exc: + log.error( + "Exception raised on purpose caught: ZeroDivisionError", + exc_info_on_loglevel=logging.DEBUG, + ) + + try: + assert ( + "Exception raised on purpose caught: ZeroDivisionError" + in stream1.getvalue() + ) + assert "Traceback (most recent call last)" not in stream1.getvalue() + assert ( + "Exception raised on purpose caught: ZeroDivisionError" + in stream2.getvalue() + ) + assert "Traceback (most recent call last)" in stream2.getvalue() + finally: + log.removeHandler(handler1) + log.removeHandler(handler2) + + # Both streams should contain the traceback + stream1 = io.StringIO() + stream2 = io.StringIO() + handler1 = StreamHandler(stream1) + handler2 = StreamHandler(stream2) + + handler1.setLevel(logging.INFO) + handler2.setLevel(logging.DEBUG) + + log.addHandler(handler1) + log.addHandler(handler2) + + try: + raise_exception_on_purpose() + except ZeroDivisionError as exc: + log.error( + "Exception raised on purpose caught: ZeroDivisionError", + exc_info_on_loglevel=logging.INFO, + ) + + try: + assert ( + "Exception raised on purpose caught: ZeroDivisionError" + in stream1.getvalue() + ) + assert "Traceback (most recent call last)" in stream1.getvalue() + assert ( + 
"Exception raised on purpose caught: ZeroDivisionError" + in stream2.getvalue() + ) + assert "Traceback (most recent call last)" in stream2.getvalue() + finally: + log.removeHandler(handler1) + log.removeHandler(handler2) + + # No streams should contain the traceback + stream1 = io.StringIO() + stream2 = io.StringIO() + handler1 = StreamHandler(stream1) + handler2 = StreamHandler(stream2) + + handler1.setLevel(logging.ERROR) + handler2.setLevel(logging.INFO) + + log.addHandler(handler1) + log.addHandler(handler2) + + try: + raise_exception_on_purpose() + except ZeroDivisionError as exc: + log.error( + "Exception raised on purpose caught: ZeroDivisionError", + exc_info_on_loglevel=logging.DEBUG, + ) + + try: + assert ( + "Exception raised on purpose caught: ZeroDivisionError" + in stream1.getvalue() + ) + assert "Traceback (most recent call last)" not in stream1.getvalue() + assert ( + "Exception raised on purpose caught: ZeroDivisionError" + in stream2.getvalue() + ) + assert "Traceback (most recent call last)" not in stream2.getvalue() + finally: + log.removeHandler(handler1) + log.removeHandler(handler2) diff --git a/tests/pytests/unit/test_master.py b/tests/pytests/unit/test_master.py index 14e137f49833..502767d3e343 100644 --- a/tests/pytests/unit/test_master.py +++ b/tests/pytests/unit/test_master.py @@ -21,14 +21,46 @@ def encrypted_requests(tmp_path): ) +def test_maintenance_duration(): + """ + Validate Maintenance process duration. 
+ """ + opts = { + "loop_interval": 10, + "maintenance_interval": 1, + "cachedir": "/tmp", + "sock_dir": "/tmp", + "maintenance_niceness": 1, + "key_cache": "sched", + "conf_file": "", + "master_job_cache": "", + "pki_dir": "/tmp", + "eauth_tokens": "", + } + mp = salt.master.Maintenance(opts) + with patch("salt.utils.verify.check_max_open_files") as check_files, patch.object( + mp, "handle_key_cache" + ) as handle_key_cache, patch("salt.daemons") as salt_daemons, patch.object( + mp, "handle_git_pillar" + ) as handle_git_pillar: + mp.run() + assert salt_daemons.masterapi.clean_old_jobs.called + assert salt_daemons.masterapi.clean_expired_tokens.called + assert salt_daemons.masterapi.clean_pub_auth.called + assert handle_git_pillar.called + + def test_fileserver_duration(): + """ + Validate Fileserver process duration. + """ with patch("salt.master.FileserverUpdate._do_update") as update: start = time.time() salt.master.FileserverUpdate.update(1, {}, 1) end = time.time() # Interval is equal to timeout so the _do_update method will be called # one time. 
- update.called_once() + update.assert_called_once() # Timeout is 1 second duration = end - start if duration > 2 and salt.utils.platform.spawning_platform(): diff --git a/tests/pytests/unit/test_minion.py b/tests/pytests/unit/test_minion.py index 1cee025a485b..47eda02b7193 100644 --- a/tests/pytests/unit/test_minion.py +++ b/tests/pytests/unit/test_minion.py @@ -8,6 +8,7 @@ import salt.ext.tornado.gen import salt.ext.tornado.testing import salt.minion +import salt.modules.test as test_mod import salt.syspaths import salt.utils.crypt import salt.utils.event as event @@ -21,35 +22,33 @@ log = logging.getLogger(__name__) -def test_minion_load_grains_false(): +def test_minion_load_grains_false(minion_opts): """ Minion does not generate grains when load_grains is False """ - opts = {"random_startup_delay": 0, "grains": {"foo": "bar"}} + minion_opts["grains"] = {"foo": "bar"} with patch("salt.loader.grains") as grainsfunc: - minion = salt.minion.Minion(opts, load_grains=False) - assert minion.opts["grains"] == opts["grains"] + minion = salt.minion.Minion(minion_opts, load_grains=False) + assert minion.opts["grains"] == minion_opts["grains"] grainsfunc.assert_not_called() -def test_minion_load_grains_true(): +def test_minion_load_grains_true(minion_opts): """ Minion generates grains when load_grains is True """ - opts = {"random_startup_delay": 0, "grains": {}} with patch("salt.loader.grains") as grainsfunc: - minion = salt.minion.Minion(opts, load_grains=True) + minion = salt.minion.Minion(minion_opts, load_grains=True) assert minion.opts["grains"] != {} grainsfunc.assert_called() -def test_minion_load_grains_default(): +def test_minion_load_grains_default(minion_opts): """ Minion load_grains defaults to True """ - opts = {"random_startup_delay": 0, "grains": {}} with patch("salt.loader.grains") as grainsfunc: - minion = salt.minion.Minion(opts) + minion = salt.minion.Minion(minion_opts) assert minion.opts["grains"] != {} grainsfunc.assert_called() @@ -98,7 +97,8 @@ def 
test_send_req_fires_completion_event(event, minion_opts): if "fire_event" in call[0]: condition_event_tag = ( len(call.args) > 1 - and call.args[1] == "__master_req_channel_payload" + and call.args[1] + == f"__master_req_channel_payload/{minion_opts['master']}" ) condition_event_tag_error = "{} != {}; Call(number={}): {}".format( idx, call, call.args[1], "__master_req_channel_payload" @@ -140,22 +140,17 @@ async def test_send_req_async_regression_62453(minion_opts): assert rtn is False -def test_mine_send_tries(): +def test_mine_send_tries(minion_opts): channel_enter = MagicMock() channel_enter.send.side_effect = lambda load, timeout, tries: tries channel = MagicMock() channel.__enter__.return_value = channel_enter - opts = { - "random_startup_delay": 0, - "grains": {}, - "return_retry_tries": 20, - "minion_sign_messages": False, - } + minion_opts["return_retry_tries"] = 20 with patch("salt.channel.client.ReqChannel.factory", return_value=channel), patch( "salt.loader.grains" ): - minion = salt.minion.Minion(opts) + minion = salt.minion.Minion(minion_opts) minion.tok = "token" data = {} @@ -709,9 +704,11 @@ def compile_pillar(self): try: with patch("salt.pillar.get_pillar", return_value=MockPillarCompiler()): - with patch("salt.loader.executors") as execmock: + with patch("salt.loader.executors", mock=MagicMock()) as execmock: minion.gen_modules() - assert execmock.called_with(minion.opts, minion.functions) + execmock.assert_called_once_with( + minion.opts, functions=minion.functions, proxy=minion.proxy, context={} + ) finally: minion.destroy() @@ -1097,3 +1094,28 @@ async def test_master_type_disable(minion_opts): assert minion.connected is False finally: minion.destroy() + + +async def test_syndic_async_req_channel(syndic_opts): + syndic_opts["_minion_conf_file"] = "" + syndic_opts["master_uri"] = "tcp://127.0.0.1:4506" + syndic = salt.minion.Syndic(syndic_opts) + syndic.pub_channel = MagicMock() + syndic.tune_in_no_block() + assert 
isinstance(syndic.async_req_channel, salt.channel.client.AsyncReqChannel) + + +@pytest.mark.slow_test +def test_load_args_and_kwargs(minion_opts): + """ + Ensure load_args_and_kwargs performs correctly + """ + _args = [{"max": 40, "__kwarg__": True}] + ret = salt.minion.load_args_and_kwargs(test_mod.rand_sleep, _args) + assert ret == ([], {"max": 40}) + assert all([True if "__kwarg__" in item else False for item in _args]) + + # Test invalid arguments + _args = [{"max_sleep": 40, "__kwarg__": True}] + with pytest.raises(salt.exceptions.SaltInvocationError): + ret = salt.minion.load_args_and_kwargs(test_mod.rand_sleep, _args) diff --git a/tests/pytests/unit/test_payload.py b/tests/pytests/unit/test_payload.py index ecd77ab4fcfb..cf862e256b85 100644 --- a/tests/pytests/unit/test_payload.py +++ b/tests/pytests/unit/test_payload.py @@ -7,8 +7,11 @@ import datetime import logging +import zmq + import salt.exceptions import salt.payload +import salt.utils.msgpack from salt.defaults import _Constant from salt.utils import immutabletypes from salt.utils.odict import OrderedDict @@ -210,3 +213,93 @@ def test_constants(): sdata = salt.payload.dumps(constant) odata = salt.payload.loads(sdata) assert odata == constant + + +def test_package(): + value = salt.utils.msgpack.dumps("test") + assert salt.payload.package("test") == value + + +def test_unpackage(): + value = [b"test"] + packed = salt.utils.msgpack.dumps(value) + assert salt.payload.unpackage(packed) == value + + +def test_format_payload(): + expected = salt.utils.msgpack.dumps( + {"enc": [b"test"], "load": {"kwargs": {"foo": "bar"}}} + ) + enc = [b"test"] + kwargs = {"foo": "bar"} + payload = salt.payload.format_payload(enc=enc, kwargs=kwargs) + assert payload == expected + + +def test_SREQ_init(): + req = salt.payload.SREQ( + "tcp://salt:3434", id_=b"id", serial="msgpack", linger=1, opts=None + ) + assert req.master == "tcp://salt:3434" + assert req.id_ == b"id" + assert req.linger == 1 + assert req.opts is None + 
assert isinstance(req.context, zmq.Context) + assert isinstance(req.poller, zmq.Poller) + + +def test_SREQ_socket(): + req = salt.payload.SREQ( + "tcp://salt:3434", id_=b"id", serial="msgpack", linger=1, opts=None + ) + # socket() is a property that auto creates a socket if a socket is wanted. + socket = req.socket + assert isinstance(socket, zmq.Socket) + + req = salt.payload.SREQ( + "tcp://[2001:db8:85a3:8d3:1319:8a2e:370:7348]:3434", + id_=b"id", + serial="msgpack", + linger=1, + opts=None, + ) + # socket() is a property that auto creates a socket if a socket is wanted. + socket = req.socket + assert isinstance(socket, zmq.Socket) + + req = salt.payload.SREQ( + "tcp://salt:3434", id_=None, serial="msgpack", linger=1, opts=None + ) + # socket() is a property that auto creates a socket if a socket is wanted. + socket = req.socket + assert isinstance(socket, zmq.Socket) + + +def test_SREQ_set_tcp_keepalive(): + opts = {"tcp_keepalive": True} + req = salt.payload.SREQ( + "tcp://salt:3434", id_=b"id", serial="msgpack", linger=1, opts=opts + ) + socket = req.socket + assert req._socket.getsockopt(zmq.TCP_KEEPALIVE) + + opts = {"tcp_keepalive_idle": 100} + req = salt.payload.SREQ( + "tcp://salt:3434", id_=b"id", serial="msgpack", linger=1, opts=opts + ) + socket = req.socket + assert req._socket.getsockopt(zmq.TCP_KEEPALIVE_IDLE) == 100 + + opts = {"tcp_keepalive_cnt": 100} + req = salt.payload.SREQ( + "tcp://salt:3434", id_=b"id", serial="msgpack", linger=1, opts=opts + ) + socket = req.socket + assert req._socket.getsockopt(zmq.TCP_KEEPALIVE_CNT) == 100 + + opts = {"tcp_keepalive_intvl": 100} + req = salt.payload.SREQ( + "tcp://salt:3434", id_=b"id", serial="msgpack", linger=1, opts=opts + ) + socket = req.socket + assert req._socket.getsockopt(zmq.TCP_KEEPALIVE_INTVL) == 100 diff --git a/tests/pytests/unit/test_scripts.py b/tests/pytests/unit/test_scripts.py new file mode 100644 index 000000000000..a636275e0de0 --- /dev/null +++ b/tests/pytests/unit/test_scripts.py 
@@ -0,0 +1,80 @@ +import pytest + +from salt.scripts import _pip_args, _pip_environment + + +def test_pip_environment_no_pypath(): + """ + We add PYTHONPATH to environment when it doesn't already exist. + """ + extras = "/tmp/footest" + env = {"HOME": "/home/dwoz"} + pipenv = _pip_environment(env, extras) + assert "PYTHONPATH" not in env + assert "PYTHONPATH" in pipenv + assert pipenv["PYTHONPATH"] == "/tmp/footest" + + +@pytest.mark.skip_on_windows(reason="Specific to *nix systems") +def test_pip_environment_pypath_nix(): + """ + We update PYTHONPATH in environment when it's already set. + """ + extras = "/tmp/footest" + env = { + "HOME": "/home/dwoz", + "PYTHONPATH": "/usr/local/lib/python3.10/site-packages", + } + assert "PYTHONPATH" in env + pipenv = _pip_environment(env, extras) + assert env["PYTHONPATH"] == "/usr/local/lib/python3.10/site-packages" + assert "PYTHONPATH" in pipenv + assert ( + pipenv["PYTHONPATH"] == "/tmp/footest:/usr/local/lib/python3.10/site-packages" + ) + + +@pytest.mark.skip_unless_on_windows(reason="Specific to win32 systems") +def test_pip_environment_pypath_win(): + """ + We update PYTHONPATH in environment when it's already set.
+ """ + extras = "/tmp/footest" + env = { + "HOME": "/home/dwoz", + "PYTHONPATH": "/usr/local/lib/python3.10/site-packages", + } + assert "PYTHONPATH" in env + pipenv = _pip_environment(env, extras) + assert env["PYTHONPATH"] == "/usr/local/lib/python3.10/site-packages" + assert "PYTHONPATH" in pipenv + assert ( + pipenv["PYTHONPATH"] == "/tmp/footest;/usr/local/lib/python3.10/site-packages" + ) + + +def test_pip_args_not_installing(): + extras = "/tmp/footest" + args = ["list"] + pargs = _pip_args(args, extras) + assert pargs is not args + assert args == ["list"] + assert pargs == ["list"] + + +def test_pip_args_installing_without_target(): + extras = "/tmp/footest" + args = ["install"] + pargs = _pip_args(args, extras) + assert pargs is not args + assert args == ["install"] + assert pargs == ["install", "--target=/tmp/footest"] + + +def test_pip_args_installing_with_target(): + extras = "/tmp/footest" + args = ["install", "--target=/tmp/bartest"] + pargs = _pip_args(args, extras) + assert pargs is not args + assert args == ["install", "--target=/tmp/bartest"] + assert pargs == ["install", "--target=/tmp/bartest"] diff --git a/tests/pytests/unit/test_template.py b/tests/pytests/unit/test_template.py new file mode 100644 index 000000000000..69d73fe5c477 --- /dev/null +++ b/tests/pytests/unit/test_template.py @@ -0,0 +1,122 @@ +import io + +import pytest + +import salt.state +from salt import template +from tests.support.mock import MagicMock, patch + + +@pytest.fixture +def render_dict(): + return { + "jinja": "fake_jinja_func", + "json": "fake_json_func", + "mako": "fake_make_func", + } + + +def test_compile_template_str_mkstemp_cleanup(minion_opts): + minion_opts["file_client"] = "local" + with patch("os.unlink", MagicMock()) as unlinked: + _state = salt.state.State(minion_opts) + ret = template.compile_template_str( + "{'val':'test'}", + _state.rend, + _state.opts["renderer"], + _state.opts["renderer_blacklist"], + _state.opts["renderer_whitelist"], + ) + assert 
ret == {"val": "test"} + unlinked.assert_called_once() + + +def test_compile_template_bad_type(): + """ + Test to ensure that unsupported types cannot be passed to the template compiler + """ + ret = template.compile_template(["1", "2", "3"], None, None, None, None) + assert ret == {} + + +def test_compile_template_preserves_windows_newlines(): + """ + Test to ensure that a file with Windows newlines, when rendered by a + template renderer, does not eat the CR character. + """ + + def _get_rend(renderer, value): + """ + We need a new MagicMock each time since we're dealing with StringIO + objects which are read like files. + """ + return {renderer: MagicMock(return_value=io.StringIO(value))} + + input_data_windows = "foo\r\nbar\r\nbaz\r\n" + input_data_non_windows = input_data_windows.replace("\r\n", "\n") + renderer = "test" + blacklist = whitelist = [] + + ret = template.compile_template( + ":string:", + _get_rend(renderer, input_data_non_windows), + renderer, + blacklist, + whitelist, + input_data=input_data_windows, + ).read() + # Even though the mocked renderer returned a string without the windows + # newlines, the compiled template should still have them. + assert ret == input_data_windows + + # Now test that we aren't adding them in unnecessarily. + ret = template.compile_template( + ":string:", + _get_rend(renderer, input_data_non_windows), + renderer, + blacklist, + whitelist, + input_data=input_data_non_windows, + ).read() + assert ret == input_data_non_windows + + # Finally, ensure that we're not unnecessarily replacing the \n with + # \r\n in the event that the renderer returned a string with the + # windows newlines intact. 
+ ret = template.compile_template( + ":string:", + _get_rend(renderer, input_data_windows), + renderer, + blacklist, + whitelist, + input_data=input_data_windows, + ).read() + assert ret == input_data_windows + + +def test_check_render_pipe_str(render_dict): + """ + Check that all renderers specified in the pipe string are available. + """ + ret = template.check_render_pipe_str("jinja|json", render_dict, None, None) + assert ("fake_jinja_func", "") in ret + assert ("fake_json_func", "") in ret + assert ("OBVIOUSLY_NOT_HERE", "") not in ret + + +def test_check_renderer_blacklisting(render_dict): + """ + Check that all renderers specified in the pipe string are available. + """ + ret = template.check_render_pipe_str("jinja|json", render_dict, ["jinja"], None) + assert ret == [("fake_json_func", "")] + ret = template.check_render_pipe_str("jinja|json", render_dict, None, ["jinja"]) + assert ret == [("fake_jinja_func", "")] + ret = template.check_render_pipe_str( + "jinja|json", render_dict, ["jinja"], ["jinja"] + ) + assert ret == [] + ret = template.check_render_pipe_str( + "jinja|json", render_dict, ["jinja"], ["jinja", "json"] + ) + assert ret == [("fake_json_func", "")] diff --git a/tests/pytests/unit/test_version.py b/tests/pytests/unit/test_version.py index b5f2b105a70e..1cb94c619cad 100644 --- a/tests/pytests/unit/test_version.py +++ b/tests/pytests/unit/test_version.py @@ -187,7 +187,7 @@ def test_string_new_version_minor(): ver = SaltStackVersion(major=maj_ver, minor=min_ver) assert ver.minor == min_ver assert not ver.bugfix - assert ver.string == "{}.{}".format(maj_ver, min_ver) + assert ver.string == f"{maj_ver}.{min_ver}" def test_string_new_version_minor_as_string(): @@ -201,13 +201,13 @@ def test_string_new_version_minor_as_string(): ver = SaltStackVersion(major=maj_ver, minor=min_ver) assert ver.minor == int(min_ver) assert not ver.bugfix - assert ver.string == "{}.{}".format(maj_ver, min_ver) + assert ver.string == f"{maj_ver}.{min_ver}" # This only 
seems to happen on a cloned repo without its tags maj_ver = "3000" min_ver = "" ver = SaltStackVersion(major=maj_ver, minor=min_ver) - assert ver.minor is None, "{!r} is not {!r}".format(ver.minor, min_ver) + assert ver.minor is None, f"{ver.minor!r} is not {min_ver!r}" assert not ver.bugfix assert ver.string == maj_ver @@ -222,7 +222,7 @@ def test_string_old_version(): min_ver = "2" ver = SaltStackVersion(major=maj_ver, minor=min_ver) assert ver.bugfix == 0 - assert ver.string == "{}.{}.0".format(maj_ver, min_ver) + assert ver.string == f"{maj_ver}.{min_ver}.0" @pytest.mark.parametrize( @@ -433,7 +433,7 @@ def test_system_version_linux(): """ with patch( - "distro.linux_distribution", + "salt.utils.platform.linux_distribution", MagicMock(return_value=("Manjaro Linux", "20.0.2", "Lysia")), ): versions = [item for item in system_information()] @@ -441,7 +441,7 @@ def test_system_version_linux(): assert version in versions with patch( - "distro.linux_distribution", + "salt.utils.platform.linux_distribution", MagicMock(return_value=("Debian GNU/Linux", "9", "stretch")), ): versions = [item for item in system_information()] @@ -449,7 +449,7 @@ def test_system_version_linux(): assert version in versions with patch( - "distro.linux_distribution", + "salt.utils.platform.linux_distribution", MagicMock(return_value=("Debian GNU/Linux", "10", "buster")), ): versions = [item for item in system_information()] @@ -457,7 +457,7 @@ def test_system_version_linux(): assert version in versions with patch( - "distro.linux_distribution", + "salt.utils.platform.linux_distribution", MagicMock(return_value=("CentOS Linux", "7", "Core")), ): versions = [item for item in system_information()] @@ -465,7 +465,7 @@ def test_system_version_linux(): assert version in versions with patch( - "distro.linux_distribution", + "salt.utils.platform.linux_distribution", MagicMock(return_value=("CentOS Linux", "8", "Core")), ): versions = [item for item in system_information()] @@ -473,7 +473,7 @@ def 
test_system_version_linux(): assert version in versions with patch( - "distro.linux_distribution", + "salt.utils.platform.linux_distribution", MagicMock(return_value=("OpenSUSE Leap", "15.1", "")), ): versions = [item for item in system_information()] @@ -537,6 +537,8 @@ def test_versions_report_no_extensions_available(): ("3000.1", "3000.1", "Neon"), ("3005", "3005", "Phosphorus"), ("3006", "3006.0", "Sulfur"), + ("3006.0", "3006.0", "Sulfur"), + ("3006.1", "3006.1", "Sulfur"), ("3015.1", "3015.1", "Manganese"), ("3109.3", "3109.3", None), ], diff --git a/tests/pytests/unit/transport/test_base.py b/tests/pytests/unit/transport/test_base.py new file mode 100644 index 000000000000..da5a6fa26159 --- /dev/null +++ b/tests/pytests/unit/transport/test_base.py @@ -0,0 +1,21 @@ +""" +Unit tests for salt.transport.base. +""" +import pytest + +import salt.transport.base + +pytestmark = [ + pytest.mark.core_test, +] + + +def test_unclosed_warning(): + + transport = salt.transport.base.Transport() + assert transport._closing is False + assert transport._connect_called is False + transport.connect() + assert transport._connect_called is True + with pytest.warns(salt.transport.base.TransportWarning): + del transport diff --git a/tests/pytests/unit/transport/test_ipc.py b/tests/pytests/unit/transport/test_ipc.py index 8f41f9d723ad..5a6878361615 100644 --- a/tests/pytests/unit/transport/test_ipc.py +++ b/tests/pytests/unit/transport/test_ipc.py @@ -6,6 +6,10 @@ import salt.utils.asynchronous import salt.utils.platform +pytestmark = [ + pytest.mark.core_test, +] + def test_ipc_connect_in_async_methods(): "The connect method is in IPCMessageSubscriber's async_methods property" diff --git a/tests/pytests/unit/transport/test_tcp.py b/tests/pytests/unit/transport/test_tcp.py index e56468526f6c..17c7e0727495 100644 --- a/tests/pytests/unit/transport/test_tcp.py +++ b/tests/pytests/unit/transport/test_tcp.py @@ -9,9 +9,14 @@ import salt.channel.server import salt.exceptions import 
salt.ext.tornado +import salt.ext.tornado.concurrent import salt.transport.tcp from tests.support.mock import MagicMock, PropertyMock, patch +pytestmark = [ + pytest.mark.core_test, +] + @pytest.fixture def fake_keys(): @@ -479,3 +484,185 @@ def test_presence_removed_on_stream_closed(): io_loop.run_sync(functools.partial(server.publish_payload, package, None)) server.remove_presence_callback.assert_called_with(client) + + +async def test_tcp_pub_client_decode_dict(minion_opts, io_loop): + dmsg = {"meh": "bah"} + client = salt.transport.tcp.TCPPubClient(minion_opts, io_loop) + assert dmsg == await client._decode_messages(dmsg) + + +async def test_tcp_pub_client_decode_msgpack(minion_opts, io_loop): + dmsg = {"meh": "bah"} + msg = salt.payload.dumps(dmsg) + client = salt.transport.tcp.TCPPubClient(minion_opts, io_loop) + assert dmsg == await client._decode_messages(msg) + + +def test_tcp_pub_client_close(minion_opts, io_loop): + client = salt.transport.tcp.TCPPubClient(minion_opts, io_loop) + + message_client = MagicMock() + + client.message_client = message_client + client.close() + assert client._closing is True + assert client.message_client is None + client.close() + message_client.close.assert_called_once_with() + + +async def test_pub_server__stream_read(master_opts, io_loop): + + messages = [salt.transport.frame.frame_msg({"foo": "bar"})] + + class Stream: + def __init__(self, messages): + self.messages = messages + + def read_bytes(self, *args, **kwargs): + if self.messages: + msg = self.messages.pop(0) + future = salt.ext.tornado.concurrent.Future() + future.set_result(msg) + return future + raise salt.ext.tornado.iostream.StreamClosedError() + + client = MagicMock() + client.stream = Stream(messages) + client.address = "client address" + server = salt.transport.tcp.PubServer(master_opts, io_loop) + await server._stream_read(client) + client.close.assert_called_once() + + +async def test_pub_server__stream_read_exception(master_opts, io_loop): + client =
MagicMock() + client.stream = MagicMock() + client.stream.read_bytes = MagicMock( + side_effect=[ + Exception("Something went wrong"), + salt.ext.tornado.iostream.StreamClosedError(), + ] + ) + client.address = "client address" + server = salt.transport.tcp.PubServer(master_opts, io_loop) + await server._stream_read(client) + client.close.assert_called_once() + + +async def test_salt_message_server(master_opts): + + received = [] + + def handler(stream, body, header): + + received.append(body) + + server = salt.transport.tcp.SaltMessageServer(handler) + msg = {"foo": "bar"} + messages = [salt.transport.frame.frame_msg(msg)] + + class Stream: + def __init__(self, messages): + self.messages = messages + + def read_bytes(self, *args, **kwargs): + if self.messages: + msg = self.messages.pop(0) + future = salt.ext.tornado.concurrent.Future() + future.set_result(msg) + return future + raise salt.ext.tornado.iostream.StreamClosedError() + + stream = Stream(messages) + address = "client address" + + await server.handle_stream(stream, address) + + # Let loop iterate so callback gets called + await salt.ext.tornado.gen.sleep(0.01) + + assert received + assert [msg] == received + + +async def test_salt_message_server_exception(master_opts, io_loop): + received = [] + + def handler(stream, body, header): + + received.append(body) + + stream = MagicMock() + stream.read_bytes = MagicMock( + side_effect=[ + Exception("Something went wrong"), + ] + ) + address = "client address" + server = salt.transport.tcp.SaltMessageServer(handler) + await server.handle_stream(stream, address) + stream.close.assert_called_once() + + +async def test_message_client_stream_return_exception(minion_opts, io_loop): + msg = {"foo": "bar"} + payload = salt.transport.frame.frame_msg(msg) + future = salt.ext.tornado.concurrent.Future() + future.set_result(payload) + client = salt.transport.tcp.MessageClient( + minion_opts, + "127.0.0.1", + 12345, + connect_callback=MagicMock(), + 
disconnect_callback=MagicMock(), + ) + client._stream = MagicMock() + client._stream.read_bytes.side_effect = [ + future, + ] + try: + io_loop.add_callback(client._stream_return) + await salt.ext.tornado.gen.sleep(0.01) + client.close() + await salt.ext.tornado.gen.sleep(0.01) + assert client._stream is None + finally: + client.close() + + +def test_tcp_pub_server_pre_fork(master_opts): + process_manager = MagicMock() + server = salt.transport.tcp.TCPPublishServer(master_opts) + server.pre_fork(process_manager) + + +async def test_pub_server_publish_payload(master_opts, io_loop): + server = salt.transport.tcp.PubServer(master_opts, io_loop=io_loop) + package = {"foo": "bar"} + topic_list = ["meh"] + future = salt.ext.tornado.concurrent.Future() + future.set_result(None) + client = MagicMock() + client.stream = MagicMock() + client.stream.write.side_effect = [future] + client.id_ = "meh" + server.clients = [client] + await server.publish_payload(package, topic_list) + client.stream.write.assert_called_once() + + +async def test_pub_server_publish_payload_closed_stream(master_opts, io_loop): + server = salt.transport.tcp.PubServer(master_opts, io_loop=io_loop) + package = {"foo": "bar"} + topic_list = ["meh"] + client = MagicMock() + client.stream = MagicMock() + client.stream.write.side_effect = [ + salt.ext.tornado.iostream.StreamClosedError("mock") + ] + client.id_ = "meh" + server.clients = {client} + await server.publish_payload(package, topic_list) + assert server.clients == set() diff --git a/tests/pytests/unit/transport/test_zeromq.py b/tests/pytests/unit/transport/test_zeromq.py index 6a4f177d1d9e..fe8c39438275 100644 --- a/tests/pytests/unit/transport/test_zeromq.py +++ b/tests/pytests/unit/transport/test_zeromq.py @@ -11,6 +11,7 @@ import time import uuid +import msgpack import pytest import salt.channel.client @@ -18,6 +19,7 @@ import salt.config import salt.crypt import salt.exceptions +import salt.ext.tornado.concurrent import salt.ext.tornado.gen 
import salt.ext.tornado.ioloop import salt.transport.zeromq @@ -40,6 +42,12 @@ log = logging.getLogger(__name__) + +pytestmark = [ + pytest.mark.core_test, +] + + MASTER_PRIV_KEY = """ -----BEGIN RSA PRIVATE KEY----- MIIEogIBAAKCAQEAoAsMPt+4kuIG6vKyw9r3+OuZrVBee/2vDdVetW+Js5dTlgrJ @@ -445,7 +453,7 @@ def test_payload_handling_exception(temp_salt_minion, temp_salt_master): with MockSaltMinionMaster(temp_salt_minion, temp_salt_master) as minion_master: with patch.object(minion_master.mock, "_handle_payload_hook") as _mock: _mock.side_effect = Exception() - ret = minion_master.channel.send({}, timeout=2, tries=1) + ret = minion_master.channel.send({}, timeout=5, tries=1) assert ret == "Some exception handling minion payload" @@ -1398,3 +1406,123 @@ async def test_req_chan_auth_v2_new_minion_without_master_pub(pki_dir, io_loop): assert "sig" in ret ret = client.auth.handle_signin_response(signin_payload, ret) assert ret == "retry" + + +async def test_req_server_garbage_request(io_loop): + """ + Validate invalid msgpack messages will not raise exceptions in the + RequestServer's message handler.
+ """ + opts = salt.config.master_config("") + opts["zmq_monitor"] = True + request_server = salt.transport.zeromq.RequestServer(opts) + + def message_handler(payload): + return payload + + request_server.post_fork(message_handler, io_loop) + + byts = msgpack.dumps({"foo": "bar"}) + badbyts = byts[:3] + b"^M" + byts[3:] + + valid_response = msgpack.dumps({"msg": "bad load"}) + + stream = MagicMock() + request_server.stream = stream + + try: + await request_server.handle_message(stream, badbyts) + except Exception as exc: # pylint: disable=broad-except + pytest.fail("Exception was raised {}".format(exc)) + + request_server.stream.send.assert_called_once_with(valid_response) + + +async def test_req_chan_bad_payload_to_decode(pki_dir, io_loop): + opts = { + "master_uri": "tcp://127.0.0.1:4506", + "interface": "127.0.0.1", + "ret_port": 4506, + "ipv6": False, + "sock_dir": ".", + "pki_dir": str(pki_dir.joinpath("minion")), + "id": "minion", + "__role": "minion", + "keysize": 4096, + "max_minions": 0, + "auto_accept": False, + "open_mode": False, + "key_pass": None, + "publish_port": 4505, + "auth_mode": 1, + "acceptance_wait_time": 3, + "acceptance_wait_time_max": 3, + } + SMaster.secrets["aes"] = { + "secret": multiprocessing.Array( + ctypes.c_char, + salt.utils.stringutils.to_bytes(salt.crypt.Crypticle.generate_key_string()), + ), + "reload": salt.crypt.Crypticle.generate_key_string, + } + master_opts = dict(opts, pki_dir=str(pki_dir.joinpath("master"))) + master_opts["master_sign_pubkey"] = False + server = salt.channel.server.ReqServerChannel.factory(master_opts) + + with pytest.raises(salt.exceptions.SaltDeserializationError): + server._decode_payload(None) + with pytest.raises(salt.exceptions.SaltDeserializationError): + server._decode_payload({}) + with pytest.raises(salt.exceptions.SaltDeserializationError): + server._decode_payload(12345) + + +async def test_client_timeout_msg(minion_opts): + client = salt.transport.zeromq.AsyncReqMessageClient( + minion_opts, 
"tcp://127.0.0.1:4506" + ) + client.connect() + try: + with pytest.raises(salt.exceptions.SaltReqTimeoutError): + await client.send({"meh": "bah"}, 1) + finally: + client.close() + + +def test_pub_client_init(minion_opts, io_loop): + minion_opts["id"] = "minion" + minion_opts["__role"] = "syndic" + minion_opts["master_ip"] = "127.0.0.1" + minion_opts["zmq_filtering"] = True + minion_opts["zmq_monitor"] = True + client = salt.transport.zeromq.PublishClient(minion_opts, io_loop) + client.send(b"asf") + client.close() + + +async def test_unclosed_request_client(minion_opts, io_loop): + minion_opts["master_uri"] = "tcp://127.0.0.1:4506" + client = salt.transport.zeromq.RequestClient(minion_opts, io_loop) + await client.connect() + try: + assert client._closing is False + with pytest.warns(salt.transport.base.TransportWarning): + client.__del__() + finally: + client.close() + + +async def test_unclosed_publish_client(minion_opts, io_loop): + minion_opts["id"] = "minion" + minion_opts["__role"] = "minion" + minion_opts["master_ip"] = "127.0.0.1" + minion_opts["zmq_filtering"] = True + minion_opts["zmq_monitor"] = True + client = salt.transport.zeromq.PublishClient(minion_opts, io_loop) + await client.connect(2121) + try: + assert client._closing is False + with pytest.warns(salt.transport.base.TransportWarning): + client.__del__() + finally: + client.close() diff --git a/tests/pytests/unit/utils/event/test_event.py b/tests/pytests/unit/utils/event/test_event.py index 5ab3d7ffbcf6..f4b6c1599966 100644 --- a/tests/pytests/unit/utils/event/test_event.py +++ b/tests/pytests/unit/utils/event/test_event.py @@ -1,5 +1,8 @@ import hashlib +import os +import stat import time +from pathlib import Path import pytest import zmq.eventloop.ioloop @@ -35,7 +38,7 @@ def sock_dir(tmp_path): def _assert_got_event(evt, data, msg=None, expected_failure=False): assert evt is not None, msg for key in data: - assert key in evt, "{}: Key {} missing".format(msg, key) + assert key in evt, 
f"{msg}: Key {key} missing" assertMsg = "{0}: Key {1} value mismatch, {2} != {3}" assertMsg = assertMsg.format(msg, key, data[key], evt[key]) if not expected_failure: @@ -56,8 +59,8 @@ def test_minion_event(sock_dir): :10 ] with salt.utils.event.MinionEvent(opts, listen=False) as me: - assert me.puburi == str(sock_dir / "minion_event_{}_pub.ipc".format(id_hash)) - assert me.pulluri == str(sock_dir / "minion_event_{}_pull.ipc".format(id_hash)) + assert me.puburi == str(sock_dir / f"minion_event_{id_hash}_pub.ipc") + assert me.pulluri == str(sock_dir / f"minion_event_{id_hash}_pull.ipc") def test_minion_event_tcp_ipc_mode(): @@ -70,8 +73,8 @@ def test_minion_event_tcp_ipc_mode(): def test_minion_event_no_id(sock_dir): with salt.utils.event.MinionEvent(dict(sock_dir=str(sock_dir)), listen=False) as me: id_hash = hashlib.sha256(salt.utils.stringutils.to_bytes("")).hexdigest()[:10] - assert me.puburi == str(sock_dir / "minion_event_{}_pub.ipc".format(id_hash)) - assert me.pulluri == str(sock_dir / "minion_event_{}_pull.ipc".format(id_hash)) + assert me.puburi == str(sock_dir / f"minion_event_{id_hash}_pub.ipc") + assert me.pulluri == str(sock_dir / f"minion_event_{id_hash}_pull.ipc") @pytest.mark.slow_test @@ -253,9 +256,9 @@ def test_event_many(sock_dir): with eventpublisher_process(str(sock_dir)): with salt.utils.event.MasterEvent(str(sock_dir), listen=True) as me: for i in range(500): - me.fire_event({"data": "{}".format(i)}, "testevents") + me.fire_event({"data": f"{i}"}, "testevents") evt = me.get_event(tag="testevents") - _assert_got_event(evt, {"data": "{}".format(i)}, "Event {}".format(i)) + _assert_got_event(evt, {"data": f"{i}"}, f"Event {i}") @pytest.mark.slow_test @@ -265,10 +268,10 @@ def test_event_many_backlog(sock_dir): with salt.utils.event.MasterEvent(str(sock_dir), listen=True) as me: # Must not exceed zmq HWM for i in range(500): - me.fire_event({"data": "{}".format(i)}, "testevents") + me.fire_event({"data": f"{i}"}, "testevents") for i in 
range(500): evt = me.get_event(tag="testevents") - _assert_got_event(evt, {"data": "{}".format(i)}, "Event {}".format(i)) + _assert_got_event(evt, {"data": f"{i}"}, f"Event {i}") # Test the fire_master function. As it wraps the underlying fire_event, @@ -297,7 +300,7 @@ def test_connect_pull_should_debug_log_on_StreamClosedError(): event = SaltEvent(node=None) with patch.object(event, "pusher") as mock_pusher: with patch.object( - salt.utils.event.log, "debug", auto_spec=True + salt.utils.event.log, "debug", autospec=True ) as mock_log_debug: mock_pusher.connect.side_effect = ( salt.ext.tornado.iostream.StreamClosedError @@ -314,10 +317,10 @@ def test_connect_pull_should_error_log_on_other_errors(error): event = SaltEvent(node=None) with patch.object(event, "pusher") as mock_pusher: with patch.object( - salt.utils.event.log, "debug", auto_spec=True + salt.utils.event.log, "debug", autospec=True ) as mock_log_debug: with patch.object( - salt.utils.event.log, "error", auto_spec=True + salt.utils.event.log, "error", autospec=True ) as mock_log_error: mock_pusher.connect.side_effect = error event.connect_pull() @@ -327,3 +330,13 @@ def test_connect_pull_should_error_log_on_other_errors(error): assert not isinstance( call.args[1], salt.ext.tornado.iostream.StreamClosedError ) + + +@pytest.mark.slow_test +def test_master_pub_permissions(sock_dir): + with eventpublisher_process(str(sock_dir)): + p = Path(str(sock_dir)) / "master_event_pub.ipc" + mode = os.lstat(p).st_mode + assert bool(os.lstat(p).st_mode & stat.S_IRUSR) + assert not bool(os.lstat(p).st_mode & stat.S_IRGRP) + assert not bool(os.lstat(p).st_mode & stat.S_IROTH) diff --git a/tests/pytests/unit/utils/jinja/test_custom_extensions.py b/tests/pytests/unit/utils/jinja/test_custom_extensions.py index 4d004230fcbb..d213b69709d4 100644 --- a/tests/pytests/unit/utils/jinja/test_custom_extensions.py +++ b/tests/pytests/unit/utils/jinja/test_custom_extensions.py @@ -46,7 +46,6 @@ def minion_opts(tmp_path, 
minion_opts): "file_roots": {"test": [str(tmp_path / "templates")]}, "pillar_roots": {"test": [str(tmp_path / "templates")]}, "fileserver_backend": ["roots"], - "hash_type": "md5", "extension_modules": os.path.join( os.path.dirname(os.path.abspath(__file__)), "extmods" ), @@ -1041,6 +1040,7 @@ def test_method_call(minion_opts, local_salt): assert rendered == "None" +@pytest.mark.skip_on_fips_enabled_platform def test_md5(minion_opts, local_salt): """ Test the `md5` Jinja filter. diff --git a/tests/pytests/unit/utils/jinja/test_get_template.py b/tests/pytests/unit/utils/jinja/test_get_template.py index 35fc188b812d..cdba34fa1716 100644 --- a/tests/pytests/unit/utils/jinja/test_get_template.py +++ b/tests/pytests/unit/utils/jinja/test_get_template.py @@ -61,7 +61,6 @@ def minion_opts(tmp_path, minion_opts): "file_roots": {"test": [str(tmp_path / "files" / "test")]}, "pillar_roots": {"test": [str(tmp_path / "files" / "test")]}, "fileserver_backend": ["roots"], - "hash_type": "md5", "extension_modules": os.path.join( os.path.dirname(os.path.abspath(__file__)), "extmods" ), diff --git a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py index 38c5ce5b7249..be68660bccf2 100644 --- a/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py +++ b/tests/pytests/unit/utils/jinja/test_salt_cache_loader.py @@ -2,11 +2,10 @@ Tests for salt.utils.jinja """ -import copy import os import pytest -from jinja2 import Environment, exceptions +from jinja2 import Environment, TemplateNotFound, exceptions # dateutils is needed so that the strftime jinja filter is loaded import salt.utils.dateutils # pylint: disable=unused-import @@ -15,7 +14,7 @@ import salt.utils.stringutils # pylint: disable=unused-import import salt.utils.yaml # pylint: disable=unused-import from salt.utils.jinja import SaltCacheLoader -from tests.support.mock import Mock, patch +from tests.support.mock import MagicMock, call, patch @pytest.fixture @@ 
-25,7 +24,7 @@ def minion_opts(tmp_path, minion_opts): "file_buffer_size": 1048576, "cachedir": str(tmp_path), "file_roots": {"test": [str(tmp_path / "files" / "test")]}, - "pillar_roots": {"test": [str(tmp_path / "files" / "test")]}, + "pillar_roots": {"test": [str(tmp_path / "pillar" / "test")]}, "extension_modules": os.path.join( os.path.dirname(os.path.abspath(__file__)), "extmods" ), @@ -108,7 +107,7 @@ def run_command(opts=None, saltenv="base", **kwargs): if opts is None: opts = minion_opts mock_file_client.opts = opts - loader = SaltCacheLoader(opts, saltenv, _file_client=mock_file_client) + loader = SaltCacheLoader(opts, saltenv, _file_client=mock_file_client, **kwargs) # Create a mock file client and attach it to the loader return loader @@ -128,10 +127,27 @@ def test_searchpath(minion_opts, get_loader, tmp_path): """ The searchpath is based on the cachedir option and the saltenv parameter """ - opts = copy.deepcopy(minion_opts) - opts.update({"cachedir": str(tmp_path)}) - loader = get_loader(opts=minion_opts, saltenv="test") - assert loader.searchpath == [str(tmp_path / "files" / "test")] + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + assert loader.searchpath == minion_opts["file_roots"][saltenv] + + +def test_searchpath_pillar_rend(minion_opts, get_loader): + """ + The searchpath is based on the pillar_rend if it is True + """ + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv, pillar_rend=True) + assert loader.searchpath == minion_opts["pillar_roots"][saltenv] + + +def test_searchpath_bad_pillar_rend(minion_opts, get_loader): + """ + The searchpath is based on the pillar_rend if it is True + """ + saltenv = "bad_env" + loader = get_loader(opts=minion_opts, saltenv=saltenv, pillar_rend=True) + assert loader.searchpath == [] def test_mockclient(minion_opts, template_dir, hello_simple, get_loader): @@ -208,7 +224,7 @@ def test_cached_file_client(get_loader, minion_opts): """ Multiple instantiations of 
SaltCacheLoader use the cached file client """ - with patch("salt.channel.client.ReqChannel.factory", Mock()): + with patch("salt.channel.client.ReqChannel.factory", MagicMock()): loader_a = SaltCacheLoader(minion_opts) loader_b = SaltCacheLoader(minion_opts) assert loader_a._file_client is loader_b._file_client @@ -224,14 +240,107 @@ def test_file_client_kwarg(minion_opts, mock_file_client): assert loader._file_client is mock_file_client -def test_cache_loader_shutdown(minion_opts, mock_file_client): +def test_cache_loader_passed_file_client(minion_opts, mock_file_client): """ The shudown method can be called without raising an exception when the file_client does not have a destroy method """ - assert not hasattr(mock_file_client, "destroy") - mock_file_client.opts = minion_opts - loader = SaltCacheLoader(minion_opts, _file_client=mock_file_client) - assert loader._file_client is mock_file_client - # Shutdown method should not raise any exceptions - loader.shutdown() + # Test SaltCacheLoader creating and destroying the file client created + file_client = MagicMock() + with patch("salt.fileclient.get_file_client", return_value=file_client): + loader = SaltCacheLoader(minion_opts) + assert loader._file_client is None + with loader: + assert loader._file_client is file_client + assert loader._file_client is None + assert file_client.mock_calls == [call.destroy()] + + # Test SaltCacheLoader reusing the file client passed + file_client = MagicMock() + file_client.opts = {"file_roots": minion_opts["file_roots"]} + with patch("salt.fileclient.get_file_client", return_value=MagicMock()): + loader = SaltCacheLoader(minion_opts, _file_client=file_client) + assert loader._file_client is file_client + with loader: + assert loader._file_client is file_client + assert loader._file_client is file_client + assert file_client.mock_calls == [] + + # Test SaltCacheLoader creating a client even though a file client was + # passed because the "file_roots" option is different, and, as 
such, + # the destroy method on the new file client is called, but not on the + # file client passed in. + file_client = MagicMock() + file_client.opts = {"file_roots": ""} + new_file_client = MagicMock() + with patch("salt.fileclient.get_file_client", return_value=new_file_client): + loader = SaltCacheLoader(minion_opts, _file_client=file_client) + assert loader._file_client is file_client + with loader: + assert loader._file_client is not file_client + assert loader._file_client is new_file_client + assert loader._file_client is None + assert file_client.mock_calls == [] + assert new_file_client.mock_calls == [call.destroy()] + + +def test_check_cache_miss(get_loader, minion_opts, hello_simple): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with patch.object(loader, "cached", []): + with patch.object(loader, "cache_file") as cache_mock: + loader.check_cache(str(hello_simple)) + cache_mock.assert_called_once() + + +def test_check_cache_hit(get_loader, minion_opts, hello_simple): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with patch.object(loader, "cached", [str(hello_simple)]): + with patch.object(loader, "cache_file") as cache_mock: + loader.check_cache(str(hello_simple)) + cache_mock.assert_not_called() + + +def test_get_source_no_environment( + get_loader, minion_opts, relative_rhello, relative_dir +): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with pytest.raises(TemplateNotFound): + loader.get_source(None, str(".." / relative_rhello.relative_to(relative_dir))) + + +def test_get_source_relative_no_tpldir( + get_loader, minion_opts, relative_rhello, relative_dir +): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with pytest.raises(TemplateNotFound): + loader.get_source( + MagicMock(globals={}), str(".." 
/ relative_rhello.relative_to(relative_dir)) + ) + + +def test_get_source_template_doesnt_exist(get_loader, minion_opts): + saltenv = "test" + fake_path = "fake_path" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + with pytest.raises(TemplateNotFound): + loader.get_source(None, fake_path) + + +def test_get_source_template_removed(get_loader, minion_opts, hello_simple): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + contents, filepath, uptodate = loader.get_source(None, str(hello_simple)) + hello_simple.unlink() + assert uptodate() is False + + +def test_no_destroy_method_on_file_client(get_loader, minion_opts): + saltenv = "test" + loader = get_loader(opts=minion_opts, saltenv=saltenv) + loader._close_file_client = True + # This should fail silently, thus no error catching + loader.destroy() diff --git a/tests/pytests/unit/utils/parsers/test_daemon_mixin.py b/tests/pytests/unit/utils/parsers/test_daemon_mixin.py new file mode 100644 index 000000000000..ea835d90e4aa --- /dev/null +++ b/tests/pytests/unit/utils/parsers/test_daemon_mixin.py @@ -0,0 +1,78 @@ +""" +Tests the PIDfile deletion in the DaemonMixIn. +""" + +import logging + +import pytest + +import salt.utils.parsers +from tests.support.mock import ANY, MagicMock, patch + + +@pytest.fixture +def daemon_mixin(): + mixin = salt.utils.parsers.DaemonMixIn() + mixin.config = {"pidfile": "/some/fake.pid"} + return mixin + + +def test_pid_file_deletion(daemon_mixin): + """ + PIDfile deletion without exception. + """ + with patch("os.unlink", MagicMock()) as unlink_mock: + with patch("os.path.isfile", MagicMock(return_value=True)): + with patch("salt.utils.parsers.log", MagicMock()) as log_mock: + daemon_mixin._mixin_before_exit() + unlink_mock.assert_called_once() + log_mock.info.assert_not_called() + log_mock.debug.assert_not_called() + + +def test_pid_deleted_oserror_as_root(daemon_mixin): + """ + PIDfile deletion with exception, running as root. 
+ """ + with patch("os.unlink", MagicMock(side_effect=OSError())) as unlink_mock: + with patch("os.path.isfile", MagicMock(return_value=True)): + with patch("salt.utils.parsers.log", MagicMock()) as log_mock: + if salt.utils.platform.is_windows(): + patch_args = ( + "salt.utils.win_functions.is_admin", + MagicMock(return_value=True), + ) + else: + patch_args = ("os.getuid", MagicMock(return_value=0)) + + with patch(*patch_args): + daemon_mixin._mixin_before_exit() + assert unlink_mock.call_count == 1 + log_mock.info.assert_called_with( + "PIDfile(%s) could not be deleted: %s", + format(daemon_mixin.config["pidfile"], ""), + ANY, + exc_info_on_loglevel=logging.DEBUG, + ) + + +def test_pid_deleted_oserror_as_non_root(daemon_mixin): + """ + PIDfile deletion with exception, running as non-root. + """ + with patch("os.unlink", MagicMock(side_effect=OSError())) as unlink_mock: + with patch("os.path.isfile", MagicMock(return_value=True)): + with patch("salt.utils.parsers.log", MagicMock()) as log_mock: + if salt.utils.platform.is_windows(): + patch_args = ( + "salt.utils.win_functions.is_admin", + MagicMock(return_value=False), + ) + else: + patch_args = ("os.getuid", MagicMock(return_value=1000)) + + with patch(*patch_args): + daemon_mixin._mixin_before_exit() + assert unlink_mock.call_count == 1 + log_mock.info.assert_not_called() + log_mock.debug.assert_not_called() diff --git a/tests/pytests/unit/utils/parsers/test_log_parsers.py b/tests/pytests/unit/utils/parsers/test_log_parsers.py new file mode 100644 index 000000000000..2b56ccc0da4f --- /dev/null +++ b/tests/pytests/unit/utils/parsers/test_log_parsers.py @@ -0,0 +1,784 @@ +""" + :codeauthor: Denys Havrysh +""" + +import logging +import os +import pprint + +import pytest + +import salt._logging +import salt.config +import salt.syspaths +import salt.utils.jid +import salt.utils.parsers +import salt.utils.platform +from tests.support.mock import MagicMock, patch + +log = logging.getLogger(__name__) + + +class 
LogImplMock: + """ + Logger setup + """ + + def __init__(self): + """ + init + """ + self.log_level_console = None + self.log_file = None + self.log_level_logfile = None + self.config = self.original_config = None + logging_options = salt._logging.get_logging_options_dict() + if logging_options: + self.config = logging_options.copy() + self.original_config = self.config.copy() + self.temp_log_level = None + self._console_handler_configured = False + self._extended_logging_configured = False + self._logfile_handler_configured = False + self._real_set_logging_options_dict = salt._logging.set_logging_options_dict + self._real_get_logging_options_dict = salt._logging.get_logging_options_dict + self._real_setup_logfile_handler = salt._logging.setup_logfile_handler + + def _destroy(self): + salt._logging.set_logging_options_dict.__options_dict__ = self.original_config + salt._logging.shutdown_logfile_handler() + + def setup_temp_handler(self, log_level=None): + """ + Set temp handler loglevel + """ + log.debug("Setting temp handler log level to: %s", log_level) + self.temp_log_level = log_level + + def is_console_handler_configured(self): + log.debug("Calling is_console_handler_configured") + return self._console_handler_configured + + def setup_console_handler( + self, log_level="error", **kwargs + ): # pylint: disable=unused-argument + """ + Set console loglevel + """ + log.debug("Setting console handler log level to: %s", log_level) + self.log_level_console = log_level + self._console_handler_configured = True + + def shutdown_console_handler(self): + log.debug("Calling shutdown_console_handler") + self._console_handler_configured = False + + def is_extended_logging_configured(self): + log.debug("Calling is_extended_logging_configured") + return self._extended_logging_configured + + def setup_extended_logging(self, opts): + """ + Set opts + """ + log.debug("Calling setup_extended_logging") + self._extended_logging_configured = True + + def 
shutdown_extended_logging(self): + log.debug("Calling shutdown_extended_logging") + self._extended_logging_configured = False + + def is_logfile_handler_configured(self): + log.debug("Calling is_logfile_handler_configured") + return self._logfile_handler_configured + + def setup_logfile_handler( + self, log_path, log_level=None, **kwargs + ): # pylint: disable=unused-argument + """ + Set logfile and loglevel + """ + log.debug("Setting log file handler path to: %s", log_path) + log.debug("Setting log file handler log level to: %s", log_level) + self.log_file = log_path + self.log_level_logfile = log_level + self._real_setup_logfile_handler(log_path, log_level=log_level, **kwargs) + self._logfile_handler_configured = True + + def shutdown_logfile_handler(self): + log.debug("Calling shutdown_logfile_handler") + self._logfile_handler_configured = False + + def get_logging_options_dict(self): + log.debug("Calling get_logging_options_dict") + return self.config + + def set_logging_options_dict(self, opts): + log.debug("Calling set_logging_options_dict") + self._real_set_logging_options_dict(opts) + self.config = self._real_get_logging_options_dict() + log.debug("Logging options dict:\n%s", pprint.pformat(self.config)) + + def setup_log_granular_levels(self, opts): + log.debug("Calling setup_log_granular_levels") + + def setup_logging(self): + log.debug("Mocked setup_logging called") + # Whether daemonizing or not, either on the main process or on a separate process + # The log file is going to be configured.
+ # The console is the only handler not configured if daemonizing + + # These routines are what happens on salt._logging.setup_logging + opts = self.get_logging_options_dict() + + if ( + opts.get("configure_console_logger", True) + and not self.is_console_handler_configured() + ): + self.setup_console_handler( + log_level=opts["log_level"], + log_format=opts["log_fmt_console"], + date_format=opts["log_datefmt"], + ) + if ( + opts.get("configure_file_logger", True) + and not self.is_logfile_handler_configured() + ): + log_file_level = opts["log_level_logfile"] or opts["log_level"] + if log_file_level != "quiet": + self.setup_logfile_handler( + log_path=opts[opts["log_file_key"]], + log_level=log_file_level, + log_format=opts["log_fmt_logfile"], + date_format=opts["log_datefmt_logfile"], + max_bytes=opts["log_rotate_max_bytes"], + backup_count=opts["log_rotate_backup_count"], + user=opts["user"], + ) + if not self.is_extended_logging_configured(): + self.setup_extended_logging(opts) + self.setup_log_granular_levels(opts["log_granular_levels"]) + + def __enter__(self): + return self + + def __exit__(self, *_): + self._destroy() + + +# <----------- START TESTS -----------> + + +@pytest.fixture +def root_dir(tmp_path): + yield tmp_path / "parsers_tests_root_dir" + + +@pytest.fixture( + params=[ + "master", + "minion", + "proxyminion", + "syndic", + "saltcmd", + "saltcp", + "saltkey", + "saltcall", + "saltrun", + "saltssh", + "saltcloud", + "spm", + "saltapi", + ] +) +def log_cli_parser(request): + return request.param + + +@pytest.fixture +def default_config(log_cli_parser): + if log_cli_parser == "master": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "minion": + return salt.config.DEFAULT_MINION_OPTS.copy() + elif log_cli_parser == "proxyminion": + return { + **salt.config.DEFAULT_MINION_OPTS.copy(), + **salt.config.DEFAULT_PROXY_MINION_OPTS.copy(), + } + elif log_cli_parser == "syndic": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif 
log_cli_parser == "saltcmd": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltcp": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltkey": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltcall": + return salt.config.DEFAULT_MINION_OPTS.copy() + elif log_cli_parser == "saltrun": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltssh": + return salt.config.DEFAULT_MASTER_OPTS.copy() + elif log_cli_parser == "saltcloud": + return { + **salt.config.DEFAULT_MASTER_OPTS.copy(), + **salt.config.DEFAULT_CLOUD_OPTS.copy(), + } + elif log_cli_parser == "spm": + return { + **salt.config.DEFAULT_MASTER_OPTS.copy(), + **salt.config.DEFAULT_SPM_OPTS.copy(), + } + elif log_cli_parser == "saltapi": + return { + **salt.config.DEFAULT_MASTER_OPTS.copy(), + **salt.config.DEFAULT_API_OPTS.copy(), + } + + +@pytest.fixture +def parser(log_cli_parser): + param_map = { + "master": salt.utils.parsers.MasterOptionParser, + "minion": salt.utils.parsers.MinionOptionParser, + "proxyminion": salt.utils.parsers.ProxyMinionOptionParser, + "syndic": salt.utils.parsers.SyndicOptionParser, + "saltcmd": salt.utils.parsers.SaltCMDOptionParser, + "saltcp": salt.utils.parsers.SaltCPOptionParser, + "saltkey": salt.utils.parsers.SaltKeyOptionParser, + "saltcall": salt.utils.parsers.SaltCallOptionParser, + "saltrun": salt.utils.parsers.SaltRunOptionParser, + "saltssh": salt.utils.parsers.SaltSSHOptionParser, + "saltcloud": salt.utils.parsers.SaltCloudParser, + "spm": salt.utils.parsers.SPMParser, + "saltapi": salt.utils.parsers.SaltAPIParser, + } + return param_map[log_cli_parser] + + +@pytest.fixture +def config_func(log_cli_parser): + param_map = { + "master": "salt.config.master_config", + "minion": "salt.config.minion_config", + "proxyminion": "salt.config.proxy_config", + "syndic": "salt.config.syndic_config", + "saltcmd": "salt.config.client_config", + "saltcp": 
"salt.config.master_config", + "saltkey": "salt.config.client_config", + "saltcall": "salt.config.minion_config", + "saltrun": "salt.config.master_config", + "saltssh": "salt.config.master_config", + "saltcloud": "salt.config.cloud_config", + "spm": "salt.config.spm_config", + "saltapi": "salt.config.api_config", + } + return param_map[log_cli_parser] + + +@pytest.fixture +def log_file(tmp_path, logfile_config_setting_name): + return str(tmp_path / logfile_config_setting_name) + + +@pytest.fixture +def args(log_cli_parser): + if log_cli_parser in ("saltcmd", "saltssh"): + return ["foo", "bar.baz"] + elif log_cli_parser == "saltcp": + return ["foo", "bar", "baz"] + elif log_cli_parser in ("saltcall", "saltrun"): + return ["foo.bar"] + elif log_cli_parser == "saltcloud": + return ["-p", "foo", "bar"] + elif log_cli_parser == "spm": + return ["foo", "bar"] + return [] + + +@pytest.fixture +def loglevel_config_setting_name(): + return "log_level" + + +@pytest.fixture +def logfile_config_setting_name(log_cli_parser): + if log_cli_parser == "syndic": + return "syndic_log_file" + elif log_cli_parser == "saltkey": + return "key_logfile" + elif log_cli_parser == "saltssh": + return "ssh_log_file" + elif log_cli_parser == "spm": + return "spm_logfile" + elif log_cli_parser == "saltapi": + return "api_logfile" + return "log_file" + + +@pytest.fixture +def logfile_loglevel_config_setting_name(): + return "log_level_logfile" + + +@pytest.fixture +def testing_config(default_config, root_dir, logfile_config_setting_name, log_file): + _testing_config = default_config.copy() + _testing_config["root_dir"] = root_dir + for name in ("pki_dir", "cachedir"): + _testing_config[name] = name + _testing_config[logfile_config_setting_name] = log_file + return _testing_config + + +@pytest.fixture(autouse=True) +def log_impl(): + """ + Mock logger functions + """ + with LogImplMock() as _log_impl: + mocked_functions = {} + for name in dir(_log_impl): + if name.startswith("_"): + continue + 
func = getattr(_log_impl, name) + if not callable(func): + continue + mocked_functions[name] = func + + patcher = patch.multiple(salt._logging, **mocked_functions) + with patcher: + yield _log_impl + + +def test_get_log_level_cli( + testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl +): + """ + Tests that log level match command-line specified value + """ + # Set defaults + default_log_level = testing_config[loglevel_config_setting_name] + + # Set log level in CLI + log_level = "critical" + args = ["--log-level", log_level] + args + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + console_log_level = getattr(instance.options, loglevel_config_setting_name) + + # Check console log level setting + assert console_log_level == log_level + # Check console logger log level + assert log_impl.log_level_console == log_level + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.temp_log_level == log_level + # Check log file logger log level + assert log_impl.log_level_logfile == default_log_level + + +def test_get_log_level_config( + testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl +): + """ + Tests that log level match the configured value + """ + # Set log level in config + log_level = "info" + testing_config.update({loglevel_config_setting_name: log_level}) + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + console_log_level = getattr(instance.options, loglevel_config_setting_name) + + # Check console log level setting + assert console_log_level == log_level + # Check console logger log level + assert log_impl.log_level_console == log_level + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.temp_log_level == "error" + # Check log file logger log level + assert log_impl.log_level_logfile == log_level + + 
+def test_get_log_level_default( + testing_config, loglevel_config_setting_name, args, parser, config_func, log_impl +): + """ + Tests that log level match the default value + """ + # Set defaults + log_level = default_log_level = testing_config[loglevel_config_setting_name] + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + console_log_level = getattr(instance.options, loglevel_config_setting_name) + + # Check log level setting + assert console_log_level == log_level + # Check console logger log level + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.temp_log_level == "error" + # Check log file logger + assert log_impl.log_level_logfile == default_log_level + # Check help message + assert ( + "Default: '{}'.".format(default_log_level) + in instance.get_option("--log-level").help + ) + + +# log file configuration tests + + +def test_get_log_file_cli( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + log_file, + logfile_config_setting_name, +): + """ + Tests that log file match command-line specified value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + + # Set log file in CLI + log_file = "{}_cli.log".format(log_file) + args = ["--log-file", log_file] + args + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_file_option = getattr(instance.options, logfile_config_setting_name) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_config_setting_name] == log_file + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file setting + assert log_file_option == 
log_file + # Check log file logger + assert log_impl.log_file == log_file + + +def test_get_log_file_config( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_config_setting_name, + log_file, +): + """ + Tests that log file match the configured value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + + # Set log file in config + log_file = "{}_config.log".format(log_file) + testing_config.update({logfile_config_setting_name: log_file}) + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_file_option = getattr(instance.options, logfile_config_setting_name) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_config_setting_name] == log_file + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file setting + assert log_file_option == log_file + # Check log file logger + assert log_impl.log_file == log_file + + +def test_get_log_file_default( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_config_setting_name, + default_config, +): + """ + Tests that log file match the default value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + log_file = testing_config[logfile_config_setting_name] + default_log_file = default_config[logfile_config_setting_name] + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_file_option = getattr(instance.options, logfile_config_setting_name) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert 
log_impl.config[logfile_config_setting_name] == log_file + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file setting + assert log_file_option == log_file + # Check log file logger + assert log_impl.log_file == log_file + # Check help message + assert ( + "Default: '{}'.".format(default_log_file) + in instance.get_option("--log-file").help + ) + + +# log file log level configuration tests + + +def test_get_log_file_level_cli( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): + """ + Tests that file log level match command-line specified value + """ + # Set defaults + default_log_level = testing_config[loglevel_config_setting_name] + + # Set log file level in CLI + log_level_logfile = "error" + args = ["--log-file-level", log_level_logfile] + args + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_level_logfile_option = getattr( + instance.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == default_log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == default_log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + + +def test_get_log_file_level_config( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): + """ + Tests that log file level match the configured value + """ + # Set defaults + log_level = testing_config[loglevel_config_setting_name] + + # Set log file level in config + log_level_logfile = "info" + 
testing_config.update({logfile_loglevel_config_setting_name: log_level_logfile}) + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_level_logfile_option = getattr( + instance.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + + +def test_get_log_file_level_default( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): + """ + Tests that log file level match the default value + """ + # Set defaults + default_log_level = testing_config[loglevel_config_setting_name] + + log_level = default_log_level + log_level_logfile = default_log_level + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_level_logfile_option = getattr( + instance.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + # Check help message + assert ( + "Default: '{}'.".format(default_log_level) + in 
instance.get_option("--log-file-level").help + ) + + +def test_get_console_log_level_with_file_log_level( + testing_config, + loglevel_config_setting_name, + args, + parser, + config_func, + log_impl, + logfile_loglevel_config_setting_name, +): # pylint: disable=invalid-name + """ + Tests that both console log level and log file level setting are working together + """ + log_level = "critical" + log_level_logfile = "debug" + + args = ["--log-file-level", log_level_logfile] + args + + testing_config.update({loglevel_config_setting_name: log_level}) + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + log_level_logfile_option = getattr( + instance.options, logfile_loglevel_config_setting_name + ) + + # Check console logger + assert log_impl.log_level_console == log_level + # Check extended logger + assert log_impl.config[loglevel_config_setting_name] == log_level + assert log_impl.config[logfile_loglevel_config_setting_name] == log_level_logfile + # Check temp logger + assert log_impl.temp_log_level == "error" + # Check log file level setting + assert log_level_logfile_option == log_level_logfile + # Check log file logger + assert log_impl.log_level_logfile == log_level_logfile + + +def test_log_created( + testing_config, args, parser, config_func, logfile_config_setting_name, log_file +): + """ + Tests that log file is created + """ + testing_config.update({"log_file": str(log_file)}) + log_file_name = str(log_file) + if log_file_name.split(os.sep)[-1] != "log_file": + testing_config.update({log_file_name: str(log_file)}) + + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + + assert os.path.exists(str(log_file_name)) + + +def test_callbacks_uniqueness(parser): + """ + Test that the callbacks are only added once, no matter + how many instances of the parser we create + """ + mixin_container_names = ( + "_mixin_setup_funcs", + 
"_mixin_process_funcs", + "_mixin_after_parsed_funcs", + "_mixin_before_exit_funcs", + ) + instance = parser() + nums_1 = {} + for cb_container in mixin_container_names: + obj = getattr(instance, cb_container) + nums_1[cb_container] = len(obj) + + # The next time we instantiate the parser, the counts should be equal + instance = parser() + nums_2 = {} + for cb_container in mixin_container_names: + obj = getattr(instance, cb_container) + nums_2[cb_container] = len(obj) + assert nums_1 == nums_2 + + +def test_verify_log_warning_logged(args, config_func, testing_config, parser, caplog): + args = ["--log-level", "debug"] + args + with caplog.at_level(logging.DEBUG): + instance = parser() + with patch(config_func, MagicMock(return_value=testing_config)): + instance.parse_args(args) + assert ( + "Insecure logging configuration detected! Sensitive data may be logged." + in caplog.messages + ) diff --git a/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py b/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py new file mode 100644 index 000000000000..fa99f26c0813 --- /dev/null +++ b/tests/pytests/unit/utils/parsers/test_saltfile_mixin.py @@ -0,0 +1,216 @@ +""" +Tests the SaltfileMixIn. +""" + +import optparse +import shutil + +import pytest + +import salt.exceptions +import salt.utils.parsers +from tests.support.helpers import patched_environ +from tests.support.mock import patch + + +class MockSaltfileParser( + salt.utils.parsers.OptionParser, + salt.utils.parsers.SaltfileMixIn, + metaclass=salt.utils.parsers.OptionParserMeta, +): + def __init__(self, *args, **kwargs): + salt.utils.parsers.OptionParser.__init__(self, *args, **kwargs) + self.config = {} + + def _mixin_setup(self): + self.add_option( + "-l", + "--log-level", + dest="log_level", + default="warning", + help="The log level for salt.", + ) + group = self.output_options_group = optparse.OptionGroup( + self, "Output Options", "Configure your preferred output format." 
+ ) + self.add_option_group(group) + + group.add_option( + "--out", + "--output", + dest="output", + help=( + "Print the output from the '{}' command using the " + "specified outputter.".format( + self.get_prog_name(), + ) + ), + ) + group.add_option( + "--out-file", + "--output-file", + dest="output_file", + default=None, + help="Write the output to the specified file.", + ) + group.add_option( + "--version-arg", + action="version", + help="Option to test no dest", + ) + + +@pytest.fixture +def parser(): + return MockSaltfileParser() + + +@pytest.fixture +def saltfile(tmp_path): + fp = tmp_path / "Saltfile" + fp.touch() + return fp + + +def test_saltfile_in_environment(parser, saltfile): + """ + Test setting the SALT_SALTFILE environment variable + """ + with patched_environ(SALT_SALTFILE=str(saltfile)): + parser.parse_args([]) + assert parser.options.saltfile == str(saltfile) + + +def test_saltfile_option(parser, saltfile): + """ + Test setting the saltfile via the CLI + """ + parser.parse_args(["--saltfile", str(saltfile)]) + assert parser.options.saltfile == str(saltfile) + + +def test_bad_saltfile_option(parser, saltfile, tmp_path): + """ + Test setting a bad saltfile via the CLI + """ + with pytest.raises(SystemExit): + parser.parse_args(["--saltfile", str(tmp_path / "fake_dir")]) + + +def test_saltfile_cwd(parser, saltfile, tmp_path): + """ + Test using a saltfile in the cwd + """ + with patch("os.getcwd", return_value=str(tmp_path)) as cwd_mock: + parser.parse_args([]) + assert parser.options.saltfile == str(saltfile) + cwd_mock.assert_called_once() + + +def test_saltfile_cwd_doesnt_exist(parser, saltfile, tmp_path): + """ + Test using a saltfile in the cwd that doesn't exist + """ + with patch("os.getcwd", return_value=str(tmp_path / "fake_dir")) as cwd_mock: + parser.parse_args([]) + assert parser.options.saltfile is None + + +def test_saltfile_user_home(parser, saltfile, tmp_path): + """ + Test using a saltfile in ~/.salt/ + """ + fake_dir = tmp_path / 
"fake_dir" + fake_dir.mkdir() + with patch("os.getcwd", return_value=str(fake_dir)) as cwd_mock: + with patch("os.path.expanduser", return_value=str(tmp_path)) as eu_mock: + salt_subdir = tmp_path / ".salt" + salt_subdir.mkdir() + dest = str(salt_subdir / "Saltfile") + shutil.copy(str(saltfile), dest) + parser.parse_args([]) + assert parser.options.saltfile == dest + cwd_mock.assert_called_once() + eu_mock.assert_called_with("~") + + +def test_bad_saltfile(parser, saltfile): + """ + Test a saltfile with bad configuration + """ + contents = """ + bad "yaml": + - this is: bad yaml + - bad yaml=data: + - {"bad": yaml, "data": "yaml"} + """ + saltfile.write_text(contents) + # It raises two errors, let's catch them both + with pytest.raises(SystemExit): + with pytest.raises(salt.exceptions.SaltConfigurationError): + parser.parse_args(["--saltfile", str(saltfile)]) + + +def test_saltfile_without_prog_name(parser, saltfile): + """ + Test a saltfile with valid yaml but without the program name in it + """ + contents = "good: yaml" + saltfile.write_text(contents) + # This should just run cleanly + parser.parse_args(["--saltfile", str(saltfile)]) + + +def test_saltfile(parser, saltfile): + """ + Test a valid saltfile + """ + contents = """ + __main__.py: + log_level: debug + output: json + """ + saltfile.write_text(contents) + parser.parse_args(["--saltfile", str(saltfile)]) + print(parser.option_list) + assert parser.options.log_level == "debug" + assert parser.options.output == "json" + + +def test_saltfile_unusual_option(parser, saltfile): + """ + Test a valid saltfile + """ + contents = """ + __main__.py: + go: birds + """ + saltfile.write_text(contents) + parser.parse_args(["--saltfile", str(saltfile)]) + assert parser.options.go == "birds" + + +def test_saltfile_cli_override(parser, saltfile): + """ + Test a valid saltfile + """ + contents = """ + __main__.py: + log_level: debug + output: json + output_file: /fake/file + """ + saltfile.write_text(contents) + 
parser.parse_args( + [ + "--saltfile", + str(saltfile), + "--log-level", + "info", + "--out-file", + "/still/fake/file", + ] + ) + assert parser.options.log_level == "info" + assert parser.options.output == "json" + assert parser.options.output_file == "/still/fake/file" diff --git a/tests/pytests/unit/utils/templates/__init__.py b/tests/pytests/unit/utils/templates/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git a/tests/pytests/unit/utils/templates/conftest.py b/tests/pytests/unit/utils/templates/conftest.py new file mode 100644 index 000000000000..948dd06fb090 --- /dev/null +++ b/tests/pytests/unit/utils/templates/conftest.py @@ -0,0 +1,9 @@ +import pytest + + +@pytest.fixture +def render_context(): + return { + "opts": {"cachedir": "/D", "__cli": "salt"}, + "saltenv": None, + } diff --git a/tests/pytests/unit/utils/templates/test_cheetah.py b/tests/pytests/unit/utils/templates/test_cheetah.py new file mode 100644 index 000000000000..81f6e2b7814b --- /dev/null +++ b/tests/pytests/unit/utils/templates/test_cheetah.py @@ -0,0 +1,47 @@ +import pytest +from salt.utils.templates import render_cheetah_tmpl + +pytest.importorskip("Cheetah") + + +def test_render_sanity(render_context): + tmpl = """OK""" + res = render_cheetah_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_evaluate(render_context): + tmpl = """<%="OK"%>""" + res = render_cheetah_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_evaluate_xml(render_context): + tmpl = """ + <% if 1: %> + OK + <% pass %> + """ + res = render_cheetah_tmpl(tmpl, render_context) + stripped = res.strip() + assert stripped == "OK" + + +def test_render_evaluate_text(render_context): + tmpl = """ + #if 1 + OK + #end if + """ + + res = render_cheetah_tmpl(tmpl, render_context) + stripped = res.strip() + assert stripped == "OK" + + +def test_render_variable(render_context): + tmpl = """$var""" + + render_context["var"] = "OK" + res = render_cheetah_tmpl(tmpl, 
render_context) + assert res.strip() == "OK" diff --git a/tests/pytests/unit/utils/templates/test_genshi.py b/tests/pytests/unit/utils/templates/test_genshi.py new file mode 100644 index 000000000000..3ea03b210b33 --- /dev/null +++ b/tests/pytests/unit/utils/templates/test_genshi.py @@ -0,0 +1,36 @@ +import pytest +from salt.utils.templates import render_genshi_tmpl + +pytest.importorskip("genshi") + + +def test_render_sanity(render_context): + tmpl = """OK""" + res = render_genshi_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_evaluate(render_context): + tmpl = """${ "OK" }""" + res = render_genshi_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_evaluate_condition(render_context): + tmpl = """OK""" + res = render_genshi_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_variable(render_context): + tmpl = """$var""" + render_context["var"] = "OK" + res = render_genshi_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_variable_replace(render_context): + tmpl = """not ok""" + render_context["var"] = "OK" + res = render_genshi_tmpl(tmpl, render_context) + assert res == "OK" diff --git a/tests/pytests/unit/utils/templates/test_jinja.py b/tests/pytests/unit/utils/templates/test_jinja.py index 8e0257bbfc4e..6d47e6b80d3a 100644 --- a/tests/pytests/unit/utils/templates/test_jinja.py +++ b/tests/pytests/unit/utils/templates/test_jinja.py @@ -1,41 +1,17 @@ """ Tests for salt.utils.templates """ -import os import re +from collections import OrderedDict import pytest from salt.exceptions import SaltRenderError from salt.utils.templates import render_jinja_tmpl +from tests.support.mock import patch -@pytest.fixture -def minion_opts(tmp_path, minion_opts): - minion_opts.update( - { - "cachedir": str(tmp_path / "jinja-template-cache"), - "file_buffer_size": 1048576, - "file_client": "local", - "file_ignore_regex": None, - "file_ignore_glob": None, - "file_roots": {"test": [str(tmp_path / 
"templates")]}, - "pillar_roots": {"test": [str(tmp_path / "templates")]}, - "fileserver_backend": ["roots"], - "hash_type": "md5", - "extension_modules": os.path.join( - os.path.dirname(os.path.abspath(__file__)), "extmods" - ), - } - ) - return minion_opts - - -@pytest.fixture -def local_salt(): - return {} - -def test_jinja_undefined_error_context(minion_opts, local_salt): +def test_undefined_error_context(render_context): """ Test that jinja provides both the line number on which the error occurred in the Jinja template, and also several lines of context around the error @@ -63,5 +39,64 @@ def test_jinja_undefined_error_context(minion_opts, local_salt): with pytest.raises(SaltRenderError, match=match_regex): render_jinja_tmpl( jinja_code, - dict(opts=minion_opts, saltenv="test", salt=local_salt), + render_context, ) + + +def test_render_sanity(render_context): + tmpl = """OK""" + res = render_jinja_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_evaluate(render_context): + tmpl = """{{ "OK" }}""" + res = render_jinja_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_evaluate_multi(render_context): + tmpl = """{% if 1 -%}OK{%- endif %}""" + res = render_jinja_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_variable(render_context): + tmpl = """{{ var }}""" + render_context["var"] = "OK" + res = render_jinja_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_tojson_sorted(render_context): + templ = """thing: {{ var|tojson(sort_keys=True) }}""" + expected = """thing: {"x": "xxx", "y": "yyy", "z": "zzz"}""" + + with patch.dict(render_context, {"var": {"z": "zzz", "y": "yyy", "x": "xxx"}}): + res = render_jinja_tmpl(templ, render_context) + + assert res == expected + + +def test_render_tojson_unsorted(render_context): + templ = """thing: {{ var|tojson(sort_keys=False) }}""" + expected = """thing: {"z": "zzz", "x": "xxx", "y": "yyy"}""" + + # Values must be added to the dict in the 
expected order. This is + # only necessary for older Pythons that don't remember dict order. + d = OrderedDict() + d["z"] = "zzz" + d["x"] = "xxx" + d["y"] = "yyy" + + with patch.dict(render_context, {"var": d}): + res = render_jinja_tmpl(templ, render_context) + + assert res == expected + + +def test_render_cve_2021_25283(render_context): + tmpl = """{{ [].__class__ }}""" + render_context["var"] = "OK" + with pytest.raises(SaltRenderError): + res = render_jinja_tmpl(tmpl, render_context) diff --git a/tests/pytests/unit/utils/templates/test_mako.py b/tests/pytests/unit/utils/templates/test_mako.py new file mode 100644 index 000000000000..db3cf59887f3 --- /dev/null +++ b/tests/pytests/unit/utils/templates/test_mako.py @@ -0,0 +1,34 @@ +import pytest +from salt.utils.templates import render_mako_tmpl + +pytest.importorskip("mako") + + +def test_render_mako_sanity(render_context): + tmpl = """OK""" + res = render_mako_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_mako_evaluate(render_context): + tmpl = """${ "OK" }""" + res = render_mako_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_mako_evaluate_multi(render_context): + tmpl = """ + % if 1: + OK + % endif + """ + res = render_mako_tmpl(tmpl, render_context) + stripped = res.strip() + assert stripped == "OK" + + +def test_render_mako_variable(render_context): + tmpl = """${ var }""" + render_context["var"] = "OK" + res = render_mako_tmpl(tmpl, render_context) + assert res == "OK" diff --git a/tests/pytests/unit/utils/templates/test_wempy.py b/tests/pytests/unit/utils/templates/test_wempy.py new file mode 100644 index 000000000000..45155fd4d5f8 --- /dev/null +++ b/tests/pytests/unit/utils/templates/test_wempy.py @@ -0,0 +1,29 @@ +import pytest +from salt.utils.templates import render_wempy_tmpl + +pytest.importorskip("wemplate") + + +def test_render_wempy_sanity(render_context): + tmpl = """OK""" + res = render_wempy_tmpl(tmpl, render_context) + assert res == "OK" + + +def 
test_render_wempy_evaluate(render_context): + tmpl = """{{="OK"}}""" + res = render_wempy_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_wempy_evaluate_multi(render_context): + tmpl = """{{if 1:}}OK{{pass}}""" + res = render_wempy_tmpl(tmpl, render_context) + assert res == "OK" + + +def test_render_wempy_variable(render_context): + tmpl = """{{=var}}""" + render_context["var"] = "OK" + res = render_wempy_tmpl(tmpl, render_context) + assert res == "OK" diff --git a/tests/pytests/unit/utils/templates/test_wrap_tmpl_func.py b/tests/pytests/unit/utils/templates/test_wrap_tmpl_func.py new file mode 100644 index 000000000000..9ab74f0377d2 --- /dev/null +++ b/tests/pytests/unit/utils/templates/test_wrap_tmpl_func.py @@ -0,0 +1,217 @@ +""" +Unit tests for salt.utils.templates.py +""" +import logging +from pathlib import PurePath, PurePosixPath + +import pytest + +from salt.utils.templates import wrap_tmpl_func, generate_sls_context +from tests.support.mock import patch + + +log = logging.getLogger(__name__) + + +class MockRender: + def __call__(self, tplstr, context, tmplpath=None): + self.tplstr = tplstr + self.context = context + self.tmplpath = tmplpath + return tplstr + + +def _test_generated_sls_context(tmplpath, sls, **expected): + """Generic SLS Context Test""" + # DeNormalize tmplpath + tmplpath = str(PurePath(PurePosixPath(tmplpath))) + if tmplpath.startswith("\\"): + tmplpath = "C:{}".format(tmplpath) + expected["tplpath"] = tmplpath + actual = generate_sls_context(tmplpath, sls) + assert {key: actual[key] for key in expected if key in actual} == actual + + +def test_sls_context_call(tmp_path): + """Check that generate_sls_context is called with proper parameters""" + sls = "foo.bar" + slsfile = tmp_path / "foo" / "bar.sls" + slsfile.parent.mkdir() + slsfile.write_text("{{ slspath }}") + context = {"opts": {}, "saltenv": "base", "sls": sls} + render = MockRender() + with patch("salt.utils.templates.generate_sls_context") as 
generate_sls_context: + wrapped = wrap_tmpl_func(render) + res = wrapped(str(slsfile), context=context, tmplpath=str(slsfile)) + generate_sls_context.assert_called_with(str(slsfile), sls) + + +def test_sls_context_no_call(tmp_path): + """Check that generate_sls_context is not called if sls is not set""" + sls = "foo.bar" + slsfile = tmp_path / "foo" / "bar.sls" + slsfile.parent.mkdir() + slsfile.write_text("{{ slspath }}") + context = {"opts": {}, "saltenv": "base"} + render = MockRender() + with patch("salt.utils.templates.generate_sls_context") as generate_sls_context: + wrapped = wrap_tmpl_func(render) + res = wrapped(str(slsfile), context=context, tmplpath=str(slsfile)) + generate_sls_context.assert_not_called() + + +def test_generate_sls_context__top_level(): + """generate_sls_context - top_level Use case""" + _test_generated_sls_context( + "/tmp/boo.sls", + "boo", + tplfile="boo.sls", + tpldir=".", + tpldot="", + slsdotpath="", + slscolonpath="", + sls_path="", + slspath="", + ) + + +def test_generate_sls_context__one_level_init_implicit(): + """generate_sls_context - Basic one level with implicit init.sls""" + _test_generated_sls_context( + "/tmp/foo/init.sls", + "foo", + tplfile="foo/init.sls", + tpldir="foo", + tpldot="foo", + slsdotpath="foo", + slscolonpath="foo", + sls_path="foo", + slspath="foo", + ) + + +def test_generate_sls_context__one_level_init_explicit(): + """generate_sls_context - Basic one level with explicit init.sls""" + _test_generated_sls_context( + "/tmp/foo/init.sls", + "foo.init", + tplfile="foo/init.sls", + tpldir="foo", + tpldot="foo", + slsdotpath="foo", + slscolonpath="foo", + sls_path="foo", + slspath="foo", + ) + + +def test_generate_sls_context__one_level(): + """generate_sls_context - Basic one level with name""" + _test_generated_sls_context( + "/tmp/foo/boo.sls", + "foo.boo", + tplfile="foo/boo.sls", + tpldir="foo", + tpldot="foo", + slsdotpath="foo", + slscolonpath="foo", + sls_path="foo", + slspath="foo", + ) + + +def 
test_generate_sls_context__one_level_repeating(): + """generate_sls_context - Basic one level with name same as dir + + (Issue #56410) + """ + _test_generated_sls_context( + "/tmp/foo/foo.sls", + "foo.foo", + tplfile="foo/foo.sls", + tpldir="foo", + tpldot="foo", + slsdotpath="foo", + slscolonpath="foo", + sls_path="foo", + slspath="foo", + ) + + +def test_generate_sls_context__two_level_init_implicit(): + """generate_sls_context - Basic two level with implicit init.sls""" + _test_generated_sls_context( + "/tmp/foo/bar/init.sls", + "foo.bar", + tplfile="foo/bar/init.sls", + tpldir="foo/bar", + tpldot="foo.bar", + slsdotpath="foo.bar", + slscolonpath="foo:bar", + sls_path="foo_bar", + slspath="foo/bar", + ) + + +def test_generate_sls_context__two_level_init_explicit(): + """generate_sls_context - Basic two level with explicit init.sls""" + _test_generated_sls_context( + "/tmp/foo/bar/init.sls", + "foo.bar.init", + tplfile="foo/bar/init.sls", + tpldir="foo/bar", + tpldot="foo.bar", + slsdotpath="foo.bar", + slscolonpath="foo:bar", + sls_path="foo_bar", + slspath="foo/bar", + ) + + +def test_generate_sls_context__two_level(): + """generate_sls_context - Basic two level with name""" + _test_generated_sls_context( + "/tmp/foo/bar/boo.sls", + "foo.bar.boo", + tplfile="foo/bar/boo.sls", + tpldir="foo/bar", + tpldot="foo.bar", + slsdotpath="foo.bar", + slscolonpath="foo:bar", + sls_path="foo_bar", + slspath="foo/bar", + ) + + +def test_generate_sls_context__two_level_repeating(): + """generate_sls_context - Basic two level with name same as dir + + (Issue #56410) + """ + _test_generated_sls_context( + "/tmp/foo/foo/foo.sls", + "foo.foo.foo", + tplfile="foo/foo/foo.sls", + tpldir="foo/foo", + tpldot="foo.foo", + slsdotpath="foo.foo", + slscolonpath="foo:foo", + sls_path="foo_foo", + slspath="foo/foo", + ) + + +@pytest.mark.skip_on_windows +def test_generate_sls_context__backslash_in_path(): + """generate_sls_context - Handle backslash in path on non-windows""" + 
_test_generated_sls_context( + "/tmp/foo/foo\\foo.sls", + "foo.foo\\foo", + tplfile="foo/foo\\foo.sls", + tpldir="foo", + tpldot="foo", + slsdotpath="foo", + slscolonpath="foo", + sls_path="foo", + slspath="foo", + ) diff --git a/tests/pytests/unit/utils/test_aws.py b/tests/pytests/unit/utils/test_aws.py index 8881ff34aadd..bd4dc53cc8b3 100644 --- a/tests/pytests/unit/utils/test_aws.py +++ b/tests/pytests/unit/utils/test_aws.py @@ -6,10 +6,12 @@ """ import io +import time +from datetime import datetime, timedelta import requests -from salt.utils.aws import get_metadata +import salt.utils.aws as aws from tests.support.mock import MagicMock, patch @@ -19,7 +21,7 @@ def test_get_metadata_imdsv1(): response.reason = "OK" response.raw = io.BytesIO(b"""test""") with patch("requests.get", return_value=response): - result = get_metadata("/") + result = aws.get_metadata("/") assert result.text == "test" @@ -48,5 +50,138 @@ def handle_get_mock(_, **args): with patch("requests.get", MagicMock(side_effect=handle_get_mock)), patch( "requests.put", return_value=put_response ): - result = get_metadata("/") + result = aws.get_metadata("/") assert result.text == "test" + + +def test_assumed_creds_not_updating_dictionary_while_iterating(): + mock_cache = { + "expired": { + "Expiration": time.mktime(datetime.utcnow().timetuple()), + }, + "not_expired_1": { + "Expiration": time.mktime( + (datetime.utcnow() + timedelta(days=1)).timetuple() + ), + "AccessKeyId": "mock_AccessKeyId", + "SecretAccessKey": "mock_SecretAccessKey", + "SessionToken": "mock_SessionToken", + }, + "not_expired_2": { + "Expiration": time.mktime( + (datetime.utcnow() + timedelta(seconds=300)).timetuple() + ), + }, + } + with patch.dict(aws.__AssumeCache__, mock_cache): + ret = aws.assumed_creds({}, "not_expired_1") + assert "expired" not in aws.__AssumeCache__ + assert ret == ("mock_AccessKeyId", "mock_SecretAccessKey", "mock_SessionToken") + + +def test_assumed_creds_deletes_expired_key(): + mock_cache = { + 
"expired": { + "Expiration": time.mktime(datetime.utcnow().timetuple()), + }, + "not_expired_1": { + "Expiration": time.mktime( + (datetime.utcnow() + timedelta(days=1)).timetuple() + ), + "AccessKeyId": "mock_AccessKeyId", + "SecretAccessKey": "mock_SecretAccessKey", + "SessionToken": "mock_SessionToken", + }, + "not_expired_2": { + "Expiration": time.mktime( + (datetime.utcnow() + timedelta(seconds=300)).timetuple() + ), + }, + } + creds_dict = { + "AccessKeyId": "mock_AccessKeyId", + "SecretAccessKey": "mock_SecretAccessKey", + "SessionToken": "mock_SessionToken", + } + response_mock = MagicMock() + response_mock.status_code = 200 + response_mock.json.return_value = { + "AssumeRoleResponse": { + "AssumeRoleResult": { + "Credentials": creds_dict, + }, + }, + } + with patch.dict(aws.__AssumeCache__, mock_cache): + with patch.object(aws, "sig4", return_value=({}, "fakeurl.com")): + with patch("requests.request", return_value=response_mock): + ret = aws.assumed_creds({}, "expired") + assert "expired" in aws.__AssumeCache__ + assert aws.__AssumeCache__["expired"] == creds_dict + assert ret == ( + "mock_AccessKeyId", + "mock_SecretAccessKey", + "mock_SessionToken", + ) + + +def test_creds_with_role_arn_should_always_call_assumed_creds(): + role_arn = "arn:aws:iam::111111111111:role/my-role-to-assume" + + access_key_id = "mock_AccessKeyId" + secret_access_key = "mock_SecretAccessKey" + token = "mock_Token" + expiration = (datetime.utcnow() + timedelta(seconds=900)).strftime( + "%Y-%m-%dT%H:%M:%SZ" + ) + + patch_expiration = patch("salt.utils.aws.__Expiration__", new=expiration) + + def handle_get_metadata_mock(path, **args): + response_metadata = MagicMock() + response_metadata.status_code = 200 + if path == "meta-data/iam/security-credentials/": + response_metadata.text = "Role" + else: + response_metadata.json.return_value = { + "AccessKeyId": access_key_id, + "SecretAccessKey": secret_access_key, + "Token": token, + "Expiration": expiration, + } + return 
response_metadata + + patch_get_metadata = patch.object( + aws, "get_metadata", side_effect=handle_get_metadata_mock + ) + + assumed_access_key_id = "mock_assumed_AccessKeyId" + assumed_secret_access_key = "mock_assumed_SecretAccessKey" + assumed_session_token = "mock_assumed_SessionToken" + assumed_creds_ret = ( + assumed_access_key_id, + assumed_secret_access_key, + assumed_session_token, + ) + + patch_assumed_creds = patch.object( + aws, "assumed_creds", return_value=assumed_creds_ret + ) + + # test for the first call, with __Expiration__ = "" (default) + with patch_get_metadata as mock_get_metadata: + with patch_assumed_creds: + result = aws.creds( + {"id": aws.IROLE_CODE, "key": aws.IROLE_CODE, "role_arn": role_arn} + ) + assert mock_get_metadata.call_count == 2 + assert result == assumed_creds_ret + + # test for the second call, with valid __Expiration__ + with patch_get_metadata as mock_get_metadata: + with patch_expiration, patch_assumed_creds: + result = aws.creds( + {"id": aws.IROLE_CODE, "key": aws.IROLE_CODE, "role_arn": role_arn} + ) + assert mock_get_metadata.call_count == 0 + assert result == assumed_creds_ret diff --git a/tests/pytests/unit/utils/test_cloud.py b/tests/pytests/unit/utils/test_cloud.py index 550b63c9740d..0bfe6d28ce67 100644 --- a/tests/pytests/unit/utils/test_cloud.py +++ b/tests/pytests/unit/utils/test_cloud.py @@ -8,11 +8,20 @@ import os +import string import tempfile import pytest +try: + from smbprotocol.exceptions import CannotDelete + + HAS_PSEXEC = True +except ImportError: + HAS_PSEXEC = False + import salt.utils.cloud as cloud +from salt.exceptions import SaltCloudException from salt.utils.cloud import __ssh_gateway_arguments as ssh_gateway_arguments from tests.support.mock import MagicMock, patch @@ -74,7 +83,7 @@ def delete_password( os.chdir(old_cwd) -def test_ssh_password_regex(create_class): +def test_ssh_password_regex(): """Test matching ssh password patterns""" for pattern in ( "Password for root@127.0.0.1:", @@ 
-125,7 +134,7 @@ def test_retrieve_password_from_keyring(create_class): assert pw_in_keyring == "fake_password_c8231" -def test_sftp_file_with_content_under_python3(create_class): +def test_sftp_file_with_content_under_python3(): with pytest.raises(Exception) as context: cloud.sftp_file("/tmp/test", "ТЕSТ test content") # we successful pass the place with os.write(tmpfd, ... @@ -133,7 +142,7 @@ def test_sftp_file_with_content_under_python3(create_class): @pytest.mark.skip_on_windows(reason="Not applicable for Windows.") -def test_check_key_path_and_mode(create_class): +def test_check_key_path_and_mode(): with tempfile.NamedTemporaryFile() as f: key_file = f.name @@ -206,7 +215,8 @@ def test_deploy_windows_custom_port(): mock.assert_called_once_with("test", "Administrator", None, 1234) -def test_run_psexec_command_cleanup_lingering_paexec(): +@pytest.mark.skipif(not HAS_PSEXEC, reason="Missing SMB Protocol Library") +def test_run_psexec_command_cleanup_lingering_paexec(caplog): pytest.importorskip("pypsexec.client", reason="Requires PyPsExec") mock_psexec = patch("salt.utils.cloud.PsExecClient", autospec=True) mock_scmr = patch("salt.utils.cloud.ScmrService", autospec=True) @@ -230,11 +240,32 @@ def test_run_psexec_command_cleanup_lingering_paexec(): ) mock_client.return_value.cleanup.assert_called_once() + # Testing handling an error when it can't delete the PAexec binary + with mock_scmr, mock_rm_svc, mock_psexec as mock_client: + mock_client.return_value.session = MagicMock(username="Gary") + mock_client.return_value.connection = MagicMock(server_name="Krabbs") + mock_client.return_value.run_executable.return_value = ( + "Sandy", + "MermaidMan", + "BarnicleBoy", + ) + mock_client.return_value.cleanup = MagicMock(side_effect=CannotDelete()) + + cloud.run_psexec_command( + "spongebob", + "squarepants", + "patrick", + "squidward", + "plankton", + ) + assert "Exception cleaning up PAexec:" in caplog.text + mock_client.return_value.disconnect.assert_called_once() + 
@pytest.mark.skip_unless_on_windows(reason="Only applicable for Windows.") def test_deploy_windows_programdata(): """ - Test deploy_windows with a custom port + Test deploy_windows to ProgramData """ mock_true = MagicMock(return_value=True) mock_tuple = MagicMock(return_value=(0, 0, 0)) @@ -605,3 +636,168 @@ def test_deploy_script_ssh_timeout(): ssh_kwargs = root_cmd.call_args.kwargs assert "ssh_timeout" in ssh_kwargs assert ssh_kwargs["ssh_timeout"] == 34 + + +@pytest.mark.parametrize( + "master,expected", + [ + (None, None), + ("single_master", "single_master"), + (["master1", "master2", "master3"], "master1,master2,master3"), + ], +) +def test__format_master_param(master, expected): + result = cloud._format_master_param(master) + assert result == expected + + +@pytest.mark.skip_unless_on_windows(reason="Only applicable for Windows.") +@pytest.mark.parametrize( + "master,expected", + [ + (None, None), + ("single_master", "single_master"), + (["master1", "master2", "master3"], "master1,master2,master3"), + ], +) +def test_deploy_windows_master(master, expected): + """ + Test deploy_windows with master parameter + """ + mock_true = MagicMock(return_value=True) + mock_tuple = MagicMock(return_value=(0, 0, 0)) + with patch("salt.utils.smb.get_conn", MagicMock()), patch( + "salt.utils.smb.mkdirs", MagicMock() + ), patch("salt.utils.smb.put_file", MagicMock()), patch( + "salt.utils.smb.delete_file", MagicMock() + ), patch( + "salt.utils.smb.delete_directory", MagicMock() + ), patch( + "time.sleep", MagicMock() + ), patch.object( + cloud, "wait_for_port", mock_true + ), patch.object( + cloud, "fire_event", MagicMock() + ), patch.object( + cloud, "wait_for_psexecsvc", mock_true + ), patch.object( + cloud, "run_psexec_command", mock_tuple + ) as mock: + cloud.deploy_windows(host="test", win_installer="install.exe", master=master) + expected_cmd = "c:\\salttemp\\install.exe" + expected_args = "/S /master={} /minion-name=None".format(expected) + assert 
mock.call_args_list[0].args[0] == expected_cmd + assert mock.call_args_list[0].args[1] == expected_args + + +def test___ssh_gateway_config_dict(): + assert cloud.__ssh_gateway_config_dict(None) == {} + gate = { + "ssh_gateway": "Gozar", + "ssh_gateway_key": "Zuul", + "ssh_gateway_user": "Vinz Clortho", + "ssh_gateway_command": "Are you the keymaster?", + } + assert cloud.__ssh_gateway_config_dict(gate) == gate + + +def test_ip_to_int(): + assert cloud.ip_to_int("127.0.0.1") == 2130706433 + + +def test_is_public_ip(): + assert cloud.is_public_ip("8.8.8.8") is True + assert cloud.is_public_ip("127.0.0.1") is False + assert cloud.is_public_ip("172.17.3.1") is False + assert cloud.is_public_ip("192.168.30.4") is False + assert cloud.is_public_ip("10.145.1.1") is False + assert cloud.is_public_ip("fe80::123:ffff:ffff:ffff") is False + assert cloud.is_public_ip("2001:db8:3333:4444:CCCC:DDDD:EEEE:FFFF") is True + + +def test_check_name(): + try: + cloud.check_name("test", string.ascii_letters) + except SaltCloudException as exc: + assert False, f"cloud.check_name rasied SaltCloudException: {exc}" + + with pytest.raises(SaltCloudException): + cloud.check_name("test", string.digits) + + +def test__strip_cache_events(): + events = { + "test": "foobar", + "passwd": "fakepass", + } + events2 = {"test1": "foobar", "test2": "foobar"} + opts = {"cache_event_strip_fields": ["passwd"]} + assert cloud._strip_cache_events(events, opts) == {"test": "foobar"} + assert cloud._strip_cache_events(events2, opts) == events2 + + +def test_salt_cloud_force_asciii(): + try: + "\u0411".encode("iso-8859-15") + except UnicodeEncodeError as exc: + with pytest.raises(UnicodeEncodeError): + cloud._salt_cloud_force_ascii(exc) + + with pytest.raises(TypeError): + cloud._salt_cloud_force_ascii("not the thing") + + try: + "\xa0\u2013".encode("iso-8859-15") + except UnicodeEncodeError as exc: + assert cloud._salt_cloud_force_ascii(exc) == ("-", 2) + + +def test__unwrap_dict(): + assert 
cloud._unwrap_dict({"a": {"b": {"c": "foobar"}}}, "a,b,c") == "foobar" + + +def test_get_salt_interface(): + with patch( + "salt.config.get_cloud_config_value", + MagicMock(side_effect=[False, "public_ips"]), + ) as cloud_config: + assert cloud.get_salt_interface({}, {}) == "public_ips" + assert cloud_config.call_count == 2 + with patch( + "salt.config.get_cloud_config_value", MagicMock(return_value="private_ips") + ) as cloud_config: + assert cloud.get_salt_interface({}, {}) == "private_ips" + assert cloud_config.call_count == 1 + + +def test_userdata_template(): + assert cloud.userdata_template(opts=None, vm_=None, userdata=None) is None + with patch("salt.config.get_cloud_config_value", MagicMock(return_value=False)): + assert cloud.userdata_template(opts=None, vm_=None, userdata="test") == "test" + with patch("salt.config.get_cloud_config_value", MagicMock(return_value=None)): + opts = {"userdata_template": None} + assert cloud.userdata_template(opts=opts, vm_=None, userdata="test") == "test" + + renders = {"jinja": MagicMock(return_value="test")} + + with patch("salt.config.get_cloud_config_value", MagicMock(return_value="jinja")): + with patch("salt.loader.render", MagicMock(return_value=renders)): + opts = { + "userdata_template": "test", + "renderer_blacklist": None, + "renderer_whitelist": None, + "renderer": "jinja", + } + assert cloud.userdata_template(opts=opts, vm_={}, userdata="test") == "test" + + renders = {"jinja": MagicMock(return_value=True)} + + with patch("salt.config.get_cloud_config_value", MagicMock(return_value="jinja")): + with patch("salt.loader.render", MagicMock(return_value=renders)): + opts = { + "userdata_template": "test", + "renderer_blacklist": None, + "renderer_whitelist": None, + "renderer": "jinja", + } + assert cloud.userdata_template(opts=opts, vm_={}, userdata="test") == "True" diff --git a/tests/pytests/unit/utils/test_crypt.py b/tests/pytests/unit/utils/test_crypt.py index 8a9432c19c2d..9a7b35f3d2b8 100644 --- 
a/tests/pytests/unit/utils/test_crypt.py +++ b/tests/pytests/unit/utils/test_crypt.py @@ -1,7 +1,7 @@ """ Unit tests for salt.utils.crypt.py """ - +import pytest import salt.utils.crypt from tests.support.mock import patch @@ -29,8 +29,23 @@ HAS_CRYPTO = False +@pytest.fixture +def pub_key_data(): + return [ + "-----BEGIN PUBLIC KEY-----", + "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAyc9ehbU4J2uzPZZCEw8K", + "5URYcKSUh0h/c6m9PR2kRFbXkHcSnpkWX+LCuFKQ5iF2+0rVn9pO/94rL5zAQ6DU", + "lucqk9EvamSk+TjHh3Ps/HdSxxVbkLk3nglVJrDgENxnAz+Kp+OSNfI2uhhzJiu1", + "Dhn86Wb46eu7EFYeJ+7z9+29UXuCiMIUL5sRx3Xy37gpiD4Z+JVtoBNx1MKJ4MqB", + "24ZXsvtEyrCmuLwhKCiQqvNx91CkyIL+sfMoHDSf7sLwl1CuCEgny7EV7bJpoNzN", + "ZFKggcJCopfzLWDijF5A5OOvvvFrr/rYjW79LkGviWTzJrBPNgoD01zWIlzJfLdh", + "ywIDAQAB", + "-----END PUBLIC KEY-----", + ] + + def test_random(): - # make sure the right liberty is used for random + # make sure the right library is used for random if HAS_M2CRYPTO: assert None is salt.utils.crypt.Random elif HAS_CYPTODOME: @@ -40,7 +55,7 @@ def test_random(): def test_reinit_crypto(): - # make sure reinit cryptot does not crash + # make sure reinit crypto does not crash salt.utils.crypt.reinit_crypto() # make sure reinit does not crash when no crypt is found @@ -49,3 +64,32 @@ def test_reinit_crypto(): with patch("salt.utils.crypt.HAS_CRYPTO", False): with patch("salt.utils.crypt.Random", None): salt.utils.crypt.reinit_crypto() + + +@pytest.mark.parametrize("line_ending", ["\n", "\r\n"]) +def test_pem_finger_file_line_endings(tmp_path, pub_key_data, line_ending): + key_file = tmp_path / "master_crlf.pub" + key_file.write_bytes(line_ending.join(pub_key_data).encode("utf-8")) + finger = salt.utils.crypt.pem_finger(path=str(key_file)) + assert ( + finger + == "9b:42:66:92:8a:d1:b9:27:42:e0:6d:f3:12:c9:74:74:b0:e0:0e:42:83:87:62:ad:95:49:9d:6f:8e:d0:ed:35" + ) + + +@pytest.mark.parametrize("key", [b"123abc", "123abc"]) +def test_pem_finger_key(key): + finger = salt.utils.crypt.pem_finger(key=key) 
+ assert ( + finger + == "dd:13:0a:84:9d:7b:29:e5:54:1b:05:d2:f7:f8:6a:4a:cd:4f:1e:c5:98:c1:c9:43:87:83:f5:6b:c4:f0:ff:80" + ) + + +def test_pem_finger_sha512(): + finger = salt.utils.crypt.pem_finger(key="123abc", sum_type="sha512") + assert ( + finger + == "7b:6a:d7:9b:34:6f:b6:95:12:75:34:39:48:e1:3c:1b:4e:bc:a8:2a:54:52:a6:c5:d1:56:84:37:7f:09:6c:a9:" + "27:50:6a:23:a8:47:e6:e0:46:06:13:99:63:1b:16:fc:28:20:c8:b0:e0:2d:0e:a8:7a:a5:a2:03:a7:7c:2a:7e" + ) diff --git a/tests/pytests/unit/utils/test_data.py b/tests/pytests/unit/utils/test_data.py index 61d834da7f17..5614a7703164 100644 --- a/tests/pytests/unit/utils/test_data.py +++ b/tests/pytests/unit/utils/test_data.py @@ -1,89 +1,1458 @@ +""" +Tests for salt.utils.data +""" +import builtins +import logging + import pytest import salt.utils.data +import salt.utils.stringutils +from salt.utils.odict import OrderedDict as SaltOrderedDict +from tests.support.mock import patch +from tests.support.unit import LOREM_IPSUM +log = logging.getLogger(__name__) -def test_get_value_simple_path(): - data = {"a": {"b": {"c": "foo"}}} - assert [{"value": "foo"}] == salt.utils.data.get_value(data, "a:b:c") +_b = lambda x: x.encode("utf-8") +_s = lambda x: salt.utils.stringutils.to_str(x, normalize=True) -def test_get_value_placeholder_dict(): - data = {"a": {"b": {"name": "foo"}, "c": {"name": "bar"}}} - assert [ - {"value": "foo", "id": "b"}, - {"value": "bar", "id": "c"}, - ] == salt.utils.data.get_value(data, "a:{id}:name") +@pytest.fixture +def get_BYTES(): + # Some randomized data that will not decode + return b"1\x814\x10" -def test_get_value_placeholder_list(): - data = {"a": [{"name": "foo"}, {"name": "bar"}]} - assert [ - {"value": "foo", "id": 0}, - {"value": "bar", "id": 1}, - ] == salt.utils.data.get_value(data, "a:{id}:name") +@pytest.fixture +def get_EGGS(): + # This is an example of a unicode string with й constructed using two separate + # code points. Do not modify it. 
+ return "\u044f\u0438\u0306\u0446\u0430" + + +@pytest.fixture +def get_test_data(get_BYTES, get_EGGS): + return [ + "unicode_str", + _b("питон"), + 123, + 456.789, + True, + False, + None, + get_EGGS, + get_BYTES, + [123, 456.789, _b("спам"), True, False, None, get_EGGS, get_BYTES], + (987, 654.321, _b("яйца"), get_EGGS, None, (True, get_EGGS, get_BYTES)), + { + _b("str_key"): _b("str_val"), + None: True, + 123: 456.789, + get_EGGS: get_BYTES, + _b("subdict"): { + "unicode_key": get_EGGS, + _b("tuple"): (123, "hello", _b("world"), True, get_EGGS, get_BYTES), + _b("list"): [456, _b("спам"), False, get_EGGS, get_BYTES], + }, + }, + SaltOrderedDict([(_b("foo"), "bar"), (123, 456), (get_EGGS, get_BYTES)]), + ] + + +def test_sorted_ignorecase(): + test_list = ["foo", "Foo", "bar", "Bar"] + expected_list = ["bar", "Bar", "foo", "Foo"] + assert salt.utils.data.sorted_ignorecase(test_list) == expected_list -def test_get_value_nested_placeholder(): - data = { - "a": { - "b": {"b1": {"name": "foo1"}, "b2": {"name": "foo2"}}, - "c": {"c1": {"name": "bar"}}, +def test_mysql_to_dict(): + test_mysql_output = [ + "+----+------+-----------+------+---------+------+-------+------------------+", + "| Id | User | Host | db | Command | Time | State | Info " + " |", + "+----+------+-----------+------+---------+------+-------+------------------+", + "| 7 | root | localhost | NULL | Query | 0 | init | show" + " processlist |", + "+----+------+-----------+------+---------+------+-------+------------------+", + ] + + ret = salt.utils.data.mysql_to_dict(test_mysql_output, "Info") + expected_dict = { + "show processlist": { + "Info": "show processlist", + "db": "NULL", + "State": "init", + "Host": "localhost", + "Command": "Query", + "User": "root", + "Time": 0, + "Id": 7, } } - assert [ - {"value": "foo1", "id": "b", "sub": "b1"}, - {"value": "foo2", "id": "b", "sub": "b2"}, - {"value": "bar", "id": "c", "sub": "c1"}, - ] == salt.utils.data.get_value(data, "a:{id}:{sub}:name") + assert ret 
== expected_dict + + +def test_subdict_match(): + test_two_level_dict = {"foo": {"bar": "baz"}} + test_two_level_comb_dict = {"foo": {"bar": "baz:woz"}} + test_two_level_dict_and_list = { + "abc": ["def", "ghi", {"lorem": {"ipsum": [{"dolor": "sit"}]}}], + } + test_three_level_dict = {"a": {"b": {"c": "v"}}} + + assert salt.utils.data.subdict_match(test_two_level_dict, "foo:bar:baz") + # In test_two_level_comb_dict, 'foo:bar' corresponds to 'baz:woz', not + # 'baz'. This match should return False. + assert not salt.utils.data.subdict_match(test_two_level_comb_dict, "foo:bar:baz") + # This tests matching with the delimiter in the value part (in other + # words, that the path 'foo:bar' corresponds to the string 'baz:woz'). + assert salt.utils.data.subdict_match(test_two_level_comb_dict, "foo:bar:baz:woz") + # This would match if test_two_level_comb_dict['foo']['bar'] was equal + # to 'baz:woz:wiz', or if there was more deep nesting. But it does not, + # so this should return False. + assert not salt.utils.data.subdict_match( + test_two_level_comb_dict, "foo:bar:baz:woz:wiz" + ) + # This tests for cases when a key path corresponds to a list. The + # value part 'ghi' should be successfully matched as it is a member of + # the list corresponding to key path 'abc'. It is somewhat a + # duplication of a test within test_traverse_dict_and_list, but + # salt.utils.data.subdict_match() does more than just invoke + # salt.utils.traverse_list_and_dict() so this particular assertion is a + # sanity check. + assert salt.utils.data.subdict_match(test_two_level_dict_and_list, "abc:ghi") + # This tests the use case of a dict embedded in a list, embedded in a + # list, embedded in a dict. This is a rather absurd case, but it + # confirms that match recursion works properly. 
+ assert salt.utils.data.subdict_match( + test_two_level_dict_and_list, "abc:lorem:ipsum:dolor:sit" + ) + # Test four level dict match for reference + assert salt.utils.data.subdict_match(test_three_level_dict, "a:b:c:v") + # Test regression in 2015.8 where 'a:c:v' would match 'a:b:c:v' + assert not salt.utils.data.subdict_match(test_three_level_dict, "a:c:v") + # Test wildcard match + assert salt.utils.data.subdict_match(test_three_level_dict, "a:*:c:v") + + +@pytest.mark.parametrize( + "wildcard", + [ + ("*:*:*:*"), + ("a:*:*:*"), + ("a:b:*:*"), + ("a:b:ç:*"), + ("a:b:*:d"), + ("a:*:ç:d"), + ("*:b:ç:d"), + ("*:*:ç:d"), + ("*:*:*:d"), + ("a:*:*:d"), + ("a:b:*:ef*"), + ("a:b:*:g*"), + ("a:b:*:j:*"), + ("a:b:*:j:k"), + ("a:b:*:*:k"), + ("a:b:*:*:*"), + ], +) +def test_subdict_match_with_wildcards(wildcard): + """ + Tests subdict matching when wildcards are used in the expression + """ + data = {"a": {"b": {"ç": "d", "é": ["eff", "gee", "8ch"], "ĩ": {"j": "k"}}}} + assert salt.utils.data.subdict_match(data, wildcard) + + +def test_traverse_dict(): + test_two_level_dict = {"foo": {"bar": "baz"}} + + assert {"not_found": "nope"} == salt.utils.data.traverse_dict( + test_two_level_dict, "foo:bar:baz", {"not_found": "nope"} + ) + assert "baz" == salt.utils.data.traverse_dict( + test_two_level_dict, "foo:bar", {"not_found": "not_found"} + ) + + +def test_traverse_dict_and_list(): + test_two_level_dict = {"foo": {"bar": "baz"}} + test_two_level_dict_and_list = { + "foo": ["bar", "baz", {"lorem": {"ipsum": [{"dolor": "sit"}]}}] + } + + # Check traversing too far: salt.utils.data.traverse_dict_and_list() returns + # the value corresponding to a given key path, and baz is a value + # corresponding to the key path foo:bar. 
+ assert {"not_found": "nope"} == salt.utils.data.traverse_dict_and_list( + test_two_level_dict, "foo:bar:baz", {"not_found": "nope"} + ) + # Now check to ensure that foo:bar corresponds to baz + assert "baz" == salt.utils.data.traverse_dict_and_list( + test_two_level_dict, "foo:bar", {"not_found": "not_found"} + ) + # Check traversing too far + assert {"not_found": "nope"} == salt.utils.data.traverse_dict_and_list( + test_two_level_dict_and_list, "foo:bar", {"not_found": "nope"} + ) + # Check index 1 (2nd element) of list corresponding to path 'foo' + assert "baz" == salt.utils.data.traverse_dict_and_list( + test_two_level_dict_and_list, "foo:1", {"not_found": "not_found"} + ) + # Traverse a couple times into dicts embedded in lists + assert "sit" == salt.utils.data.traverse_dict_and_list( + test_two_level_dict_and_list, + "foo:lorem:ipsum:dolor", + {"not_found": "not_found"}, + ) + + # Traverse and match integer key in a nested dict + # https://github.com/saltstack/salt/issues/56444 + assert "it worked" == salt.utils.data.traverse_dict_and_list( + {"foo": {1234: "it worked"}}, + "foo:1234", + "it didn't work", + ) + # Make sure that we properly return the default value when the initial + # attempt fails and YAML-loading the target key doesn't change its + # value. 
+ assert "default" == salt.utils.data.traverse_dict_and_list( + {"foo": {"baz": "didn't work"}}, + "foo:bar", + "default", + ) + + +def test_issue_39709(): + test_two_level_dict_and_list = { + "foo": ["bar", "baz", {"lorem": {"ipsum": [{"dolor": "sit"}]}}] + } + + assert "sit" == salt.utils.data.traverse_dict_and_list( + test_two_level_dict_and_list, + ["foo", "lorem", "ipsum", "dolor"], + {"not_found": "not_found"}, + ) + + +def test_compare_dicts(): + ret = salt.utils.data.compare_dicts(old={"foo": "bar"}, new={"foo": "bar"}) + assert ret == {} + + ret = salt.utils.data.compare_dicts(old={"foo": "bar"}, new={"foo": "woz"}) + expected_ret = {"foo": {"new": "woz", "old": "bar"}} + assert ret == expected_ret + + +def test_compare_lists_no_change(): + ret = salt.utils.data.compare_lists( + old=[1, 2, 3, "a", "b", "c"], new=[1, 2, 3, "a", "b", "c"] + ) + expected = {} + assert ret == expected + + +def test_compare_lists_changes(): + ret = salt.utils.data.compare_lists( + old=[1, 2, 3, "a", "b", "c"], new=[1, 2, 4, "x", "y", "z"] + ) + expected = {"new": [4, "x", "y", "z"], "old": [3, "a", "b", "c"]} + assert ret == expected + + +def test_compare_lists_changes_new(): + ret = salt.utils.data.compare_lists(old=[1, 2, 3], new=[1, 2, 3, "x", "y", "z"]) + expected = {"new": ["x", "y", "z"]} + assert ret == expected + + +def test_compare_lists_changes_old(): + ret = salt.utils.data.compare_lists(old=[1, 2, 3, "a", "b", "c"], new=[1, 2, 3]) + expected = {"old": ["a", "b", "c"]} + assert ret == expected + + +def test_decode(get_test_data, get_BYTES, get_EGGS): + """ + Companion to test_decode_to_str, they should both be kept up-to-date + with one another. + + NOTE: This uses the lambda "_b" defined above in the global scope, + which encodes a string to a bytestring, assuming utf-8. 
+ """ + expected = [ + "unicode_str", + "питон", + 123, + 456.789, + True, + False, + None, + "яйца", + get_BYTES, + [123, 456.789, "спам", True, False, None, "яйца", get_BYTES], + (987, 654.321, "яйца", "яйца", None, (True, "яйца", get_BYTES)), + { + "str_key": "str_val", + None: True, + 123: 456.789, + "яйца": get_BYTES, + "subdict": { + "unicode_key": "яйца", + "tuple": (123, "hello", "world", True, "яйца", get_BYTES), + "list": [456, "спам", False, "яйца", get_BYTES], + }, + }, + SaltOrderedDict([("foo", "bar"), (123, 456), ("яйца", get_BYTES)]), + ] + + ret = salt.utils.data.decode( + get_test_data, + keep=True, + normalize=True, + preserve_dict_class=True, + preserve_tuples=True, + ) + assert ret == expected + + # The binary data in the data structure should fail to decode, even + # using the fallback, and raise an exception. + pytest.raises( + UnicodeDecodeError, + salt.utils.data.decode, + get_test_data, + keep=False, + normalize=True, + preserve_dict_class=True, + preserve_tuples=True, + ) + + # Now munge the expected data so that we get what we would expect if we + # disable preservation of dict class and tuples + expected[10] = [987, 654.321, "яйца", "яйца", None, [True, "яйца", get_BYTES]] + expected[11]["subdict"]["tuple"] = [123, "hello", "world", True, "яйца", get_BYTES] + expected[12] = {"foo": "bar", 123: 456, "яйца": get_BYTES} + + ret = salt.utils.data.decode( + get_test_data, + keep=True, + normalize=True, + preserve_dict_class=False, + preserve_tuples=False, + ) + assert ret == expected + + # Now test single non-string, non-data-structure items, these should + # return the same value when passed to this function + for item in (123, 4.56, True, False, None): + log.debug("Testing decode of %s", item) + assert salt.utils.data.decode(item) == item + + # Test single strings (not in a data structure) + assert salt.utils.data.decode("foo") == "foo" + assert salt.utils.data.decode(_b("bar")) == "bar" + assert salt.utils.data.decode(get_EGGS, 
normalize=True) == "яйца" + assert salt.utils.data.decode(get_EGGS, normalize=False) == get_EGGS + + # Test binary blob + assert salt.utils.data.decode(get_BYTES, keep=True) == get_BYTES + pytest.raises(UnicodeDecodeError, salt.utils.data.decode, get_BYTES, keep=False) + + +def test_circular_refs_dicts(): + test_dict = {"key": "value", "type": "test1"} + test_dict["self"] = test_dict + ret = salt.utils.data._remove_circular_refs(ob=test_dict) + assert ret == {"key": "value", "type": "test1", "self": None} + + +def test_circular_refs_lists(): + test_list = { + "foo": [], + } + test_list["foo"].append((test_list,)) + ret = salt.utils.data._remove_circular_refs(ob=test_list) + assert ret == {"foo": [(None,)]} + + +def test_circular_refs_tuple(): + test_dup = {"foo": "string 1", "bar": "string 1", "ham": 1, "spam": 1} + ret = salt.utils.data._remove_circular_refs(ob=test_dup) + assert ret == {"foo": "string 1", "bar": "string 1", "ham": 1, "spam": 1} + + +def test_decode_to_str(get_test_data, get_BYTES): + """ + Companion to test_decode, they should both be kept up-to-date with one + another. + + NOTE: This uses the lambda "_s" defined above in the global scope, + which converts the string/bytestring to a str type. 
+ """ + expected = [ + _s("unicode_str"), + _s("питон"), + 123, + 456.789, + True, + False, + None, + _s("яйца"), + get_BYTES, + [123, 456.789, _s("спам"), True, False, None, _s("яйца"), get_BYTES], + (987, 654.321, _s("яйца"), _s("яйца"), None, (True, _s("яйца"), get_BYTES)), + { + _s("str_key"): _s("str_val"), + None: True, + 123: 456.789, + _s("яйца"): get_BYTES, + _s("subdict"): { + _s("unicode_key"): _s("яйца"), + _s("tuple"): ( + 123, + _s("hello"), + _s("world"), + True, + _s("яйца"), + get_BYTES, + ), + _s("list"): [456, _s("спам"), False, _s("яйца"), get_BYTES], + }, + }, + SaltOrderedDict([(_s("foo"), _s("bar")), (123, 456), (_s("яйца"), get_BYTES)]), + ] + + ret = salt.utils.data.decode( + get_test_data, + keep=True, + normalize=True, + preserve_dict_class=True, + preserve_tuples=True, + to_str=True, + ) + assert ret == expected + + # The binary data in the data structure should fail to decode, even + # using the fallback, and raise an exception. + pytest.raises( + UnicodeDecodeError, + salt.utils.data.decode, + get_test_data, + keep=False, + normalize=True, + preserve_dict_class=True, + preserve_tuples=True, + to_str=True, + ) + + # Now munge the expected data so that we get what we would expect if we + # disable preservation of dict class and tuples + expected[10] = [ + 987, + 654.321, + _s("яйца"), + _s("яйца"), + None, + [True, _s("яйца"), get_BYTES], + ] + expected[11][_s("subdict")][_s("tuple")] = [ + 123, + _s("hello"), + _s("world"), + True, + _s("яйца"), + get_BYTES, + ] + expected[12] = {_s("foo"): _s("bar"), 123: 456, _s("яйца"): get_BYTES} + ret = salt.utils.data.decode( + get_test_data, + keep=True, + normalize=True, + preserve_dict_class=False, + preserve_tuples=False, + to_str=True, + ) + assert ret == expected -def test_get_value_nested_notfound(): - data = {"a": {"b": {"c": "foo"}}} - assert [{"value": []}] == salt.utils.data.get_value(data, "a:b:d", []) + # Now test single non-string, non-data-structure items, these should + # return 
the same value when passed to this function + for item in (123, 4.56, True, False, None): + log.debug("Testing decode of %s", item) + assert salt.utils.data.decode(item, to_str=True) == item + # Test single strings (not in a data structure) + assert salt.utils.data.decode("foo", to_str=True) == _s("foo") + assert salt.utils.data.decode(_b("bar"), to_str=True) == _s("bar") -def test_get_value_not_found(): - assert [{"value": []}] == salt.utils.data.get_value({}, "a", []) + # Test binary blob + assert salt.utils.data.decode(get_BYTES, keep=True, to_str=True) == get_BYTES + pytest.raises( + UnicodeDecodeError, + salt.utils.data.decode, + get_BYTES, + keep=False, + to_str=True, + ) -def test_get_value_none(): - assert [{"value": None}] == salt.utils.data.get_value({"a": None}, "a") +def test_decode_fallback(): + """ + Test fallback to utf-8 + """ + with patch.object(builtins, "__salt_system_encoding__", "ascii"): + assert salt.utils.data.decode(_b("яйца")) == "яйца" + + +def test_encode(get_test_data, get_BYTES, get_EGGS): + """ + NOTE: This uses the lambda "_b" defined above in the global scope, + which encodes a string to a bytestring, assuming utf-8. + """ + expected = [ + _b("unicode_str"), + _b("питон"), + 123, + 456.789, + True, + False, + None, + _b(get_EGGS), + get_BYTES, + [123, 456.789, _b("спам"), True, False, None, _b(get_EGGS), get_BYTES], + (987, 654.321, _b("яйца"), _b(get_EGGS), None, (True, _b(get_EGGS), get_BYTES)), + { + _b("str_key"): _b("str_val"), + None: True, + 123: 456.789, + _b(get_EGGS): get_BYTES, + _b("subdict"): { + _b("unicode_key"): _b(get_EGGS), + _b("tuple"): ( + 123, + _b("hello"), + _b("world"), + True, + _b(get_EGGS), + get_BYTES, + ), + _b("list"): [456, _b("спам"), False, _b(get_EGGS), get_BYTES], + }, + }, + SaltOrderedDict( + [(_b("foo"), _b("bar")), (123, 456), (_b(get_EGGS), get_BYTES)] + ), + ] + + # Both keep=True and keep=False should work because the get_BYTES data is + # already bytes. 
+ ret = salt.utils.data.encode( + get_test_data, keep=True, preserve_dict_class=True, preserve_tuples=True + ) + assert ret == expected + ret = salt.utils.data.encode( + get_test_data, keep=False, preserve_dict_class=True, preserve_tuples=True + ) + assert ret == expected + # Now munge the expected data so that we get what we would expect if we + # disable preservation of dict class and tuples + expected[10] = [ + 987, + 654.321, + _b("яйца"), + _b(get_EGGS), + None, + [True, _b(get_EGGS), get_BYTES], + ] + expected[11][_b("subdict")][_b("tuple")] = [ + 123, + _b("hello"), + _b("world"), + True, + _b(get_EGGS), + get_BYTES, + ] + expected[12] = {_b("foo"): _b("bar"), 123: 456, _b(get_EGGS): get_BYTES} -def test_get_value_simple_type_path(): - assert [{"value": []}] == salt.utils.data.get_value({"a": 1024}, "a:b", []) + ret = salt.utils.data.encode( + get_test_data, keep=True, preserve_dict_class=False, preserve_tuples=False + ) + assert ret == expected + ret = salt.utils.data.encode( + get_test_data, keep=False, preserve_dict_class=False, preserve_tuples=False + ) + assert ret == expected + # Now test single non-string, non-data-structure items, these should + # return the same value when passed to this function + for item in (123, 4.56, True, False, None): + log.debug("Testing encode of %s", item) + assert salt.utils.data.encode(item) == item -def test_get_value_None_path(): - assert [{"value": None}] == salt.utils.data.get_value({"a": None}, "a:b", []) + # Test single strings (not in a data structure) + assert salt.utils.data.encode("foo") == _b("foo") + assert salt.utils.data.encode(_b("bar")) == _b("bar") + # Test binary blob, nothing should happen even when keep=False since + # the data is already bytes + assert salt.utils.data.encode(get_BYTES, keep=True) == get_BYTES + assert salt.utils.data.encode(get_BYTES, keep=False) == get_BYTES -def test_flatten_recursion_error(): + +def test_encode_keep(): """ - Test the flatten function for reference cycle detection 
+ Whereas we tested the keep argument in test_decode, it is much easier + to do a more comprehensive test of keep in its own function where we + can force the encoding. """ - data = [1, 2, 3, [4]] - data.append(data) - with pytest.raises(RecursionError) as err: - salt.utils.data.flatten(data) - assert str(err.value) == "Reference cycle detected. Check input list." + unicode_str = "питон" + encoding = "ascii" + + # Test single string + assert salt.utils.data.encode(unicode_str, encoding, keep=True) == unicode_str + pytest.raises( + UnicodeEncodeError, + salt.utils.data.encode, + unicode_str, + encoding, + keep=False, + ) + data = [ + unicode_str, + [b"foo", [unicode_str], {b"key": unicode_str}, (unicode_str,)], + { + b"list": [b"foo", unicode_str], + b"dict": {b"key": unicode_str}, + b"tuple": (b"foo", unicode_str), + }, + ([b"foo", unicode_str], {b"key": unicode_str}, (unicode_str,)), + ] -def test_sample(): - lst = ["one", "two", "three", "four"] - assert len(salt.utils.data.sample(lst, 0)) == 0 - assert len(salt.utils.data.sample(lst, 2)) == 2 - pytest.raises(ValueError, salt.utils.data.sample, lst, 5) - assert salt.utils.data.sample(lst, 2, seed="static") == ["four", "two"] + # Since everything was a bytestring aside from the bogus data, the + # return data should be identical. We don't need to test recursive + # decoding, that has already been tested in test_encode. 
+ assert ( + salt.utils.data.encode(data, encoding, keep=True, preserve_tuples=True) == data + ) + pytest.raises( + UnicodeEncodeError, + salt.utils.data.encode, + data, + encoding, + keep=False, + preserve_tuples=True, + ) + for index, _ in enumerate(data): + assert ( + salt.utils.data.encode( + data[index], encoding, keep=True, preserve_tuples=True + ) + == data[index] + ) + pytest.raises( + UnicodeEncodeError, + salt.utils.data.encode, + data[index], + encoding, + keep=False, + preserve_tuples=True, + ) -def test_shuffle(): - lst = ["one", "two", "three", "four"] - assert len(salt.utils.data.shuffle(lst)) == 4 - assert salt.utils.data.shuffle(lst, seed="static") == [ - "four", + +def test_encode_fallback(): + """ + Test fallback to utf-8 + """ + with patch.object(builtins, "__salt_system_encoding__", "ascii"): + assert salt.utils.data.encode("яйца") == _b("яйца") + with patch.object(builtins, "__salt_system_encoding__", "CP1252"): + assert salt.utils.data.encode("Ψ") == _b("Ψ") + + +def test_repack_dict(): + list_of_one_element_dicts = [ + {"dict_key_1": "dict_val_1"}, + {"dict_key_2": "dict_val_2"}, + {"dict_key_3": "dict_val_3"}, + ] + expected_ret = { + "dict_key_1": "dict_val_1", + "dict_key_2": "dict_val_2", + "dict_key_3": "dict_val_3", + } + ret = salt.utils.data.repack_dictlist(list_of_one_element_dicts) + assert ret == expected_ret + + # Try with yaml + yaml_key_val_pair = "- key1: val1" + ret = salt.utils.data.repack_dictlist(yaml_key_val_pair) + assert ret == {"key1": "val1"} + + # Make sure we handle non-yaml junk data + ret = salt.utils.data.repack_dictlist(LOREM_IPSUM) + assert ret == {} + + +def test_stringify(): + pytest.raises(TypeError, salt.utils.data.stringify, 9) + assert salt.utils.data.stringify(["one", "two", "three", 4, 5]) == [ + "one", "two", "three", - "one", + "4", + "5", ] + + +def test_json_query(): + # Raises exception if jmespath module is not found + with patch("salt.utils.data.jmespath", None): + with 
pytest.raises(RuntimeError, match="requires jmespath"): + salt.utils.data.json_query({}, "@") + + # Test search + user_groups = { + "user1": {"groups": ["group1", "group2", "group3"]}, + "user2": {"groups": ["group1", "group2"]}, + "user3": {"groups": ["group3"]}, + } + expression = "*.groups[0]" + primary_groups = ["group1", "group1", "group3"] + assert sorted(salt.utils.data.json_query(user_groups, expression)) == primary_groups + + +def test_nop(): + """ + Test cases where nothing will be done. + """ + # Test with dictionary without recursion + old_dict = { + "foo": "bar", + "bar": {"baz": {"qux": "quux"}}, + "baz": ["qux", {"foo": "bar"}], + } + new_dict = salt.utils.data.filter_falsey(old_dict) + assert old_dict == new_dict + # Check returned type equality + assert type(old_dict) is type(new_dict) + # Test dictionary with recursion + new_dict = salt.utils.data.filter_falsey(old_dict, recurse_depth=3) + assert old_dict == new_dict + # Test with list + old_list = ["foo", "bar"] + new_list = salt.utils.data.filter_falsey(old_list) + assert old_list == new_list + # Check returned type equality + assert type(old_list) is type(new_list) + # Test with set + old_set = {"foo", "bar"} + new_set = salt.utils.data.filter_falsey(old_set) + assert old_set == new_set + # Check returned type equality + assert type(old_set) is type(new_set) + # Test with SaltOrderedDict + old_dict = SaltOrderedDict( + [ + ("foo", "bar"), + ("bar", SaltOrderedDict([("qux", "quux")])), + ("baz", ["qux", SaltOrderedDict([("foo", "bar")])]), + ] + ) + new_dict = salt.utils.data.filter_falsey(old_dict) + assert old_dict == new_dict + assert type(old_dict) is type(new_dict) + # Test excluding int + old_list = [0] + new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[int]) + assert old_list == new_list + # Test excluding str (or unicode) (or both) + old_list = [""] + new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[str]) + assert old_list == new_list + # Test excluding 
list + old_list = [[]] + new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[type([])]) + assert old_list == new_list + # Test excluding dict + old_list = [{}] + new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[type({})]) + assert old_list == new_list + + +def test_filter_dict_no_recurse(): + """ + Test filtering a dictionary without recursing. + This will only filter out key-values where the values are falsey. + """ + old_dict = { + "foo": None, + "bar": {"baz": {"qux": None, "quux": "", "foo": []}}, + "baz": ["qux"], + "qux": {}, + "quux": [], + } + new_dict = salt.utils.data.filter_falsey(old_dict) + expect_dict = { + "bar": {"baz": {"qux": None, "quux": "", "foo": []}}, + "baz": ["qux"], + } + assert expect_dict == new_dict + assert type(expect_dict) is type(new_dict) + + +def test_filter_dict_recurse(): + """ + Test filtering a dictionary with recursing. + This will filter out any key-values where the values are falsey or when + the values *become* falsey after filtering their contents (in case they + are lists or dicts). + """ + old_dict = { + "foo": None, + "bar": {"baz": {"qux": None, "quux": "", "foo": []}}, + "baz": ["qux"], + "qux": {}, + "quux": [], + } + new_dict = salt.utils.data.filter_falsey(old_dict, recurse_depth=3) + expect_dict = {"baz": ["qux"]} + assert expect_dict == new_dict + assert type(expect_dict) is type(new_dict) + + +def test_filter_list_no_recurse(): + """ + Test filtering a list without recursing. + This will only filter out items which are falsey. + """ + old_list = ["foo", None, [], {}, 0, ""] + new_list = salt.utils.data.filter_falsey(old_list) + expect_list = ["foo"] + assert expect_list == new_list + assert type(expect_list) is type(new_list) + # Ensure nested values are *not* filtered out. 
+ old_list = [ + "foo", + ["foo"], + ["foo", None], + {"foo": 0}, + {"foo": "bar", "baz": []}, + [{"foo": ""}], + ] + new_list = salt.utils.data.filter_falsey(old_list) + assert old_list == new_list + assert type(old_list) is type(new_list) + + +def test_filter_list_recurse(): + """ + Test filtering a list with recursing. + This will filter out any items which are falsey, or which become falsey + after filtering their contents (in case they are lists or dicts). + """ + old_list = [ + "foo", + ["foo"], + ["foo", None], + {"foo": 0}, + {"foo": "bar", "baz": []}, + [{"foo": ""}], + ] + new_list = salt.utils.data.filter_falsey(old_list, recurse_depth=3) + expect_list = ["foo", ["foo"], ["foo"], {"foo": "bar"}] + assert expect_list == new_list + assert type(expect_list) is type(new_list) + + +def test_filter_set_no_recurse(): + """ + Test filtering a set without recursing. + Note that a set cannot contain unhashable types, so recursion is not possible. + """ + old_set = {"foo", None, 0, ""} + new_set = salt.utils.data.filter_falsey(old_set) + expect_set = {"foo"} + assert expect_set == new_set + assert type(expect_set) is type(new_set) + + +def test_filter_ordereddict_no_recurse(): + """ + Test filtering an SaltOrderedDict without recursing. + """ + old_dict = SaltOrderedDict( + [ + ("foo", None), + ( + "bar", + SaltOrderedDict( + [ + ( + "baz", + SaltOrderedDict([("qux", None), ("quux", ""), ("foo", [])]), + ) + ] + ), + ), + ("baz", ["qux"]), + ("qux", {}), + ("quux", []), + ] + ) + new_dict = salt.utils.data.filter_falsey(old_dict) + expect_dict = SaltOrderedDict( + [ + ( + "bar", + SaltOrderedDict( + [ + ( + "baz", + SaltOrderedDict([("qux", None), ("quux", ""), ("foo", [])]), + ) + ] + ), + ), + ("baz", ["qux"]), + ] + ) + assert expect_dict == new_dict + assert type(expect_dict) is type(new_dict) + + +def test_filter_ordereddict_recurse(): + """ + Test filtering an SaltOrderedDict with recursing. 
+ """ + old_dict = SaltOrderedDict( + [ + ("foo", None), + ( + "bar", + SaltOrderedDict( + [ + ( + "baz", + SaltOrderedDict([("qux", None), ("quux", ""), ("foo", [])]), + ) + ] + ), + ), + ("baz", ["qux"]), + ("qux", {}), + ("quux", []), + ] + ) + new_dict = salt.utils.data.filter_falsey(old_dict, recurse_depth=3) + expect_dict = SaltOrderedDict([("baz", ["qux"])]) + assert expect_dict == new_dict + assert type(expect_dict) is type(new_dict) + + +def test_filter_list_recurse_limit(): + """ + Test filtering a list with recursing, but with a limited depth. + Note that the top-level is always processed, so a recursion depth of 2 + means that two *additional* levels are processed. + """ + old_list = [None, [None, [None, [None]]]] + new_list = salt.utils.data.filter_falsey(old_list, recurse_depth=2) + assert [[[[None]]]] == new_list + + +def test_filter_dict_recurse_limit(): + """ + Test filtering a dict with recursing, but with a limited depth. + Note that the top-level is always processed, so a recursion depth of 2 + means that two *additional* levels are processed. + """ + old_dict = { + "one": None, + "foo": {"two": None, "bar": {"three": None, "baz": {"four": None}}}, + } + new_dict = salt.utils.data.filter_falsey(old_dict, recurse_depth=2) + assert {"foo": {"bar": {"baz": {"four": None}}}} == new_dict + + +def test_filter_exclude_types(): + """ + Test filtering a list recursively, but also ignoring (i.e. not filtering) + out certain types that can be falsey. 
+ """ + # Ignore int, unicode + old_list = [ + "foo", + ["foo"], + ["foo", None], + {"foo": 0}, + {"foo": "bar", "baz": []}, + [{"foo": ""}], + ] + new_list = salt.utils.data.filter_falsey( + old_list, recurse_depth=3, ignore_types=[int, str] + ) + assert [ + "foo", + ["foo"], + ["foo"], + {"foo": 0}, + {"foo": "bar"}, + [{"foo": ""}], + ] == new_list + # Ignore list + old_list = [ + "foo", + ["foo"], + ["foo", None], + {"foo": 0}, + {"foo": "bar", "baz": []}, + [{"foo": ""}], + ] + new_list = salt.utils.data.filter_falsey( + old_list, recurse_depth=3, ignore_types=[type([])] + ) + assert ["foo", ["foo"], ["foo"], {"foo": "bar", "baz": []}, []] == new_list + # Ignore dict + old_list = [ + "foo", + ["foo"], + ["foo", None], + {"foo": 0}, + {"foo": "bar", "baz": []}, + [{"foo": ""}], + ] + new_list = salt.utils.data.filter_falsey( + old_list, recurse_depth=3, ignore_types=[type({})] + ) + assert ["foo", ["foo"], ["foo"], {}, {"foo": "bar"}, [{}]] == new_list + # Ignore NoneType + old_list = [ + "foo", + ["foo"], + ["foo", None], + {"foo": 0}, + {"foo": "bar", "baz": []}, + [{"foo": ""}], + ] + new_list = salt.utils.data.filter_falsey( + old_list, recurse_depth=3, ignore_types=[type(None)] + ) + assert ["foo", ["foo"], ["foo", None], {"foo": "bar"}] == new_list + + +def test_list_equality(): + """ + Test cases where equal lists are compared. + """ + test_list = [0, 1, 2] + assert {} == salt.utils.data.recursive_diff(test_list, test_list) + + test_list = [[0], [1], [0, 1, 2]] + assert {} == salt.utils.data.recursive_diff(test_list, test_list) + + +def test_dict_equality(): + """ + Test cases where equal dicts are compared. + """ + test_dict = {"foo": "bar", "bar": {"baz": {"qux": "quux"}}, "frop": 0} + assert {} == salt.utils.data.recursive_diff(test_dict, test_dict) + + +def test_ordereddict_equality(): + """ + Test cases where equal SaltOrderedDicts are compared. 
+ """ + test_dict = SaltOrderedDict( + [ + ("foo", "bar"), + ("bar", SaltOrderedDict([("baz", SaltOrderedDict([("qux", "quux")]))])), + ("frop", 0), + ] + ) + assert {} == salt.utils.data.recursive_diff(test_dict, test_dict) + + +def test_mixed_equality(): + """ + Test cases where mixed nested lists and dicts are compared. + """ + test_data = { + "foo": "bar", + "baz": [0, 1, 2], + "bar": {"baz": [{"qux": "quux"}, {"froop", 0}]}, + } + assert {} == salt.utils.data.recursive_diff(test_data, test_data) + + +def test_set_equality(): + """ + Test cases where equal sets are compared. + """ + test_set = {0, 1, 2, 3, "foo"} + assert {} == salt.utils.data.recursive_diff(test_set, test_set) + + # This is a bit of an oddity, as python seems to sort the sets in memory + # so both sets end up with the same ordering (0..3). + set_one = {0, 1, 2, 3} + set_two = {3, 2, 1, 0} + assert {} == salt.utils.data.recursive_diff(set_one, set_two) + + +def test_tuple_equality(): + """ + Test cases where equal tuples are compared. + """ + test_tuple = (0, 1, 2, 3, "foo") + assert {} == salt.utils.data.recursive_diff(test_tuple, test_tuple) + + +def test_list_inequality(): + """ + Test cases where two inequal lists are compared. 
+ """ + list_one = [0, 1, 2] + list_two = ["foo", "bar", "baz"] + expected_result = {"old": list_one, "new": list_two} + assert expected_result == salt.utils.data.recursive_diff(list_one, list_two) + expected_result = {"new": list_one, "old": list_two} + assert expected_result == salt.utils.data.recursive_diff(list_two, list_one) + + list_one = [0, "foo", 1, "bar"] + list_two = [1, "foo", 1, "qux"] + expected_result = {"old": [0, "bar"], "new": [1, "qux"]} + assert expected_result == salt.utils.data.recursive_diff(list_one, list_two) + expected_result = {"new": [0, "bar"], "old": [1, "qux"]} + assert expected_result == salt.utils.data.recursive_diff(list_two, list_one) + + list_one = [0, 1, [2, 3]] + list_two = [0, 1, ["foo", "bar"]] + expected_result = {"old": [[2, 3]], "new": [["foo", "bar"]]} + assert expected_result == salt.utils.data.recursive_diff(list_one, list_two) + expected_result = {"new": [[2, 3]], "old": [["foo", "bar"]]} + assert expected_result == salt.utils.data.recursive_diff(list_two, list_one) + + +def test_dict_inequality(): + """ + Test cases where two inequal dicts are compared. + """ + dict_one = {"foo": 1, "bar": 2, "baz": 3} + dict_two = {"foo": 2, 1: "bar", "baz": 3} + expected_result = {"old": {"foo": 1, "bar": 2}, "new": {"foo": 2, 1: "bar"}} + assert expected_result == salt.utils.data.recursive_diff(dict_one, dict_two) + expected_result = {"new": {"foo": 1, "bar": 2}, "old": {"foo": 2, 1: "bar"}} + assert expected_result == salt.utils.data.recursive_diff(dict_two, dict_one) + + dict_one = {"foo": {"bar": {"baz": 1}}} + dict_two = {"foo": {"qux": {"baz": 1}}} + expected_result = {"old": dict_one, "new": dict_two} + assert expected_result == salt.utils.data.recursive_diff(dict_one, dict_two) + expected_result = {"new": dict_one, "old": dict_two} + assert expected_result == salt.utils.data.recursive_diff(dict_two, dict_one) + + +def test_ordereddict_inequality(): + """ + Test cases where two inequal SaltOrderedDicts are compared. 
+ """ + odict_one = SaltOrderedDict([("foo", "bar"), ("bar", "baz")]) + odict_two = SaltOrderedDict([("bar", "baz"), ("foo", "bar")]) + expected_result = {"old": odict_one, "new": odict_two} + assert expected_result == salt.utils.data.recursive_diff(odict_one, odict_two) + + +def test_set_inequality(): + """ + Test cases where two inequal sets are compared. + Tricky as the sets are compared zipped, so shuffled sets of equal values + are considered different. + """ + set_one = {0, 1, 2, 4} + set_two = {0, 1, 3, 4} + expected_result = {"old": {2}, "new": {3}} + assert expected_result == salt.utils.data.recursive_diff(set_one, set_two) + expected_result = {"new": {2}, "old": {3}} + assert expected_result == salt.utils.data.recursive_diff(set_two, set_one) + + # It is unknown how different python versions will store sets in memory. + # Python 2.7 seems to sort it (i.e. set_one below becomes {0, 1, 'foo', 'bar'} + # However Python 3.6.8 stores it differently each run. + # So just test for "not equal" here. + set_one = {0, "foo", 1, "bar"} + set_two = {"foo", 1, "bar", 2} + expected_result = {} + assert expected_result != salt.utils.data.recursive_diff(set_one, set_two) + + +def test_mixed_inequality(): + """ + Test cases where two mixed dicts/iterables that are different are compared. 
+ """ + dict_one = {"foo": [1, 2, 3]} + dict_two = {"foo": [3, 2, 1]} + expected_result = {"old": {"foo": [1, 3]}, "new": {"foo": [3, 1]}} + assert expected_result == salt.utils.data.recursive_diff(dict_one, dict_two) + expected_result = {"new": {"foo": [1, 3]}, "old": {"foo": [3, 1]}} + assert expected_result == salt.utils.data.recursive_diff(dict_two, dict_one) + + list_one = [1, 2, {"foo": ["bar", {"foo": 1, "bar": 2}]}] + list_two = [3, 4, {"foo": ["qux", {"foo": 1, "bar": 2}]}] + expected_result = { + "old": [1, 2, {"foo": ["bar"]}], + "new": [3, 4, {"foo": ["qux"]}], + } + assert expected_result == salt.utils.data.recursive_diff(list_one, list_two) + expected_result = { + "new": [1, 2, {"foo": ["bar"]}], + "old": [3, 4, {"foo": ["qux"]}], + } + assert expected_result == salt.utils.data.recursive_diff(list_two, list_one) + + mixed_one = {"foo": {0, 1, 2}, "bar": [0, 1, 2]} + mixed_two = {"foo": {1, 2, 3}, "bar": [1, 2, 3]} + expected_result = { + "old": {"foo": {0}, "bar": [0, 1, 2]}, + "new": {"foo": {3}, "bar": [1, 2, 3]}, + } + assert expected_result == salt.utils.data.recursive_diff(mixed_one, mixed_two) + expected_result = { + "new": {"foo": {0}, "bar": [0, 1, 2]}, + "old": {"foo": {3}, "bar": [1, 2, 3]}, + } + assert expected_result == salt.utils.data.recursive_diff(mixed_two, mixed_one) + + +def test_tuple_inequality(): + """ + Test cases where two tuples that are different are compared. + """ + tuple_one = (1, 2, 3) + tuple_two = (3, 2, 1) + expected_result = {"old": (1, 3), "new": (3, 1)} + assert expected_result == salt.utils.data.recursive_diff(tuple_one, tuple_two) + + +def test_list_vs_set(): + """ + Test case comparing a list with a set, will be compared unordered. 
+ """ + mixed_one = [1, 2, 3] + mixed_two = {3, 2, 1} + expected_result = {} + assert expected_result == salt.utils.data.recursive_diff(mixed_one, mixed_two) + assert expected_result == salt.utils.data.recursive_diff(mixed_two, mixed_one) + + +def test_dict_vs_ordereddict(): + """ + Test case comparing a dict with an ordereddict, will be compared unordered. + """ + test_dict = {"foo": "bar", "bar": "baz"} + test_odict = SaltOrderedDict([("foo", "bar"), ("bar", "baz")]) + assert {} == salt.utils.data.recursive_diff(test_dict, test_odict) + assert {} == salt.utils.data.recursive_diff(test_odict, test_dict) + + test_odict2 = SaltOrderedDict([("bar", "baz"), ("foo", "bar")]) + assert {} == salt.utils.data.recursive_diff(test_dict, test_odict2) + assert {} == salt.utils.data.recursive_diff(test_odict2, test_dict) + + +def test_list_ignore_ignored(): + """ + Test case comparing two lists with ignore-list supplied (which is not used + when comparing lists). + """ + list_one = [1, 2, 3] + list_two = [3, 2, 1] + expected_result = {"old": [1, 3], "new": [3, 1]} + assert expected_result == salt.utils.data.recursive_diff( + list_one, list_two, ignore_keys=[1, 3] + ) + + +def test_dict_ignore(): + """ + Test case comparing two dicts with ignore-list supplied. + """ + dict_one = {"foo": 1, "bar": 2, "baz": 3} + dict_two = {"foo": 3, "bar": 2, "baz": 1} + expected_result = {"old": {"baz": 3}, "new": {"baz": 1}} + assert expected_result == salt.utils.data.recursive_diff( + dict_one, dict_two, ignore_keys=["foo"] + ) + + +def test_ordereddict_ignore(): + """ + Test case comparing two SaltOrderedDicts with ignore-list supplied. + """ + odict_one = SaltOrderedDict([("foo", 1), ("bar", 2), ("baz", 3)]) + odict_two = SaltOrderedDict([("baz", 1), ("bar", 2), ("foo", 3)]) + # The key 'foo' will be ignored, which means the key from the other SaltOrderedDict + # will always be considered "different" since SaltOrderedDicts are compared ordered. 
+ expected_result = { + "old": SaltOrderedDict([("baz", 3)]), + "new": SaltOrderedDict([("baz", 1)]), + } + assert expected_result == salt.utils.data.recursive_diff( + odict_one, odict_two, ignore_keys=["foo"] + ) + + +def test_dict_vs_ordereddict_ignore(): + """ + Test case comparing a dict with an SaltOrderedDict with ignore-list supplied. + """ + dict_one = {"foo": 1, "bar": 2, "baz": 3} + odict_two = SaltOrderedDict([("foo", 3), ("bar", 2), ("baz", 1)]) + expected_result = {"old": {"baz": 3}, "new": SaltOrderedDict([("baz", 1)])} + assert expected_result == salt.utils.data.recursive_diff( + dict_one, odict_two, ignore_keys=["foo"] + ) + + +def test_mixed_nested_ignore(): + """ + Test case comparing mixed, nested items with ignore-list supplied. + """ + dict_one = {"foo": [1], "bar": {"foo": 1, "bar": 2}, "baz": 3} + dict_two = {"foo": [2], "bar": {"foo": 3, "bar": 2}, "baz": 1} + expected_result = {"old": {"baz": 3}, "new": {"baz": 1}} + assert expected_result == salt.utils.data.recursive_diff( + dict_one, dict_two, ignore_keys=["foo"] + ) + + +def test_ordered_dict_unequal_length(): + """ + Test case comparing two SaltOrderedDicts of unequal length. + """ + odict_one = SaltOrderedDict([("foo", 1), ("bar", 2), ("baz", 3)]) + odict_two = SaltOrderedDict([("foo", 1), ("bar", 2)]) + expected_result = {"old": SaltOrderedDict([("baz", 3)]), "new": {}} + assert expected_result == salt.utils.data.recursive_diff(odict_one, odict_two) + + +def test_list_unequal_length(): + """ + Test case comparing two lists of unequal length. + """ + list_one = [1, 2, 3] + list_two = [1, 2, 3, 4] + expected_result = {"old": [], "new": [4]} + assert expected_result == salt.utils.data.recursive_diff(list_one, list_two) + + +def test_set_unequal_length(): + """ + Test case comparing two sets of unequal length. + This does not do anything special, as it is unordered. 
+ """ + set_one = {1, 2, 3} + set_two = {4, 3, 2, 1} + expected_result = {"old": set(), "new": {4}} + assert expected_result == salt.utils.data.recursive_diff(set_one, set_two) + + +def test_tuple_unequal_length(): + """ + Test case comparing two tuples of unequal length. + This should be the same as comparing two ordered lists. + """ + tuple_one = (1, 2, 3) + tuple_two = (1, 2, 3, 4) + expected_result = {"old": (), "new": (4,)} + assert expected_result == salt.utils.data.recursive_diff(tuple_one, tuple_two) + + +def test_list_unordered(): + """ + Test case comparing two lists unordered. + """ + list_one = [1, 2, 3, 4] + list_two = [4, 3, 2] + expected_result = {"old": [1], "new": []} + assert expected_result == salt.utils.data.recursive_diff( + list_one, list_two, ignore_order=True + ) + + +def test_mixed_nested_unordered(): + """ + Test case comparing nested dicts/lists unordered. + """ + dict_one = {"foo": {"bar": [1, 2, 3]}, "bar": [{"foo": 4}, 0]} + dict_two = {"foo": {"bar": [3, 2, 1]}, "bar": [0, {"foo": 4}]} + expected_result = {} + assert expected_result == salt.utils.data.recursive_diff( + dict_one, dict_two, ignore_order=True + ) + expected_result = { + "old": {"foo": {"bar": [1, 3]}, "bar": [{"foo": 4}, 0]}, + "new": {"foo": {"bar": [3, 1]}, "bar": [0, {"foo": 4}]}, + } + assert expected_result == salt.utils.data.recursive_diff(dict_one, dict_two) + + +def test_ordered_dict_unordered(): + """ + Test case comparing SaltOrderedDicts unordered. + """ + odict_one = SaltOrderedDict([("foo", 1), ("bar", 2), ("baz", 3)]) + odict_two = SaltOrderedDict([("baz", 3), ("bar", 2), ("foo", 1)]) + expected_result = {} + assert expected_result == salt.utils.data.recursive_diff( + odict_one, odict_two, ignore_order=True + ) + + +def test_ignore_missing_keys_dict(): + """ + Test case ignoring missing keys on a comparison of dicts. 
+ """ + dict_one = {"foo": 1, "bar": 2, "baz": 3} + dict_two = {"bar": 3} + expected_result = {"old": {"bar": 2}, "new": {"bar": 3}} + assert expected_result == salt.utils.data.recursive_diff( + dict_one, dict_two, ignore_missing_keys=True + ) + + +def test_ignore_missing_keys_ordered_dict(): + """ + Test case not ignoring missing keys on a comparison of SaltOrderedDicts. + """ + odict_one = SaltOrderedDict([("foo", 1), ("bar", 2), ("baz", 3)]) + odict_two = SaltOrderedDict([("bar", 3)]) + expected_result = {"old": odict_one, "new": odict_two} + assert expected_result == salt.utils.data.recursive_diff( + odict_one, odict_two, ignore_missing_keys=True + ) + + +def test_ignore_missing_keys_recursive(): + """ + Test case ignoring missing keys on a comparison of nested dicts. + """ + dict_one = {"foo": {"bar": 2, "baz": 3}} + dict_two = {"foo": {"baz": 3}} + expected_result = {} + assert expected_result == salt.utils.data.recursive_diff( + dict_one, dict_two, ignore_missing_keys=True + ) + # Compare from dict-in-dict + dict_two = {} + assert expected_result == salt.utils.data.recursive_diff( + dict_one, dict_two, ignore_missing_keys=True + ) + # Compare from dict-in-list + dict_one = {"foo": ["bar", {"baz": 3}]} + dict_two = {"foo": ["bar", {}]} + assert expected_result == salt.utils.data.recursive_diff( + dict_one, dict_two, ignore_missing_keys=True + ) diff --git a/tests/pytests/unit/utils/test_gitfs.py b/tests/pytests/unit/utils/test_gitfs.py new file mode 100644 index 000000000000..de630cd867e5 --- /dev/null +++ b/tests/pytests/unit/utils/test_gitfs.py @@ -0,0 +1,251 @@ +import os +import time + +import pytest + +import salt.config +import salt.fileserver.gitfs +import salt.utils.gitfs +from salt.exceptions import FileserverConfigError +from tests.support.mock import MagicMock, patch + +try: + HAS_PYGIT2 = ( + salt.utils.gitfs.PYGIT2_VERSION + and salt.utils.gitfs.PYGIT2_VERSION >= salt.utils.gitfs.PYGIT2_MINVER + and salt.utils.gitfs.LIBGIT2_VERSION + and 
salt.utils.gitfs.LIBGIT2_VERSION >= salt.utils.gitfs.LIBGIT2_MINVER + ) +except AttributeError: + HAS_PYGIT2 = False + + +if HAS_PYGIT2: + import pygit2 + + +@pytest.fixture +def minion_opts(tmp_path): + """ + Default minion configuration with relative temporary paths to not require root permissions. + """ + root_dir = tmp_path / "minion" + opts = salt.config.DEFAULT_MINION_OPTS.copy() + opts["__role"] = "minion" + opts["root_dir"] = str(root_dir) + for name in ("cachedir", "pki_dir", "sock_dir", "conf_dir"): + dirpath = root_dir / name + dirpath.mkdir(parents=True) + opts[name] = str(dirpath) + opts["log_file"] = "logs/minion.log" + return opts + + +@pytest.mark.parametrize( + "role_name,role_class", + ( + ("gitfs", salt.utils.gitfs.GitFS), + ("git_pillar", salt.utils.gitfs.GitPillar), + ("winrepo", salt.utils.gitfs.WinRepo), + ), +) +def test_provider_case_insensitive_gitfs_provider(minion_opts, role_name, role_class): + """ + Ensure that both lowercase and non-lowercase values are supported + """ + provider = "GitPython" + key = "{}_provider".format(role_name) + with patch.object(role_class, "verify_gitpython", MagicMock(return_value=True)): + with patch.object(role_class, "verify_pygit2", MagicMock(return_value=False)): + args = [minion_opts, {}] + kwargs = {"init_remotes": False} + if role_name == "winrepo": + kwargs["cache_root"] = "/tmp/winrepo-dir" + with patch.dict(minion_opts, {key: provider}): + # Try to create an instance with uppercase letters in + # provider name. If it fails then a + # FileserverConfigError will be raised, so no assert is + # necessary. + role_class(*args, **kwargs) + # Now try to instantiate an instance with all lowercase + # letters. Again, no need for an assert here. 
+ role_class(*args, **kwargs) + + +@pytest.mark.parametrize( + "role_name,role_class", + ( + ("gitfs", salt.utils.gitfs.GitFS), + ("git_pillar", salt.utils.gitfs.GitPillar), + ("winrepo", salt.utils.gitfs.WinRepo), + ), +) +def test_valid_provider_gitfs_provider(minion_opts, role_name, role_class): + """ + Ensure that an invalid provider is not accepted, raising a + FileserverConfigError. + """ + + def _get_mock(verify, provider): + """ + Return a MagicMock with the desired return value + """ + return MagicMock(return_value=verify.endswith(provider)) + + key = "{}_provider".format(role_name) + for provider in salt.utils.gitfs.GIT_PROVIDERS: + verify = "verify_gitpython" + mock1 = _get_mock(verify, provider) + with patch.object(role_class, verify, mock1): + verify = "verify_pygit2" + mock2 = _get_mock(verify, provider) + with patch.object(role_class, verify, mock2): + args = [minion_opts, {}] + kwargs = {"init_remotes": False} + if role_name == "winrepo": + kwargs["cache_root"] = "/tmp/winrepo-dir" + with patch.dict(minion_opts, {key: provider}): + role_class(*args, **kwargs) + with patch.dict(minion_opts, {key: "foo"}): + # Set the provider name to a known invalid provider + # and make sure it raises an exception. 
+ with pytest.raises(FileserverConfigError): + role_class(*args, **kwargs) + + +@pytest.fixture +def _prepare_remote_repository_pygit2(tmp_path): + remote = os.path.join(tmp_path, "pygit2-repo") + filecontent = "This is an empty README file" + filename = "README" + signature = pygit2.Signature( + "Dummy Commiter", "dummy@dummy.com", int(time.time()), 0 + ) + repository = pygit2.init_repository(remote, False) + builder = repository.TreeBuilder() + tree = builder.write() + commit = repository.create_commit( + "HEAD", signature, signature, "Create master branch", tree, [] + ) + repository.create_reference("refs/tags/simple_tag", commit) + with salt.utils.files.fopen( + os.path.join(repository.workdir, filename), "w" + ) as file: + file.write(filecontent) + blob = repository.create_blob_fromworkdir(filename) + builder = repository.TreeBuilder() + builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB) + tree = builder.write() + repository.index.read() + repository.index.add(filename) + repository.index.write() + commit = repository.create_commit( + "HEAD", + signature, + signature, + "Added a README", + tree, + [repository.head.target], + ) + repository.create_tag( + "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message" + ) + return remote + + +@pytest.fixture +def _prepare_provider(tmp_path, minion_opts, _prepare_remote_repository_pygit2): + cache = tmp_path / "pygit2-repo-cache" + minion_opts.update( + { + "cachedir": str(cache), + "gitfs_disable_saltenv_mapping": False, + "gitfs_base": "master", + "gitfs_insecure_auth": False, + "gitfs_mountpoint": "", + "gitfs_passphrase": "", + "gitfs_password": "", + "gitfs_privkey": "", + "gitfs_provider": "pygit2", + "gitfs_pubkey": "", + "gitfs_ref_types": ["branch", "tag", "sha"], + "gitfs_refspecs": [ + "+refs/heads/*:refs/remotes/origin/*", + "+refs/tags/*:refs/tags/*", + ], + "gitfs_root": "", + "gitfs_saltenv_blacklist": [], + "gitfs_saltenv_whitelist": [], + "gitfs_ssl_verify": True, + 
"gitfs_update_interval": 3, + "gitfs_user": "", + "verified_gitfs_provider": "pygit2", + } + ) + per_remote_defaults = { + "base": "master", + "disable_saltenv_mapping": False, + "insecure_auth": False, + "ref_types": ["branch", "tag", "sha"], + "passphrase": "", + "mountpoint": "", + "password": "", + "privkey": "", + "pubkey": "", + "refspecs": [ + "+refs/heads/*:refs/remotes/origin/*", + "+refs/tags/*:refs/tags/*", + ], + "root": "", + "saltenv_blacklist": [], + "saltenv_whitelist": [], + "ssl_verify": True, + "update_interval": 60, + "user": "", + } + per_remote_only = ("all_saltenvs", "name", "saltenv") + override_params = tuple(per_remote_defaults) + cache_root = cache / "gitfs" + role = "gitfs" + provider = salt.utils.gitfs.Pygit2( + minion_opts, + _prepare_remote_repository_pygit2, + per_remote_defaults, + per_remote_only, + override_params, + str(cache_root), + role, + ) + return provider + + +@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support") +@pytest.mark.skip_on_windows( + reason="Skip Pygit2 on windows, due to pygit2 access error on windows" +) +def test_checkout_pygit2(_prepare_provider): + provider = _prepare_provider + provider.remotecallbacks = None + provider.credentials = None + provider.init_remote() + provider.fetch() + provider.branch = "master" + assert provider.get_cachedir() in provider.checkout() + provider.branch = "simple_tag" + assert provider.get_cachedir() in provider.checkout() + provider.branch = "annotated_tag" + assert provider.get_cachedir() in provider.checkout() + provider.branch = "does_not_exist" + assert provider.checkout() is None + + +@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support") +@pytest.mark.skip_on_windows( + reason="Skip Pygit2 on windows, due to pygit2 access error on windows" +) +@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support") +@pytest.mark.skip_on_windows( + reason="Skip Pygit2 on windows, due to pygit2 access 
error on windows" +) +def test_get_cachedir_basename_pygit2(_prepare_provider): + assert "_" == _prepare_provider.get_cache_basename() diff --git a/tests/pytests/unit/utils/test_http.py b/tests/pytests/unit/utils/test_http.py index 52bf3d2ca286..dfb60085de2d 100644 --- a/tests/pytests/unit/utils/test_http.py +++ b/tests/pytests/unit/utils/test_http.py @@ -1,7 +1,9 @@ import pytest import requests +from pytestshellutils.utils import ports +from werkzeug.wrappers import Response # pylint: disable=3rd-party-module-not-gated -import salt.utils.http +import salt.utils.http as http from tests.support.mock import MagicMock, patch @@ -16,10 +18,10 @@ def test_requests_session_verify_ssl_false(ssl_webserver, integration_files_dir) if verify is True or verify is None: with pytest.raises(requests.exceptions.SSLError) as excinfo: - session = salt.utils.http.session(**kwargs) + session = http.session(**kwargs) ret = session.get(ssl_webserver.url("this.txt")) else: - session = salt.utils.http.session(**kwargs) + session = http.session(**kwargs) ret = session.get(ssl_webserver.url("this.txt")) assert ret.status_code == 200 @@ -29,7 +31,7 @@ def test_session_ca_bundle_verify_false(): test salt.utils.http.session when using both ca_bunlde and verify_ssl false """ - ret = salt.utils.http.session(ca_bundle="/tmp/test_bundle", verify_ssl=False) + ret = http.session(ca_bundle="/tmp/test_bundle", verify_ssl=False) assert ret is False @@ -38,7 +40,7 @@ def test_session_headers(): test salt.utils.http.session when setting headers """ - ret = salt.utils.http.session(headers={"Content-Type": "application/json"}) + ret = http.session(headers={"Content-Type": "application/json"}) assert ret.headers["Content-Type"] == "application/json" @@ -49,5 +51,260 @@ def test_session_ca_bundle(): fpath = "/tmp/test_bundle" patch_os = patch("os.path.exists", MagicMock(return_value=True)) with patch_os: - ret = salt.utils.http.session(ca_bundle=fpath) + ret = http.session(ca_bundle=fpath) assert ret.verify 
== fpath + + +def test_sanitize_url_hide_fields_none(): + """ + Tests sanitizing a url when the hide_fields kwarg is None. + """ + mock_url = "https://api.testing.com/?&foo=bar&test=testing" + ret = http.sanitize_url(mock_url, hide_fields=None) + assert ret == mock_url + + +def test_sanitize_url_no_elements(): + """ + Tests sanitizing a url when no elements should be sanitized. + """ + mock_url = "https://api.testing.com/?&foo=bar&test=testing" + ret = http.sanitize_url(mock_url, [""]) + assert ret == mock_url + + +def test_sanitize_url_single_element(): + """ + Tests sanitizing a url with only a single element to be sanitized. + """ + mock_url = ( + "https://api.testing.com/?&keep_it_secret=abcdefghijklmn" + "&api_action=module.function" + ) + mock_ret = ( + "https://api.testing.com/?&keep_it_secret=XXXXXXXXXX&" + "api_action=module.function" + ) + ret = http.sanitize_url(mock_url, ["keep_it_secret"]) + assert ret == mock_ret + + +def test_sanitize_url_multiple_elements(): + """ + Tests sanitizing a url with multiple elements to be sanitized. + """ + mock_url = ( + "https://api.testing.com/?rootPass=badpassword%21" + "&skipChecks=True&api_key=abcdefghijklmn" + "&NodeID=12345&api_action=module.function" + ) + mock_ret = ( + "https://api.testing.com/?rootPass=XXXXXXXXXX" + "&skipChecks=True&api_key=XXXXXXXXXX" + "&NodeID=12345&api_action=module.function" + ) + ret = http.sanitize_url(mock_url, ["api_key", "rootPass"]) + assert ret == mock_ret + + +# _sanitize_components tests + + +def test_sanitize_components_no_elements(): + """ + Tests when zero elements need to be sanitized. + """ + mock_component_list = ["foo=bar", "bar=baz", "hello=world"] + mock_ret = "foo=bar&bar=baz&hello=world&" + ret = http._sanitize_url_components(mock_component_list, "api_key") + assert ret == mock_ret + + +def test_sanitize_components_one_element(): + """ + Tests a single component to be sanitized. 
+    """
+    mock_component_list = ["foo=bar", "api_key=abcdefghijklmnop"]
+    mock_ret = "foo=bar&api_key=XXXXXXXXXX&"
+    ret = http._sanitize_url_components(mock_component_list, "api_key")
+    assert ret == mock_ret
+
+
+def test_sanitize_components_multiple_elements():
+    """
+    Tests two components to be sanitized.
+    """
+    mock_component_list = ["foo=bar", "foo=baz", "api_key=testing"]
+    mock_ret = "foo=XXXXXXXXXX&foo=XXXXXXXXXX&api_key=testing&"
+    ret = http._sanitize_url_components(mock_component_list, "foo")
+    assert ret == mock_ret
+
+
+@pytest.mark.slow_test
+def test_query_null_response():
+    """
+    This tests that we get a null response when raise_error=False and the
+    host/port cannot be reached.
+    """
+    host = "127.0.0.1"
+
+    port = ports.get_unused_localhost_port()
+
+    url = "http://{host}:{port}/".format(host=host, port=port)
+    result = http.query(url, raise_error=False)
+    assert result == {"body": None}, result
+
+
+def test_query_error_handling():
+    ret = http.query("http://127.0.0.1:0")
+    assert isinstance(ret, dict)
+    assert isinstance(ret.get("error", None), str)
+    ret = http.query("http://myfoobardomainthatnotexist")
+    assert isinstance(ret, dict)
+    assert isinstance(ret.get("error", None), str)
+
+
+def test_parse_cookie_header():
+    header = "; ".join(
+        [
+            "foo=bar",
+            "expires=Mon, 03-Aug-20 14:26:27 GMT",
+            "path=/",
+            "domain=.mydomain.tld",
+            "HttpOnly",
+            "SameSite=Lax",
+            "Secure",
+        ]
+    )
+    ret = http.parse_cookie_header(header)
+    cookie = ret.pop(0)
+    assert cookie.name == "foo", cookie.name
+    assert cookie.value == "bar", cookie.value
+    assert cookie.expires == 1596464787, cookie.expires
+    assert cookie.path == "/", cookie.path
+    assert cookie.domain == ".mydomain.tld", cookie.domain
+    assert cookie.secure
+    # Only one cookie should have been returned, if anything is left in the
+    # parse_cookie_header return then something went wrong.
+ assert not ret + + +@pytest.mark.requires_network +def test_requests_multipart_formdata_post(httpserver): + """ + Test handling of a multipart/form-data POST using the requests backend + """ + match_this = ( + "{0}\r\nContent-Disposition: form-data;" + ' name="fieldname_here"\r\n\r\nmydatahere\r\n{0}--\r\n' + ) + + def mirror_post_handler(request): + return Response(request.data) + + httpserver.expect_request( + "/multipart_form_data", + ).respond_with_handler(mirror_post_handler) + url = httpserver.url_for("/multipart_form_data") + + ret = http.query( + url, + method="POST", + data="mydatahere", + formdata=True, + formdata_fieldname="fieldname_here", + backend="requests", + ) + body = ret.get("body", "") + boundary = body[: body.find("\r")] + assert body == match_this.format(boundary) + + +def test_query_proxy(httpserver): + """ + Test http.query with tornado and with proxy opts set + and then test with no_proxy set to ensure we dont + run into issue #55192 again. + """ + data = "mydatahere" + opts = { + "proxy_host": "127.0.0.1", + "proxy_port": 88, + "proxy_username": "salt_test", + "proxy_password": "super_secret", + } + + with patch("requests.Session") as mock_session: + mock_session.return_value = MagicMock() + ret = http.query( + "https://fake_url", + method="POST", + data=data, + backend="tornado", + opts=opts, + ) + + assert mock_session.return_value.proxies == { + "http": "http://salt_test:super_secret@127.0.0.1:88" + } + + opts["no_proxy"] = [httpserver.host] + + httpserver.expect_request( + "/no_proxy_test", + ).respond_with_data(data) + url = httpserver.url_for("/no_proxy_test") + + with patch("requests.Session") as mock_session: + mock_session.return_value = MagicMock() + ret = http.query( + url, + method="POST", + data=data, + backend="tornado", + opts=opts, + ) + assert not isinstance(mock_session.return_value.proxies, dict) + + ret = http.query(url, method="POST", data=data, backend="tornado", opts=opts) + body = ret.get("body", "") + assert body 
== data + + +@pytest.mark.parametrize("backend", ["requests", "tornado", "urllib2"]) +def test_backends_decode_body_false(httpserver, backend): + """ + test all backends when using + decode_body=False that it returns + bytes and does not try to decode + """ + url = "/test-bytes" + data = b"test-bytes" + httpserver.expect_request( + url, + ).respond_with_data(data, content_type="application/octet-stream") + ret = http.query( + httpserver.url_for(url), + backend=backend, + decode_body=False, + ) + body = ret.get("body", "") + assert isinstance(body, bytes) + + +@pytest.mark.parametrize("backend", ["requests", "tornado", "urllib2"]) +def test_backends_decode_body_true(httpserver, backend): + """ + test all backends when using decode_body=True that it returns string and decodes it. + """ + url = "/test-decoded-bytes" + data = b"test-decoded-bytes" + httpserver.expect_request( + url, + ).respond_with_data(data, content_type="application/octet-stream") + ret = http.query( + httpserver.url_for(url), + backend=backend, + ) + body = ret.get("body", "") + assert isinstance(body, str) diff --git a/tests/pytests/unit/utils/test_listdiffer.py b/tests/pytests/unit/utils/test_listdiffer.py new file mode 100644 index 000000000000..dfc42a4fe9f5 --- /dev/null +++ b/tests/pytests/unit/utils/test_listdiffer.py @@ -0,0 +1,180 @@ +import pytest + +from salt.utils.dictdiffer import RecursiveDictDiffer +from salt.utils.listdiffer import list_diff + + +@pytest.fixture +def get_old_list(): + return [ + {"key": 1, "value": "foo1", "int_value": 101}, + {"key": 2, "value": "foo2", "int_value": 102}, + {"key": 3, "value": "foo3", "int_value": 103}, + ] + + +@pytest.fixture +def get_new_list(): + return [ + {"key": 1, "value": "foo1", "int_value": 101}, + {"key": 2, "value": "foo2", "int_value": 112}, + {"key": 5, "value": "foo5", "int_value": 105}, + ] + + +@pytest.fixture +def get_list_diff(get_old_list, get_new_list): + return list_diff(get_old_list, get_new_list, key="key") + + +def 
test_added(get_list_diff): + assert len(get_list_diff.added) == 1 + assert get_list_diff.added[0] == {"key": 5, "value": "foo5", "int_value": 105} + + +def test_removed(get_list_diff): + assert len(get_list_diff.removed) == 1 + assert get_list_diff.removed[0] == {"key": 3, "value": "foo3", "int_value": 103} + + +def test_diffs(get_list_diff): + assert len(get_list_diff.diffs) == 3 + assert get_list_diff.diffs[0] == {2: {"int_value": {"new": 112, "old": 102}}} + + # Added items + assert get_list_diff.diffs[1] == { + 5: { + "int_value": {"new": 105, "old": RecursiveDictDiffer.NONE_VALUE}, + "key": {"new": 5, "old": RecursiveDictDiffer.NONE_VALUE}, + "value": {"new": "foo5", "old": RecursiveDictDiffer.NONE_VALUE}, + } + } + + # Removed items + assert get_list_diff.diffs[2] == { + 3: { + "int_value": {"new": RecursiveDictDiffer.NONE_VALUE, "old": 103}, + "key": {"new": RecursiveDictDiffer.NONE_VALUE, "old": 3}, + "value": {"new": RecursiveDictDiffer.NONE_VALUE, "old": "foo3"}, + } + } + + +def test_new_values(get_list_diff): + assert len(get_list_diff.new_values) == 2 + assert get_list_diff.new_values[0] == {"key": 2, "int_value": 112} + assert get_list_diff.new_values[1] == {"key": 5, "value": "foo5", "int_value": 105} + + +def test_old_values(get_list_diff): + assert len(get_list_diff.old_values) == 2 + assert get_list_diff.old_values[0] == {"key": 2, "int_value": 102} + assert get_list_diff.old_values[1] == {"key": 3, "value": "foo3", "int_value": 103} + + +def test_changed_all(get_list_diff): + assert get_list_diff.changed(selection="all") == [ + "key.2.int_value", + "key.5.int_value", + "key.5.value", + "key.3.int_value", + "key.3.value", + ] + + +def test_changed_intersect(get_list_diff): + assert get_list_diff.changed(selection="intersect") == ["key.2.int_value"] + + +def test_changes_str(get_list_diff): + expected = """\tidentified by key 2: +\tint_value from 102 to 112 +\tidentified by key 3: +\twill be removed +\tidentified by key 5: +\twill be added +""" + 
assert get_list_diff.changes_str == expected + + +def test_intersect(get_list_diff): + expected = [ + { + "key": 1, + "old": {"key": 1, "value": "foo1", "int_value": 101}, + "new": {"key": 1, "value": "foo1", "int_value": 101}, + }, + { + "key": 2, + "old": {"key": 2, "value": "foo2", "int_value": 102}, + "new": {"key": 2, "value": "foo2", "int_value": 112}, + }, + ] + test_isect = get_list_diff.intersect + assert test_isect == expected + + +def test_remove_diff_intersect(get_list_diff): + expected = [ + { + "key": 1, + "old": {"key": 1, "int_value": 101}, + "new": {"key": 1, "int_value": 101}, + }, + { + "key": 2, + "old": {"key": 2, "int_value": 102}, + "new": {"key": 2, "int_value": 112}, + }, + ] + + get_list_diff.remove_diff(diff_key="value") + test_isect = get_list_diff.intersect + assert test_isect == expected + + +def test_remove_diff_removed(get_list_diff): + expected = [ + { + "key": 1, + "old": {"key": 1, "value": "foo1", "int_value": 101}, + "new": {"key": 1, "value": "foo1", "int_value": 101}, + }, + { + "key": 2, + "old": {"key": 2, "value": "foo2", "int_value": 102}, + "new": {"key": 2, "value": "foo2", "int_value": 112}, + }, + ] + get_list_diff.remove_diff(diff_key="value", diff_list="removed") + test_isect = get_list_diff.intersect + assert test_isect == expected + + +def test_changes_str2(get_list_diff): + expected = """ key=2 (updated): + int_value from 102 to 112 + key=3 (removed) + key=5 (added): {'key': 5, 'value': 'foo5', 'int_value': 105}""" + test_changes = get_list_diff.changes_str2 + assert test_changes == expected + + +def test_current_list(get_list_diff): + expected = [ + {"key": 1, "value": "foo1", "int_value": 101}, + {"key": 2, "value": "foo2", "int_value": 102}, + {"key": 3, "value": "foo3", "int_value": 103}, + ] + test_curr_list = get_list_diff.current_list + assert test_curr_list == expected + + +def test_new_list(get_list_diff): + expected = [ + {"key": 1, "value": "foo1", "int_value": 101}, + {"key": 2, "value": "foo2", 
"int_value": 112},
+        {"key": 5, "value": "foo5", "int_value": 105},
+    ]
+    test_new_list = get_list_diff.new_list
+    assert test_new_list == expected
diff --git a/tests/pytests/unit/utils/test_mako.py b/tests/pytests/unit/utils/test_mako.py
new file mode 100644
index 000000000000..952cf44652ee
--- /dev/null
+++ b/tests/pytests/unit/utils/test_mako.py
@@ -0,0 +1,28 @@
+import pytest
+
+from tests.support.mock import Mock, call, patch
+
+pytest.importorskip("mako")
+
+# This import needs to be after the above importorskip so that no ImportError
+# is raised if Mako is not installed
+from salt.utils.mako import SaltMakoTemplateLookup
+
+
+def test_mako_template_lookup(minion_opts):
+    """
+    The shutdown method can be called without raising an exception when the
+    file_client does not have a destroy method
+    """
+    # Test SaltCacheLoader creating and destroying the file client created
+    file_client = Mock()
+    with patch("salt.fileclient.get_file_client", return_value=file_client):
+        loader = SaltMakoTemplateLookup(minion_opts)
+        assert loader._file_client is None
+        assert loader.file_client() is file_client
+        assert loader._file_client is file_client
+        try:
+            loader.destroy()
+        except AttributeError:
+            pytest.fail("Regression when calling SaltMakoTemplateLookup.destroy()")
+        assert file_client.mock_calls == [call.destroy()]
diff --git a/tests/pytests/unit/utils/test_nacl.py b/tests/pytests/unit/utils/test_nacl.py
index baf4024b8193..77f40d6fbf98 100644
--- a/tests/pytests/unit/utils/test_nacl.py
+++ b/tests/pytests/unit/utils/test_nacl.py
@@ -1,10 +1,40 @@
 """
 Unit tests for the salt.utils.nacl module
 """
+import os
 
-import salt.utils.nacl
+import pytest
+
+import salt.modules.config as config
+import salt.utils.files
 from tests.support.mock import patch
 
+pytest.importorskip("nacl.public")
+pytest.importorskip("nacl.secret")
+
+import salt.utils.nacl as nacl
+
+
+@pytest.fixture
+def configure_loader_modules():
+    return {
+        nacl: {"__salt__": {"config.get": config.get}},
+        
config: {"__opts__": {}}, + } + + +@pytest.fixture(scope="module") +def test_keygen(): + """ + test nacl.keygen function + + Note: nacl.keygen returns base64 encoded values + """ + ret = nacl.keygen() + assert "sk" in ret + assert "pk" in ret + return ret + def test_fips_mode(): """ @@ -14,3 +44,122 @@ def test_fips_mode(): with patch("salt.utils.nacl.__opts__", opts, create=True): ret = salt.utils.nacl.__virtual__() assert ret == (False, "nacl utils not available in FIPS mode") + + +def test_keygen_sk_file(test_keygen): + """ + test nacl.keygen function + with sk_file set + """ + with pytest.helpers.temp_file("test_keygen_sk_file") as fpath: + with salt.utils.files.fopen(fpath, "wb") as wfh: + wfh.write(test_keygen["sk"]) + + # test sk_file + ret = nacl.keygen(sk_file=fpath) + assert "saved pk_file: {}.pub".format(fpath) == ret + salt.utils.files.remove(str(fpath) + ".pub") + + +def test_keygen_keyfile(test_keygen): + """ + test nacl.keygen function + with keyfile set + """ + with pytest.helpers.temp_file("test_keygen_keyfile") as fpath: + with salt.utils.files.fopen(fpath, "wb") as wfh: + wfh.write(test_keygen["sk"]) + + ret = nacl.keygen(keyfile=fpath) + assert "saved pk_file: {}.pub".format(fpath) == ret + salt.utils.files.remove(str(fpath) + ".pub") + + +def test_enc_keyfile(test_keygen): + """ + test nacl.enc function + with keyfile and pk_file set + """ + with pytest.helpers.temp_file("test_enc_keyfile") as fpath: + with salt.utils.files.fopen(fpath, "wb") as wfh: + wfh.write(test_keygen["sk"]) + with salt.utils.files.fopen(str(fpath) + ".pub", "wb") as wfhpub: + wfhpub.write(test_keygen["pk"]) + + kwargs = { + "opts": {"pki_dir": os.path.dirname(fpath)}, + "keyfile": str(fpath), + "pk_file": str(fpath) + ".pub", + } + ret = nacl.enc("blah", **kwargs) + assert isinstance(ret, bytes) + salt.utils.files.remove(str(fpath) + ".pub") + + +def test_enc_sk_file(test_keygen): + """ + test nacl.enc function + with sk_file and pk_file set + """ + with 
pytest.helpers.temp_file("test_enc_sk_file") as fpath: + with salt.utils.files.fopen(fpath, "wb") as wfh: + wfh.write(test_keygen["sk"]) + with salt.utils.files.fopen(str(fpath) + ".pub", "wb") as wfhpub: + wfhpub.write(test_keygen["pk"]) + + kwargs = { + "opts": {"pki_dir": os.path.dirname(fpath)}, + "sk_file": str(fpath), + "pk_file": str(fpath) + ".pub", + } + ret = nacl.enc("blah", **kwargs) + assert isinstance(ret, bytes) + salt.utils.files.remove(str(fpath) + ".pub") + + +def test_dec_keyfile(test_keygen): + """ + test nacl.dec function + with keyfile and pk_file set + """ + with pytest.helpers.temp_file("test_dec_keyfile") as fpath: + with salt.utils.files.fopen(fpath, "wb") as wfh: + wfh.write(test_keygen["sk"]) + with salt.utils.files.fopen(str(fpath) + ".pub", "wb") as wfhpub: + wfhpub.write(test_keygen["pk"]) + + kwargs = { + "opts": {"pki_dir": os.path.dirname(fpath)}, + "keyfile": str(fpath), + "pk_file": str(fpath) + ".pub", + } + + enc_data = nacl.enc("blah", **kwargs) + ret = nacl.dec(enc_data, **kwargs) + assert isinstance(ret, bytes) + assert ret == b"blah" + salt.utils.files.remove(str(fpath) + ".pub") + + +def test_dec_sk_file(test_keygen): + """ + test nacl.dec function + with sk_file and pk_file set + """ + with pytest.helpers.temp_file("test_dec_sk_file") as fpath: + with salt.utils.files.fopen(fpath, "wb") as wfh: + wfh.write(test_keygen["sk"]) + with salt.utils.files.fopen(str(fpath) + ".pub", "wb") as wfhpub: + wfhpub.write(test_keygen["pk"]) + + kwargs = { + "opts": {"pki_dir": os.path.dirname(fpath)}, + "sk_file": str(fpath), + "pk_file": str(fpath) + ".pub", + } + + enc_data = nacl.enc("blah", **kwargs) + ret = nacl.dec(enc_data, **kwargs) + assert isinstance(ret, bytes) + assert ret == b"blah" + salt.utils.files.remove(str(fpath) + ".pub") diff --git a/tests/pytests/unit/utils/test_network.py b/tests/pytests/unit/utils/test_network.py index c5f976f6749d..12d545b01545 100644 --- a/tests/pytests/unit/utils/test_network.py +++ 
b/tests/pytests/unit/utils/test_network.py @@ -1,8 +1,1597 @@ +import logging +import socket +import textwrap + +import pytest + +import salt.exceptions import salt.utils.network +import salt.utils.network as network +from salt._compat import ipaddress +from tests.support.mock import MagicMock, create_autospec, mock_open, patch +pytestmark = [ + pytest.mark.skip_on_windows, +] -def test_junos_ifconfig_output_parsing(): - ret = salt.utils.network._junos_interfaces_ifconfig( - "inet mtu 0 local=" + " " * 3456 + +log = logging.getLogger(__name__) + +LINUX = """\ +eth0 Link encap:Ethernet HWaddr e0:3f:49:85:6a:af + inet addr:10.10.10.56 Bcast:10.10.10.255 Mask:255.255.252.0 + inet6 addr: fe80::e23f:49ff:fe85:6aaf/64 Scope:Link + UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1 + RX packets:643363 errors:0 dropped:0 overruns:0 frame:0 + TX packets:196539 errors:0 dropped:0 overruns:0 carrier:0 + collisions:0 txqueuelen:1000 + RX bytes:386388355 (368.4 MiB) TX bytes:25600939 (24.4 MiB) + +lo Link encap:Local Loopback + inet addr:127.0.0.1 Mask:255.0.0.0 + inet6 addr: ::1/128 Scope:Host + UP LOOPBACK RUNNING MTU:65536 Metric:1 + RX packets:548901 errors:0 dropped:0 overruns:0 frame:0 + TX packets:548901 errors:0 dropped:0 overruns:0 carrier:0 + collisions:0 txqueuelen:0 + RX bytes:613479895 (585.0 MiB) TX bytes:613479895 (585.0 MiB) +""" + +FREEBSD = """ +em0: flags=8843 metric 0 mtu 1500 + options=4219b + ether 00:30:48:ff:ff:ff + inet 10.10.10.250 netmask 0xffffffe0 broadcast 10.10.10.255 + inet 10.10.10.56 netmask 0xffffffc0 broadcast 10.10.10.63 + media: Ethernet autoselect (1000baseT ) + status: active +em1: flags=8c02 metric 0 mtu 1500 + options=4219b + ether 00:30:48:aa:aa:aa + media: Ethernet autoselect + status: no carrier +plip0: flags=8810 metric 0 mtu 1500 +lo0: flags=8049 metric 0 mtu 16384 + options=3 + inet6 fe80::1%lo0 prefixlen 64 scopeid 0x8 + inet6 ::1 prefixlen 128 + inet 127.0.0.1 netmask 0xff000000 + nd6 options=3 +tun0: flags=8051 metric 0 mtu 1500 
+ options=80000 + inet 10.12.0.1 --> 10.12.0.2 netmask 0xffffffff + Opened by PID 1964 +""" + +SOLARIS = """\ +lo0: flags=2001000849 mtu 8232 index 1 + inet 127.0.0.1 netmask ff000000 +net0: flags=100001100943 mtu 1500 index 2 + inet 10.10.10.38 netmask ffffffe0 broadcast 10.10.10.63 +ilbint0: flags=110001100843 mtu 1500 index 3 + inet 10.6.0.11 netmask ffffff00 broadcast 10.6.0.255 +ilbext0: flags=110001100843 mtu 1500 index 4 + inet 10.10.11.11 netmask ffffffe0 broadcast 10.10.11.31 +ilbext0:1: flags=110001100843 mtu 1500 index 4 + inet 10.10.11.12 netmask ffffffe0 broadcast 10.10.11.31 +vpn0: flags=1000011008d1 mtu 1480 index 5 + inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 + tunnel hop limit 64 + inet 10.6.0.14 --> 10.6.0.15 netmask ff000000 +lo0: flags=2002000849 mtu 8252 index 1 + inet6 ::1/128 +net0: flags=120002004941 mtu 1500 index 2 + inet6 fe80::221:9bff:fefd:2a22/10 +ilbint0: flags=120002000840 mtu 1500 index 3 + inet6 ::/0 +ilbext0: flags=120002000840 mtu 1500 index 4 + inet6 ::/0 +vpn0: flags=120002200850 mtu 1480 index 5 + inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 + tunnel hop limit 64 + inet6 ::/0 --> fe80::b2d6:7c10 +""" + +NETBSD = """\ +vioif0: flags=0x8943 mtu 1500 + ec_capabilities=1 + ec_enabled=0 + address: 00:a0:98:e6:83:18 + inet 192.168.1.80/24 broadcast 192.168.1.255 flags 0x0 + inet6 fe80::2a0:98ff:fee6:8318%vioif0/64 flags 0x0 scopeid 0x1 +lo0: flags=0x8049 mtu 33624 + inet 127.0.0.1/8 flags 0x0 + inet6 ::1/128 flags 0x20 + inet6 fe80::1%lo0/64 flags 0x0 scopeid 0x2 +""" + +FREEBSD_SOCKSTAT = """\ +USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS +root python2.7 1294 41 tcp4 127.0.0.1:61115 127.0.0.1:4506 +""" + +FREEBSD_SOCKSTAT_WITH_FAT_PID = """\ +USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS +salt-master python2.781106 35 tcp4 127.0.0.1:61115 127.0.0.1:4506 +""" + +OPENBSD_NETSTAT = """\ +Active Internet connections +Proto Recv-Q Send-Q Local Address Foreign Address (state) +tcp 0 0 127.0.0.1.61115 
127.0.0.1.4506 ESTABLISHED +""" + +LINUX_NETLINK_SS_OUTPUT = """\ +State Recv-Q Send-Q Local Address:Port Peer Address:Port +TIME-WAIT 0 0 [::1]:8009 [::1]:40368 +LISTEN 0 128 127.0.0.1:5903 0.0.0.0:* +ESTAB 0 0 [::ffff:127.0.0.1]:4506 [::ffff:127.0.0.1]:32315 +ESTAB 0 0 192.168.122.1:4506 192.168.122.177:24545 +ESTAB 0 0 127.0.0.1:56726 127.0.0.1:4505 +ESTAB 0 0 ::ffff:1.2.3.4:5678 ::ffff:1.2.3.4:4505 +""" + +IPV4_SUBNETS = { + True: ("10.10.0.0/24",), + False: ("10.10.0.0", "10.10.0.0/33", "FOO", 9, "0.9.800.1000/24"), +} +IPV6_SUBNETS = { + True: ("::1/128",), + False: ("::1", "::1/129", "FOO", 9, "aj01::feac/64"), +} + + +_ip = ipaddress.ip_address + + +@pytest.fixture(scope="module") +def linux_interfaces_dict(): + return { + "eth0": { + "hwaddr": "e0:3f:49:85:6a:af", + "inet": [ + { + "address": "10.10.10.56", + "broadcast": "10.10.10.255", + "netmask": "255.255.252.0", + } + ], + "inet6": [ + { + "address": "fe80::e23f:49ff:fe85:6aaf", + "prefixlen": "64", + "scope": "link", + } + ], + "up": True, + }, + "lo": { + "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [{"address": "::1", "prefixlen": "128", "scope": "host"}], + "up": True, + }, + } + + +@pytest.fixture(scope="module") +def freebsd_interfaces_dict(): + return { + "": {"up": False}, + "em0": { + "hwaddr": "00:30:48:ff:ff:ff", + "inet": [ + { + "address": "10.10.10.250", + "broadcast": "10.10.10.255", + "netmask": "255.255.255.224", + }, + { + "address": "10.10.10.56", + "broadcast": "10.10.10.63", + "netmask": "255.255.255.192", + }, + ], + "up": True, + }, + "em1": {"hwaddr": "00:30:48:aa:aa:aa", "up": False}, + "lo0": { + "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [ + {"address": "fe80::1", "prefixlen": "64", "scope": "0x8"}, + {"address": "::1", "prefixlen": "128", "scope": None}, + ], + "up": True, + }, + "plip0": {"up": False}, + "tun0": { + "inet": [{"address": "10.12.0.1", "netmask": "255.255.255.255"}], + "up": True, + }, + } + + +def 
test_sanitize_host_ip(): + ret = network.sanitize_host("10.1./2.$3") + assert ret == "10.1.2.3" + + +def test_sanitize_host_name(): + """ + Should not remove the underscore + """ + ret = network.sanitize_host("foo_bar") + assert ret == "foo_bar" + + +def test_host_to_ips(): + """ + NOTE: When this test fails it's usually because the IP address has + changed. In these cases, we just need to update the IP address in the + assertion. + """ + + def getaddrinfo_side_effect(host, *args): + if host == "github.com": + return [ + (2, 1, 6, "", ("192.30.255.112", 0)), + (2, 1, 6, "", ("192.30.255.113", 0)), + ] + if host == "ipv6host.foo": + return [ + (socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)), + ] + raise socket.gaierror(-2, "Name or service not known") + + getaddrinfo_mock = MagicMock(side_effect=getaddrinfo_side_effect) + with patch.object(socket, "getaddrinfo", getaddrinfo_mock): + # Test host that can be resolved + ret = network.host_to_ips("github.com") + assert ret == ["192.30.255.112", "192.30.255.113"] + + # Test ipv6 + ret = network.host_to_ips("ipv6host.foo") + assert ret == ["2001:a71::1"] + # Test host that can't be resolved + ret = network.host_to_ips("someothersite.com") + assert ret is None + + +def test_generate_minion_id(): + assert network.generate_minion_id() + + +def test__generate_minion_id_with_unicode_in_etc_hosts(): + """ + Test that unicode in /etc/hosts doesn't raise an error when + _generate_minion_id() helper is called to gather the hosts. 
+ """ + content = textwrap.dedent( + """\ + # 以下为主机名解析 + ## ccc + 127.0.0.1 localhost thisismyhostname # 本机 + """ ) + fopen_mock = mock_open(read_data={"/etc/hosts": content}) + with patch("salt.utils.files.fopen", fopen_mock): + assert "thisismyhostname" in network._generate_minion_id() + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("10.10.0.3", True), + ("0.9.800.1000", False), + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + ("sixteen-char-str", False), + ), +) +def test_is_ip(addr, expected): + assert network.is_ip(addr) is expected + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("10.10.0.3", True), + ("10.100.1", False), + ("2001:db8:0:1:1:1:1:1", False), + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + ("sixteen-char-str", False), + ), +) +def test_is_ipv4(addr, expected): + assert network.is_ipv4(addr) is expected + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("2001:db8:0:1:1:1:1:1", True), + ("0:0:0:0:0:0:0:1", True), + ("::1", True), + ("::", True), + ("2001:0db8:85a3:0000:0000:8a2e:0370:7334", True), + ("2001:0db8:85a3::8a2e:0370:7334", True), + ("2001:0db8:0370:7334", False), + ("2001:0db8:::0370:7334", False), + ("10.0.1.2", False), + ("2001.0db8.85a3.0000.0000.8a2e.0370.7334", False), + # Check 16-char-long unicode string + # https://github.com/saltstack/salt/issues/51258 + ("sixteen-char-str", False), + ), +) +def test_is_ipv6(addr, expected): + assert network.is_ipv6(addr) is expected + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("2001:db8:0:1:1:1:1:1", "2001:db8:0:1:1:1:1:1"), + ("0:0:0:0:0:0:0:1", "::1"), + ("::1", "::1"), + ("::", "::"), + ("2001:0db8:85a3:0000:0000:8a2e:0370:7334", "2001:db8:85a3::8a2e:370:7334"), + ("2001:0db8:85a3::8a2e:0370:7334", "2001:db8:85a3::8a2e:370:7334"), + ("2001:67c:2e8::/48", "2001:67c:2e8::/48"), + ), +) +def test_ipv6(addr, expected): + assert network.ipv6(addr) == expected + + 
+@pytest.mark.parametrize( + "addr,expected", + ( + ("127.0.1.1", True), + ("::1", True), + ("10.0.1.2", False), + ("2001:db8:0:1:1:1:1:1", False), + ), +) +def test_is_loopback(addr, expected): + assert network.is_loopback(addr) is expected + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("10.10.0.3", (_ip("10.10.0.3").compressed, None)), + ("10.10.0.3:1234", (_ip("10.10.0.3").compressed, 1234)), + ( + "2001:0db8:85a3::8a2e:0370:7334", + ( + _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, + None, + ), + ), + ( + "[2001:0db8:85a3::8a2e:0370:7334]:1234", + ( + _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, + 1234, + ), + ), + ("2001:0db8:85a3::7334", (_ip("2001:0db8:85a3::7334").compressed, None)), + ( + "[2001:0db8:85a3::7334]:1234", + ( + _ip("2001:0db8:85a3::7334").compressed, + 1234, + ), + ), + ), +) +def test_parse_host_port_good(addr, expected): + assert network.parse_host_port(addr) == expected + + +@pytest.mark.parametrize( + "addr", + ( + "10.10.0.3/24", + "10.10.0.3::1234", + "2001:0db8:0370:7334", + "2001:0db8:0370::7334]:1234", + "2001:0db8:0370:0:a:b:c:d:1234", + "host name", + "host name:1234", + "10.10.0.3:abcd", + ), +) +def test_parse_host_port_bad_raises_value_error(addr): + with pytest.raises(ValueError): + network.parse_host_port(addr) + + +@pytest.mark.parametrize( + "host", + ( + ( + { + "host": "10.10.0.3", + "port": "", + "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], + "ret": "10.10.0.3", + } + ), + ( + { + "host": "10.10.0.3", + "port": "1234", + "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], + "ret": "10.10.0.3", + } + ), + ( + { + "host": "2001:0db8:85a3::8a2e:0370:7334", + "port": "", + "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], + "ret": "[2001:db8:85a3::8a2e:370:7334]", + } + ), + ( + { + "host": "2001:0db8:85a3::8a2e:370:7334", + "port": "1234", + "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], + "ret": "[2001:db8:85a3::8a2e:370:7334]", + } + ), + ( + { + "host": 
"salt-master", + "port": "1234", + "mocked": [(2, 1, 6, "", ("127.0.0.1", 0))], + "ret": "127.0.0.1", + } + ), + ), +) +def test_dns_check(host): + with patch.object( + socket, + "getaddrinfo", + create_autospec(socket.getaddrinfo, return_value=host["mocked"]), + ): + with patch("socket.socket", create_autospec(socket.socket)): + ret = network.dns_check(host["host"], host["port"]) + assert ret == host["ret"] + + +def test_dns_check_ipv6_filter(): + # raise exception to skip everything after the getaddrinfo call + with patch.object( + socket, + "getaddrinfo", + create_autospec(socket.getaddrinfo, side_effect=Exception), + ) as getaddrinfo: + for ipv6, param in [ + (None, socket.AF_UNSPEC), + (True, socket.AF_INET6), + (False, socket.AF_INET), + ]: + with pytest.raises(Exception): + network.dns_check("foo", "1", ipv6=ipv6) + getaddrinfo.assert_called_with("foo", "1", param, socket.SOCK_STREAM) + + +def test_dns_check_errors(): + with patch.object( + socket, "getaddrinfo", create_autospec(socket.getaddrinfo, return_value=[]) + ): + with pytest.raises( + salt.exceptions.SaltSystemExit, + match="DNS lookup or connection check of 'foo' failed.", + ) as exc_info: + network.dns_check("foo", "1") + + with patch.object( + socket, + "getaddrinfo", + create_autospec(socket.getaddrinfo, side_effect=TypeError), + ): + with pytest.raises( + salt.exceptions.SaltSystemExit, match="Invalid or unresolveable address" + ) as exc_info2: + network.dns_check("foo", "1") + + +def test_test_addrs(): + # subset of real data from getaddrinfo against saltstack.com + addrinfo = [ + (30, 2, 17, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), + (30, 1, 6, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), + (30, 2, 17, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), + (30, 1, 6, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), + (2, 1, 6, "", ("13.35.99.52", 0)), + (2, 2, 17, "", ("13.35.99.85", 0)), + (2, 1, 6, "", ("13.35.99.85", 0)), + (2, 2, 17, "", 
("13.35.99.122", 0)), + ] + with patch("socket.socket", create_autospec(socket.socket)) as s: + # we connect to the first address + addrs = network._test_addrs(addrinfo, 80) + assert len(addrs) == 1 + assert addrs[0] == addrinfo[0][4][0] + + # the first lookup fails, succeeds on next check + s.side_effect = [socket.error, MagicMock()] + addrs = network._test_addrs(addrinfo, 80) + assert len(addrs) == 1 + assert addrs[0] == addrinfo[2][4][0] + + # attempt to connect to resolved address with default timeout + s.side_effect = socket.error + addrs = network._test_addrs(addrinfo, 80) + assert not len(addrs) == 0 + + # nothing can connect, but we've eliminated duplicates + s.side_effect = socket.error + addrs = network._test_addrs(addrinfo, 80) + assert len(addrs) == 5 + + +def test_is_subnet(): + for subnet_data in (IPV4_SUBNETS, IPV6_SUBNETS): + for item in subnet_data[True]: + log.debug("Testing that %s is a valid subnet", item) + assert network.is_subnet(item) + for item in subnet_data[False]: + log.debug("Testing that %s is not a valid subnet", item) + assert not network.is_subnet(item) + + +def test_is_ipv4_subnet(): + for item in IPV4_SUBNETS[True]: + log.debug("Testing that %s is a valid subnet", item) + assert network.is_ipv4_subnet(item) + for item in IPV4_SUBNETS[False]: + log.debug("Testing that %s is not a valid subnet", item) + assert not network.is_ipv4_subnet(item) + + +def test_is_ipv6_subnet(): + for item in IPV6_SUBNETS[True]: + log.debug("Testing that %s is a valid subnet", item) + assert network.is_ipv6_subnet(item) is True + for item in IPV6_SUBNETS[False]: + log.debug("Testing that %s is not a valid subnet", item) + assert network.is_ipv6_subnet(item) is False + + +@pytest.mark.parametrize( + "addr,expected", + ( + (24, "255.255.255.0"), + (21, "255.255.248.0"), + (17, "255.255.128.0"), + (9, "255.128.0.0"), + (36, ""), + ("lol", ""), + ), +) +def test_cidr_to_ipv4_netmask(addr, expected): + assert network.cidr_to_ipv4_netmask(addr) == expected + + 
+def test_number_of_set_bits_to_ipv4_netmask(): + set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFFFF00) + assert set_bits_to_netmask == "255.255.255.0" + set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFF6400) + assert set_bits_to_netmask == "255.255.100.0" + + +@pytest.mark.parametrize( + "hex_num,inversion,expected", + ( + ("0x4A7D2B63", False, "74.125.43.99"), + ("0x4A7D2B63", True, "99.43.125.74"), + ("00000000000000000000FFFF7F000001", False, "127.0.0.1"), + ("0000000000000000FFFF00000100007F", True, "127.0.0.1"), + ("20010DB8000000000000000000000000", False, "2001:db8::"), + ("B80D0120000000000000000000000000", True, "2001:db8::"), + ), +) +def test_hex2ip(hex_num, inversion, expected): + assert network.hex2ip(hex_num, inversion) == expected + + +def test_interfaces_ifconfig_linux(linux_interfaces_dict): + interfaces = network._interfaces_ifconfig(LINUX) + assert interfaces == linux_interfaces_dict + + +def test_interfaces_ifconfig_freebsd(freebsd_interfaces_dict): + interfaces = network._interfaces_ifconfig(FREEBSD) + assert interfaces == freebsd_interfaces_dict + + +def test_interfaces_ifconfig_solaris(): + with patch("salt.utils.platform.is_sunos", return_value=True): + expected_interfaces = { + "ilbint0": { + "inet6": [], + "inet": [ + { + "broadcast": "10.6.0.255", + "netmask": "255.255.255.0", + "address": "10.6.0.11", + } + ], + "up": True, + }, + "lo0": { + "inet6": [{"prefixlen": "128", "address": "::1"}], + "inet": [{"netmask": "255.0.0.0", "address": "127.0.0.1"}], + "up": True, + }, + "ilbext0": { + "inet6": [], + "inet": [ + { + "broadcast": "10.10.11.31", + "netmask": "255.255.255.224", + "address": "10.10.11.11", + }, + { + "broadcast": "10.10.11.31", + "netmask": "255.255.255.224", + "address": "10.10.11.12", + }, + ], + "up": True, + }, + "vpn0": { + "inet6": [], + "inet": [{"netmask": "255.0.0.0", "address": "10.6.0.14"}], + "up": True, + }, + "net0": { + "inet6": [{"prefixlen": "10", "address": 
"fe80::221:9bff:fefd:2a22"}], + "inet": [ + { + "broadcast": "10.10.10.63", + "netmask": "255.255.255.224", + "address": "10.10.10.38", + } + ], + "up": True, + }, + } + interfaces = network._interfaces_ifconfig(SOLARIS) + assert interfaces == expected_interfaces + + +def test_interfaces_ifconfig_netbsd(): + expected_interfaces = { + "lo0": { + "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], + "inet6": [{"address": "fe80::1", "prefixlen": "64", "scope": "lo0"}], + "up": True, + }, + "vioif0": { + "hwaddr": "00:a0:98:e6:83:18", + "inet": [ + { + "address": "192.168.1.80", + "broadcast": "192.168.1.255", + "netmask": "255.255.255.0", + } + ], + "inet6": [ + { + "address": "fe80::2a0:98ff:fee6:8318", + "prefixlen": "64", + "scope": "vioif0", + } + ], + "up": True, + }, + } + interfaces = network._netbsd_interfaces_ifconfig(NETBSD) + assert interfaces == expected_interfaces + + +def test_freebsd_remotes_on(): + with patch("salt.utils.platform.is_sunos", return_value=False): + with patch("salt.utils.platform.is_freebsd", return_value=True): + with patch("subprocess.check_output", return_value=FREEBSD_SOCKSTAT): + remotes = network._freebsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_freebsd_remotes_on_with_fat_pid(): + with patch("salt.utils.platform.is_sunos", return_value=False): + with patch("salt.utils.platform.is_freebsd", return_value=True): + with patch( + "subprocess.check_output", + return_value=FREEBSD_SOCKSTAT_WITH_FAT_PID, + ): + remotes = network._freebsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_netlink_tool_remote_on_a(): + with patch("salt.utils.platform.is_sunos", return_value=False): + with patch("salt.utils.platform.is_linux", return_value=True): + with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): + remotes = network._netlink_tool_remote_on("4506", "local_port") + assert remotes == {"192.168.122.177", "::ffff:127.0.0.1"} + + +def 
test_netlink_tool_remote_on_b(): + with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): + remotes = network._netlink_tool_remote_on("4505", "remote_port") + assert remotes == {"127.0.0.1", "::ffff:1.2.3.4"} + + +def test_openbsd_remotes_on(): + with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT): + remotes = network._openbsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_openbsd_remotes_on_issue_61966(): + """ + Test that the command output is correctly converted to string before + treating it as such + """ + with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT.encode()): + remotes = network._openbsd_remotes_on("4506", "remote") + assert remotes == {"127.0.0.1"} + + +def test_generate_minion_id_distinct(): + """ + Test if minion IDs are distinct in the pool. + """ + with patch("platform.node", MagicMock(return_value="nodename")), patch( + "socket.gethostname", MagicMock(return_value="hostname") + ), patch( + "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), + ): + assert network._generate_minion_id() == [ + "hostname.domainname.blank", + "nodename", + "hostname", + "1.2.3.4", + "5.6.7.8", + ] + + +def test_generate_minion_id_127_name(): + """ + Test if minion IDs can be named 127.foo + """ + with patch("platform.node", MagicMock(return_value="127")), patch( + "socket.gethostname", MagicMock(return_value="127") + ), patch("socket.getfqdn", MagicMock(return_value="127.domainname.blank")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", 
"5.6.7.8"]), + ): + assert network._generate_minion_id() == [ + "127.domainname.blank", + "127", + "1.2.3.4", + "5.6.7.8", + ] + + +def test_generate_minion_id_127_name_startswith(): + """ + Test if minion IDs can be named starting from "127" + """ + expected = [ + "127890.domainname.blank", + "127890", + "1.2.3.4", + "5.6.7.8", + ] + with patch("platform.node", MagicMock(return_value="127890")), patch( + "socket.gethostname", MagicMock(return_value="127890") + ), patch( + "socket.getfqdn", MagicMock(return_value="127890.domainname.blank") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), + ): + assert network._generate_minion_id() == expected + + +def test_generate_minion_id_duplicate(): + """ + Test if IP addresses in the minion IDs are distinct in the pool + """ + expected = ["hostname", "1.2.3.4"] + with patch("platform.node", MagicMock(return_value="hostname")), patch( + "socket.gethostname", MagicMock(return_value="hostname") + ), patch("socket.getfqdn", MagicMock(return_value="hostname")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), + ): + assert network._generate_minion_id() == expected + + +def test_generate_minion_id_platform_used(): + """ + Test if platform.node is used for the first occurrence. + The platform.node is most common hostname resolver before anything else. 
+ """ + with patch( + "platform.node", MagicMock(return_value="very.long.and.complex.domain.name") + ), patch("socket.gethostname", MagicMock(return_value="hostname")), patch( + "socket.getfqdn", MagicMock(return_value="") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "very.long.and.complex.domain.name" + + +def test_generate_minion_id_platform_localhost_filtered(): + """ + Test if localhost is filtered from the first occurrence. + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="pick.me") + ), patch( + "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "hostname.domainname.blank" + + +def test_generate_minion_id_platform_localhost_filtered_all(): + """ + Test if any of the localhost is filtered from everywhere. 
+ """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "1.2.3.4" + + +def test_generate_minion_id_platform_localhost_only(): + """ + Test if there is no other choice but localhost. + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), + ): + assert network.generate_minion_id() == "localhost" + + +def test_generate_minion_id_platform_fqdn(): + """ + Test if fqdn is picked up. + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="pick.me")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), + ): + assert network.generate_minion_id() == "pick.me" + + +def test_generate_minion_id_platform_localhost_addrinfo(): + """ + Test if addinfo is picked up. 
+ """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "pick.me", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), + ): + assert network.generate_minion_id() == "pick.me" + + +def test_generate_minion_id_platform_ip_addr_only(): + """ + Test if IP address is the only what is used as a Minion ID in case no DNS name. + """ + with patch("platform.node", MagicMock(return_value="localhost")), patch( + "socket.gethostname", MagicMock(return_value="ip6-loopback") + ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), + ), patch( + "salt.utils.files.fopen", mock_open() + ), patch( + "salt.utils.network.ip_addrs", + MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"]), + ): + assert network.generate_minion_id() == "1.2.3.4" + + +def test_gen_mac(): + expected_mac = "00:16:3E:01:01:01" + with patch("random.randint", return_value=1) as random_mock: + assert random_mock.return_value == 1 + ret = network.gen_mac("00:16:3E") + assert ret == expected_mac + + +@pytest.mark.parametrize( + "mac_addr", + ( + ("31337"), + ("0001020304056"), + ("00:01:02:03:04:056"), + ("a0:b0:c0:d0:e0:fg"), + ), +) +def test_mac_str_to_bytes_exceptions(mac_addr): + with pytest.raises(ValueError): + network.mac_str_to_bytes(mac_addr) + + +def test_mac_str_to_bytes(): + assert network.mac_str_to_bytes("100806040200") == b"\x10\x08\x06\x04\x02\x00" + assert network.mac_str_to_bytes("f8e7d6c5b4a3") == b"\xf8\xe7\xd6\xc5\xb4\xa3" + + +@pytest.mark.slow_test +def test_generate_minion_id_with_long_hostname(): 
+ """ + Validate the fix for: + + https://github.com/saltstack/salt/issues/51160 + """ + long_name = "localhost-abcdefghijklmnopqrstuvwxyz-abcdefghijklmnopqrstuvwxyz" + with patch("socket.gethostname", MagicMock(return_value=long_name)): + # An exception is raised if unicode is passed to socket.getfqdn + minion_id = network.generate_minion_id() + assert minion_id != "" + + +def test_filter_by_networks_with_no_filter(): + ips = ["10.0.123.200", "10.10.10.10"] + with pytest.raises(TypeError): + network.filter_by_networks(ips) # pylint: disable=no-value-for-parameter + + +def test_filter_by_networks_empty_filter(): + ips = ["10.0.123.200", "10.10.10.10"] + assert network.filter_by_networks(ips, []) == [] + + +def test_filter_by_networks_ips_list(): + ips = [ + "10.0.123.200", + "10.10.10.10", + "193.124.233.5", + "fe80::d210:cf3f:64e7:5423", + ] + expected = [ + "10.0.123.200", + "10.10.10.10", + "fe80::d210:cf3f:64e7:5423", + ] + networks = ["10.0.0.0/8", "fe80::/64"] + assert network.filter_by_networks(ips, networks) == expected + + +def test_filter_by_networks_interfaces_dict(): + interfaces = { + "wlan0": ["192.168.1.100", "217.5.140.67", "2001:db8::ff00:42:8329"], + "eth0": [ + "2001:0DB8:0:CD30:123:4567:89AB:CDEF", + "192.168.1.101", + "10.0.123.201", + ], + } + expected = { + "wlan0": ["192.168.1.100", "2001:db8::ff00:42:8329"], + "eth0": ["2001:0DB8:0:CD30:123:4567:89AB:CDEF", "192.168.1.101"], + } + ret = network.filter_by_networks(interfaces, ["192.168.1.0/24", "2001:db8::/48"]) + assert ret == expected + + +def test_filter_by_networks_catch_all(): + ips = [ + "10.0.123.200", + "10.10.10.10", + "193.124.233.5", + "fe80::d210:cf3f:64e7:5423", + ] + assert network.filter_by_networks(ips, ["0.0.0.0/0", "::/0"]) == ips + + +def test_ip_networks(): + # We don't need to test with each platform's ifconfig/iproute2 output, + # since this test isn't testing getting the interfaces. We already have + # tests for that. 
+ interface_data = network._interfaces_ifconfig(LINUX) + + # Without loopback + ret = network.ip_networks(interface_data=interface_data) + assert ret == ["10.10.8.0/22"] + # Without loopback, specific interface + ret = network.ip_networks(interface="eth0", interface_data=interface_data) + assert ret == ["10.10.8.0/22"] + # Without loopback, multiple specific interfaces + ret = network.ip_networks(interface="eth0,lo", interface_data=interface_data) + assert ret == ["10.10.8.0/22"] + # Without loopback, specific interface (not present) + ret = network.ip_networks(interface="eth1", interface_data=interface_data) + assert ret == [] + # With loopback + ret = network.ip_networks(include_loopback=True, interface_data=interface_data) + assert ret == ["10.10.8.0/22", "127.0.0.0/8"] + # With loopback, specific interface + ret = network.ip_networks( + interface="eth0", include_loopback=True, interface_data=interface_data + ) + assert ret == ["10.10.8.0/22"] + # With loopback, multiple specific interfaces + ret = network.ip_networks( + interface="eth0,lo", include_loopback=True, interface_data=interface_data + ) + assert ret == ["10.10.8.0/22", "127.0.0.0/8"] + # With loopback, specific interface (not present) + ret = network.ip_networks( + interface="eth1", include_loopback=True, interface_data=interface_data + ) + assert ret == [] + + # Verbose, without loopback + ret = network.ip_networks(verbose=True, interface_data=interface_data) + expected_ret1 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + } + assert ret == expected_ret1 + + # Verbose, without loopback, specific interface + ret = network.ip_networks( + interface="eth0", verbose=True, interface_data=interface_data + ) + expected_ret2 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + } + assert ret == expected_ret2 + + # Verbose, without loopback, multiple 
specific interfaces + ret = network.ip_networks( + interface="eth0,lo", verbose=True, interface_data=interface_data + ) + expected_ret3 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + } + assert ret == expected_ret3 + + # Verbose, without loopback, specific interface (not present) + ret = network.ip_networks( + interface="eth1", verbose=True, interface_data=interface_data + ) + assert ret == {} + # Verbose, with loopback + ret = network.ip_networks( + include_loopback=True, verbose=True, interface_data=interface_data + ) + expected_ret4 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + "127.0.0.0/8": { + "prefixlen": 8, + "netmask": "255.0.0.0", + "num_addresses": 16777216, + "address": "127.0.0.0", + }, + } + assert ret == expected_ret4 + + # Verbose, with loopback, specific interface + ret = network.ip_networks( + interface="eth0", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + expected_ret5 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + } + assert ret == expected_ret5 + + # Verbose, with loopback, multiple specific interfaces + ret = network.ip_networks( + interface="eth0,lo", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + expected_ret6 = { + "10.10.8.0/22": { + "prefixlen": 22, + "netmask": "255.255.252.0", + "num_addresses": 1024, + "address": "10.10.8.0", + }, + "127.0.0.0/8": { + "prefixlen": 8, + "netmask": "255.0.0.0", + "num_addresses": 16777216, + "address": "127.0.0.0", + }, + } + assert ret == expected_ret6 + + # Verbose, with loopback, specific interface (not present) + ret = network.ip_networks( + interface="eth1", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + assert ret == {} + + +def test_ip_networks6(): + 
# We don't need to test with each platform's ifconfig/iproute2 output, + # since this test isn't testing getting the interfaces. We already have + # tests for that. + interface_data = network._interfaces_ifconfig(LINUX) + + # Without loopback + ret = network.ip_networks6(interface_data=interface_data) + assert ret == ["fe80::/64"] + # Without loopback, specific interface + ret = network.ip_networks6(interface="eth0", interface_data=interface_data) + assert ret == ["fe80::/64"] + # Without loopback, multiple specific interfaces + ret = network.ip_networks6(interface="eth0,lo", interface_data=interface_data) + assert ret == ["fe80::/64"] + # Without loopback, specific interface (not present) + ret = network.ip_networks6(interface="eth1", interface_data=interface_data) + assert ret == [] + # With loopback + ret = network.ip_networks6(include_loopback=True, interface_data=interface_data) + assert ret == ["::1/128", "fe80::/64"] + # With loopback, specific interface + ret = network.ip_networks6( + interface="eth0", include_loopback=True, interface_data=interface_data + ) + assert ret == ["fe80::/64"] + # With loopback, multiple specific interfaces + ret = network.ip_networks6( + interface="eth0,lo", include_loopback=True, interface_data=interface_data + ) + assert ret == ["::1/128", "fe80::/64"] + # With loopback, specific interface (not present) + ret = network.ip_networks6( + interface="eth1", include_loopback=True, interface_data=interface_data + ) + assert ret == [] + + # Verbose, without loopback + ret = network.ip_networks6(verbose=True, interface_data=interface_data) + expected_ret1 = { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + } + assert ret == expected_ret1 + + # Verbose, without loopback, specific interface + ret = network.ip_networks6( + interface="eth0", verbose=True, interface_data=interface_data + ) + expected_ret2 = { + "fe80::/64": { + "prefixlen": 64, 
+ "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + } + assert ret == expected_ret2 + + # Verbose, without loopback, multiple specific interfaces + ret = network.ip_networks6( + interface="eth0,lo", verbose=True, interface_data=interface_data + ) + expected_ret3 = { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + } + assert ret == expected_ret3 + + # Verbose, without loopback, specific interface (not present) + ret = network.ip_networks6( + interface="eth1", verbose=True, interface_data=interface_data + ) + assert ret == {} + + # Verbose, with loopback + ret = network.ip_networks6( + include_loopback=True, verbose=True, interface_data=interface_data + ) + expected_ret4 = { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + "::1/128": { + "prefixlen": 128, + "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", + "num_addresses": 1, + "address": "::1", + }, + } + assert ret == expected_ret4 + + # Verbose, with loopback, specific interface + ret = network.ip_networks6( + interface="eth0", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + expected_ret5 = { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + } + assert ret == expected_ret5 + + # Verbose, with loopback, multiple specific interfaces + ret = network.ip_networks6( + interface="eth0,lo", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + expected_ret6 = { + "fe80::/64": { + "prefixlen": 64, + "netmask": "ffff:ffff:ffff:ffff::", + "num_addresses": 18446744073709551616, + "address": "fe80::", + }, + "::1/128": { + "prefixlen": 128, + "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", + "num_addresses": 1, + 
"address": "::1", + }, + } + assert ret == expected_ret6 + + # Verbose, with loopback, specific interface (not present) + ret = network.ip_networks6( + interface="eth1", + include_loopback=True, + verbose=True, + interface_data=interface_data, + ) + assert ret == {} + + +def test_get_fqhostname_return(): + """ + Test if proper hostname is used when RevDNS differ from hostname + """ + with patch("socket.gethostname", MagicMock(return_value="hostname")), patch( + "socket.getfqdn", + MagicMock(return_value="very.long.and.complex.domain.name"), + ), patch( + "socket.getaddrinfo", + MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), + ): + assert network.get_fqhostname() == "hostname" + + +def test_get_fqhostname_return_empty_hostname(): + """ + Test if proper hostname is used when hostname returns empty string + """ + host = "hostname" + with patch("socket.gethostname", MagicMock(return_value=host)), patch( + "socket.getfqdn", + MagicMock(return_value="very.long.and.complex.domain.name"), + ), patch( + "socket.getaddrinfo", + MagicMock( + return_value=[ + (2, 3, 0, host, ("127.0.1.1", 0)), + (2, 3, 0, "", ("127.0.1.1", 0)), + ] + ), + ): + assert network.get_fqhostname() == host + + +@pytest.mark.parametrize( + "addr,expected,strip", + ( + ("127.0.0.1", "127.0.0.1", False), + ("[::1]", "::1", True), + ("::1", "[::1]", False), + ("[::1]", "[::1]", False), + (ipaddress.ip_address("127.0.0.1"), "127.0.0.1", False), + ), +) +def test_ip_bracket(addr, expected, strip): + assert network.ip_bracket(addr, strip=strip) == expected + + +def test_junos_ifconfig_output_parsing(): + ret = network._junos_interfaces_ifconfig("inet mtu 0 local=" + " " * 3456) assert ret == {"inet": {"up": False}} + + +def test_isportopen_false(): + ret = network.isportopen("127.0.0.1", "66000") + assert ret is False + + +def test_isportopen(): + ret = network.isportopen("127.0.0.1", "22") + assert ret == 0 + + +def test_get_socket(): + ret = network.get_socket("127.0.0.1") + assert 
ret.family == socket.AF_INET + assert ret.type == socket.SOCK_STREAM + + ret = network.get_socket("2001:a71::1") + assert ret.family == socket.AF_INET6 + assert ret.type == socket.SOCK_STREAM + + +def test_ip_to_host(grains): + ret = network.ip_to_host("127.0.0.1") + if grains["oscodename"] == "Photon": + # Photon returns this for IPv4 + assert ret == "ipv6-localhost" + else: + assert ret == "localhost" + + ret = network.ip_to_host("2001:a71::1") + assert ret is None + + ret = network.ip_to_host("::1") + if grains["os"] == "Amazon": + assert ret == "localhost6" + elif grains["os_family"] == "Debian": + if grains["osmajorrelease"] == 12: + assert ret == "localhost" + else: + assert ret == "ip6-localhost" + elif grains["os_family"] == "RedHat": + if grains["oscodename"] == "Photon": + assert ret == "ipv6-localhost" + else: + assert ret == "localhost" + elif grains["os_family"] == "Arch": + if grains.get("osmajorrelease", None) is None: + # running doesn't have osmajorrelease grains + assert ret == "localhost" + else: + assert ret == "ip6-localhost" + else: + assert ret == "localhost" + + +@pytest.mark.parametrize( + "addr,fmtr,expected", + ( + ("192.168.0.115", "prefixlen", "/24"), + ("192.168.1.80", "prefixlen", "/24"), + ("10.10.10.250", "prefixlen", "/8"), + ("192.168.0.115", "netmask", "255.255.255.0"), + ("192.168.1.80", "netmask", "255.255.255.0"), + ("10.10.10.250", "netmask", "255.0.0.0"), + ), +) +def test_natural_ipv4_netmask(addr, fmtr, expected): + assert network.natural_ipv4_netmask(addr, fmt=fmtr) == expected + + +@pytest.mark.parametrize( + "addr,expected", + ( + ("127.0", "127.0.0.0"), + ("192.168.3", "192.168.3.0"), + ("10.209", "10.209.0.0"), + ), +) +def test_rpad_ipv4_network(addr, expected): + assert network.rpad_ipv4_network(addr) == expected + + +def test_hw_addr(linux_interfaces_dict, freebsd_interfaces_dict): + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + hw_addrs = 
network.hw_addr("eth0") + assert hw_addrs == "e0:3f:49:85:6a:af" + + with patch( + "salt.utils.network.interfaces", MagicMock(return_value=freebsd_interfaces_dict) + ), patch("salt.utils.platform.is_netbsd", MagicMock(return_value=True)): + hw_addrs = network.hw_addr("em0") + assert hw_addrs == "00:30:48:ff:ff:ff" + + hw_addrs = network.hw_addr("em1") + assert hw_addrs == "00:30:48:aa:aa:aa" + + hw_addrs = network.hw_addr("dog") + assert ( + hw_addrs + == 'Interface "dog" not in available interfaces: "", "em0", "em1", "lo0", "plip0", "tun0"' + ) + + +def test_interface_and_ip(linux_interfaces_dict): + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + expected = [ + { + "address": "10.10.10.56", + "broadcast": "10.10.10.255", + "netmask": "255.255.252.0", + } + ] + ret = network.interface("eth0") + assert ret == expected + + ret = network.interface("dog") + assert ret == 'Interface "dog" not in available interfaces: "eth0", "lo"' + + ret = network.interface_ip("eth0") + assert ret == "10.10.10.56" + + ret = network.interface_ip("dog") + assert ret == 'Interface "dog" not in available interfaces: "eth0", "lo"' + + +def test_subnets(linux_interfaces_dict): + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + ret = network.subnets() + assert ret == ["10.10.8.0/22"] + + ret = network.subnets6() + assert ret == ["fe80::/64"] + + +def test_in_subnet(caplog): + assert network.in_subnet("fe80::/64", "fe80::e23f:49ff:fe85:6aaf") + assert network.in_subnet("10.10.8.0/22", "10.10.10.56") + assert not network.in_subnet("10.10.8.0/22") + + caplog.clear() + ret = network.in_subnet("10.10.8.0/40") + assert "Invalid CIDR '10.10.8.0/40'" in caplog.text + assert not ret + + +def test_ip_addrs(linux_interfaces_dict): + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + ret = network.ip_addrs("eth0") + assert 
ret == ["10.10.10.56"] + + with patch( + "salt.utils.network.linux_interfaces", + MagicMock(return_value=linux_interfaces_dict), + ): + ret = network.ip_addrs6("eth0") + assert ret == ["fe80::e23f:49ff:fe85:6aaf"] diff --git a/tests/pytests/unit/utils/test_platform.py b/tests/pytests/unit/utils/test_platform.py new file mode 100644 index 000000000000..2d9c74b23987 --- /dev/null +++ b/tests/pytests/unit/utils/test_platform.py @@ -0,0 +1,47 @@ +import subprocess + +import salt.utils.platform +from tests.support.mock import patch + + +def test_linux_distribution(): + """ + Test that when `distro` fails with a `subprocess.CalledProcessError` salt + returns empty strings as default values. + """ + distro_name = "Salt" + distro_version = "1" + distro_codename = "Awesome" + with patch("distro.name", return_value=distro_name): + with patch("distro.version", return_value=distro_version), patch( + "distro.codename", return_value=distro_codename + ): + assert salt.utils.platform.linux_distribution() == ( + distro_name, + distro_version, + distro_codename, + ) + + distro_version = "" + with patch( + "distro.version", + side_effect=subprocess.CalledProcessError(returncode=1, cmd=["foo"]), + ), patch("distro.codename", return_value=distro_codename): + assert salt.utils.platform.linux_distribution() == ( + distro_name, + distro_version, + distro_codename, + ) + distro_codename = "" + with patch( + "distro.version", + side_effect=subprocess.CalledProcessError(returncode=1, cmd=["foo"]), + ), patch( + "distro.codename", + side_effect=subprocess.CalledProcessError(returncode=1, cmd=["foo"]), + ): + assert salt.utils.platform.linux_distribution() == ( + distro_name, + distro_version, + distro_codename, + ) diff --git a/tests/pytests/unit/utils/test_rsax931.py b/tests/pytests/unit/utils/test_rsax931.py new file mode 100644 index 000000000000..a1c81e653f86 --- /dev/null +++ b/tests/pytests/unit/utils/test_rsax931.py @@ -0,0 +1,320 @@ +""" +Test the RSA ANSI X9.31 signer and verifier 
+""" + +import ctypes +import ctypes.util +import fnmatch +import glob +import os +import platform +import sys + +import pytest + +import salt.utils.platform +from salt.utils.rsax931 import ( + RSAX931Signer, + RSAX931Verifier, + _find_libcrypto, + _load_libcrypto, +) +from tests.support.mock import patch + + +@pytest.fixture +def privkey_data(): + return ( + "-----BEGIN RSA PRIVATE KEY-----\n" + "MIIEpAIBAAKCAQEA75GR6ZTv5JOv90Vq8tKhKC7YQnhDIo2hM0HVziTEk5R4UQBW\n" + "a0CKytFMbTONY2msEDwX9iA0x7F5Lgj0X8eD4ZMsYqLzqjWMekLC8bjhxc+EuPo9\n" + "Dygu3mJ2VgRC7XhlFpmdo5NN8J2E7B/CNB3R4hOcMMZNZdi0xLtFoTfwU61UPfFX\n" + "14mV2laqLbvDEfQLJhUTDeFFV8EN5Z4H1ttLP3sMXJvc3EvM0JiDVj4l1TWFUHHz\n" + "eFgCA1Im0lv8i7PFrgW7nyMfK9uDSsUmIp7k6ai4tVzwkTmV5PsriP1ju88Lo3MB\n" + "4/sUmDv/JmlZ9YyzTO3Po8Uz3Aeq9HJWyBWHAQIDAQABAoIBAGOzBzBYZUWRGOgl\n" + "IY8QjTT12dY/ymC05GM6gMobjxuD7FZ5d32HDLu/QrknfS3kKlFPUQGDAbQhbbb0\n" + "zw6VL5NO9mfOPO2W/3FaG1sRgBQcerWonoSSSn8OJwVBHMFLG3a+U1Zh1UvPoiPK\n" + "S734swIM+zFpNYivGPvOm/muF/waFf8tF/47t1cwt/JGXYQnkG/P7z0vp47Irpsb\n" + "Yjw7vPe4BnbY6SppSxscW3KoV7GtJLFKIxAXbxsuJMF/rYe3O3w2VKJ1Sug1VDJl\n" + "/GytwAkSUer84WwP2b07Wn4c5pCnmLslMgXCLkENgi1NnJMhYVOnckxGDZk54hqP\n" + "9RbLnkkCgYEA/yKuWEvgdzYRYkqpzB0l9ka7Y00CV4Dha9Of6GjQi9i4VCJ/UFVr\n" + "UlhTo5y0ZzpcDAPcoZf5CFZsD90a/BpQ3YTtdln2MMCL/Kr3QFmetkmDrt+3wYnX\n" + "sKESfsa2nZdOATRpl1antpwyD4RzsAeOPwBiACj4fkq5iZJBSI0bxrMCgYEA8GFi\n" + "qAjgKh81/Uai6KWTOW2kX02LEMVRrnZLQ9VPPLGid4KZDDk1/dEfxjjkcyOxX1Ux\n" + "Klu4W8ZEdZyzPcJrfk7PdopfGOfrhWzkREK9C40H7ou/1jUecq/STPfSOmxh3Y+D\n" + "ifMNO6z4sQAHx8VaHaxVsJ7SGR/spr0pkZL+NXsCgYEA84rIgBKWB1W+TGRXJzdf\n" + "yHIGaCjXpm2pQMN3LmP3RrcuZWm0vBt94dHcrR5l+u/zc6iwEDTAjJvqdU4rdyEr\n" + "tfkwr7v6TNlQB3WvpWanIPyVzfVSNFX/ZWSsAgZvxYjr9ixw6vzWBXOeOb/Gqu7b\n" + "cvpLkjmJ0wxDhbXtyXKhZA8CgYBZyvcQb+hUs732M4mtQBSD0kohc5TsGdlOQ1AQ\n" + "McFcmbpnzDghkclyW8jzwdLMk9uxEeDAwuxWE/UEvhlSi6qdzxC+Zifp5NBc0fVe\n" + "7lMx2mfJGxj5CnSqQLVdHQHB4zSXkAGB6XHbBd0MOUeuvzDPfs2voVQ4IG3FR0oc\n" + 
"3/znuwKBgQChZGH3McQcxmLA28aUwOVbWssfXKdDCsiJO+PEXXlL0maO3SbnFn+Q\n" + "Tyf8oHI5cdP7AbwDSx9bUfRPjg9dKKmATBFr2bn216pjGxK0OjYOCntFTVr0psRB\n" + "CrKg52Qrq71/2l4V2NLQZU40Dr1bN9V+Ftd9L0pvpCAEAWpIbLXGDw==\n" + "-----END RSA PRIVATE KEY-----" + ) + + +@pytest.fixture +def pubkey_data(): + return ( + "-----BEGIN PUBLIC KEY-----\n" + "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA75GR6ZTv5JOv90Vq8tKh\n" + "KC7YQnhDIo2hM0HVziTEk5R4UQBWa0CKytFMbTONY2msEDwX9iA0x7F5Lgj0X8eD\n" + "4ZMsYqLzqjWMekLC8bjhxc+EuPo9Dygu3mJ2VgRC7XhlFpmdo5NN8J2E7B/CNB3R\n" + "4hOcMMZNZdi0xLtFoTfwU61UPfFX14mV2laqLbvDEfQLJhUTDeFFV8EN5Z4H1ttL\n" + "P3sMXJvc3EvM0JiDVj4l1TWFUHHzeFgCA1Im0lv8i7PFrgW7nyMfK9uDSsUmIp7k\n" + "6ai4tVzwkTmV5PsriP1ju88Lo3MB4/sUmDv/JmlZ9YyzTO3Po8Uz3Aeq9HJWyBWH\n" + "AQIDAQAB\n" + "-----END PUBLIC KEY-----" + ) + + +@pytest.fixture +def hello_world(): + return b"hello, world" + + +@pytest.fixture +def hello_world_sig(): + return ( + b"\x63\xa0\x70\xd2\xe4\xd4\x6b\x8a\xa2\x59\x27\x5f\x00\x69" + b"\x1e\x3c\x50\xed\x50\x13\x09\x80\xe3\x47\x4e\x14\xb5\x7c" + b"\x07\x26\x4e\x20\x74\xea\x0e\xf8\xda\xff\x1e\x57\x8c\x67" + b"\x76\x73\xaa\xea\x0f\x0a\xe7\xa2\xe3\x88\xfc\x09\x87\x36" + b"\x01\x3a\xb7\x4c\x40\xe0\xf4\x54\xc5\xf1\xaa\xb2\x1d\x7f" + b"\xb6\xd3\xa8\xdd\x28\x69\x8b\x88\xe4\x42\x1e\x48\x3e\x1f" + b"\xe2\x2b\x3c\x7c\x85\x11\xe9\x59\xd7\xf3\xc2\x21\xd3\x55" + b"\xcb\x9c\x3c\x93\xcc\x20\xdf\x64\x81\xd0\x0d\xbf\x8e\x8d" + b"\x47\xec\x1d\x9e\x27\xec\x12\xed\x8b\x5f\xd6\x1d\xec\x8d" + b"\x77\x5a\x58\x8a\x24\xb6\x0f\x12\xb7\x51\xef\x7d\x85\x0f" + b"\x49\x39\x02\x81\x15\x08\x70\xd6\xe0\x0b\x31\xff\x5f\xf9" + b"\xd1\x92\x38\x59\x8c\x22\x9c\xbb\xbf\xcf\x85\x34\xe2\x47" + b"\xf5\xe2\xaa\xb4\x62\x33\x3c\x13\x78\x33\x87\x08\x9e\xb5" + b"\xbc\x5d\xc1\xbf\x79\x7c\xfa\x5f\x06\x6a\x3b\x17\x40\x09" + b"\xb9\x09\xbf\x32\xc3\x00\xe2\xbc\x91\x77\x14\xa5\x23\xf5" + b"\xf5\xf1\x09\x12\x38\xda\x3b\x6a\x82\x81\x7b\x5e\x1c\xcb" + b"\xaa\x36\x9b\x08\x36\x03\x14\x96\xa3\x31\x39\x59\x16\x75" + 
b"\xc9\xb6\x66\x94\x1b\x97\xff\xc8\xa1\xe3\x21\x35\x23\x06" + b"\x4c\x9b\xf4\xee" + ) + + +def test_signer(privkey_data, pubkey_data, hello_world, hello_world_sig): + with pytest.raises(ValueError): + signer = RSAX931Signer("bogus key data") + with pytest.raises(ValueError): + signer = RSAX931Signer(pubkey_data) + + signer = RSAX931Signer(privkey_data) + with pytest.raises(ValueError): + signer.sign("x" * 255) # message too long + + sig = signer.sign(hello_world) + assert hello_world_sig == sig + + +def test_verifier(privkey_data, pubkey_data, hello_world, hello_world_sig): + with pytest.raises(ValueError): + verifier = RSAX931Verifier("bogus key data") + with pytest.raises(ValueError): + verifier = RSAX931Verifier(privkey_data) + + verifier = RSAX931Verifier(pubkey_data) + with pytest.raises(ValueError): + verifier.verify("") + with pytest.raises(ValueError): + verifier.verify(hello_world_sig + b"junk") + + msg = verifier.verify(hello_world_sig) + assert hello_world == msg + + +@pytest.mark.skip_unless_on_windows +def test_find_libcrypto_win32(): + """ + Test _find_libcrypto on Windows hosts. + """ + lib_path = _find_libcrypto() + assert "libcrypto" in lib_path + + +@pytest.mark.skip_unless_on_smartos +def test_find_libcrypto_smartos(): + """ + Test _find_libcrypto on a SmartOS host. + """ + lib_path = _find_libcrypto() + assert fnmatch.fnmatch( + lib_path, os.path.join(os.path.dirname(sys.executable), "libcrypto*") + ) + + +@pytest.mark.skip_unless_on_sunos +def test_find_libcrypto_sunos(): + """ + Test _find_libcrypto on a Solaris-like host. + """ + lib_path = _find_libcrypto() + passed = False + for i in ("/opt/local/lib/libcrypto.so*", "/opt/tools/lib/libcrypto.so*"): + if fnmatch.fnmatch(lib_path, i): + passed = True + break + assert passed + + +@pytest.mark.skip_unless_on_aix +def test_find_libcrypto_aix(): + """ + Test _find_libcrypto on an IBM AIX host. 
+ """ + lib_path = _find_libcrypto() + if os.path.isdir("/opt/salt/lib"): + assert fnmatch.fnmatch(lib_path, "/opt/salt/lib/libcrypto.so*") + else: + assert fnmatch.fnmatch(lib_path, "/opt/freeware/lib/libcrypto.so*") + + +def test_find_libcrypto_with_system_before_catalina(): + """ + Test _find_libcrypto on a pre-Catalina macOS host by simulating not + finding any other libcryptos and verifying that it defaults to system. + """ + with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( + platform, "mac_ver", lambda: ("10.14.2", (), "") + ), patch.object(glob, "glob", lambda _: []), patch.object( + sys, "platform", "macosx" + ): + lib_path = _find_libcrypto() + assert lib_path == "/usr/lib/libcrypto.dylib" + + +def test_find_libcrypto_darwin_catalina(): + """ + Test _find_libcrypto on a macOS Catalina host where there are no custom + libcryptos and defaulting to the versioned system libraries. + """ + available = [ + "/usr/lib/libcrypto.0.9.7.dylib", + "/usr/lib/libcrypto.0.9.8.dylib", + "/usr/lib/libcrypto.35.dylib", + "/usr/lib/libcrypto.41.dylib", + "/usr/lib/libcrypto.42.dylib", + "/usr/lib/libcrypto.44.dylib", + "/usr/lib/libcrypto.dylib", + ] + + def test_glob(pattern): + return [lib for lib in available if fnmatch.fnmatch(lib, pattern)] + + with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( + platform, "mac_ver", lambda: ("10.15.2", (), "") + ), patch.object(sys, "platform", "macosx"), patch.object(glob, "glob", test_glob): + lib_path = _find_libcrypto() + assert "/usr/lib/libcrypto.44.dylib" == lib_path + + +def test_find_libcrypto_darwin_bigsur_packaged(): + """ + Test _find_libcrypto on a Darwin-like macOS host where there isn't a + lacation returned by ctypes.util.find_library() and the libcrypto + installation comes from a package manager (ports, brew, salt). 
+ """ + managed_paths = { + "salt": "/opt/salt/lib/libcrypto.dylib", + "brew": "/test/homebrew/prefix/opt/openssl/lib/libcrypto.dylib", + "port": "/opt/local/lib/libcrypto.dylib", + } + + saved_getenv = os.getenv + + def mock_getenv(env): + def test_getenv(var, default=None): + return env.get(var, saved_getenv(var, default)) + + return test_getenv + + def mock_glob(expected_lib): + def test_glob(pattern): + if fnmatch.fnmatch(expected_lib, pattern): + return [expected_lib] + return [] + + return test_glob + + with patch.object(salt.utils.platform, "is_darwin", lambda: True), patch.object( + platform, "mac_ver", lambda: ("11.2.2", (), "") + ), patch.object(sys, "platform", "macosx"): + for package_manager, expected_lib in managed_paths.items(): + if package_manager == "brew": + env = {"HOMEBREW_PREFIX": "/test/homebrew/prefix"} + else: + env = {"HOMEBREW_PREFIX": ""} + with patch.object(os, "getenv", mock_getenv(env)): + with patch.object(glob, "glob", mock_glob(expected_lib)): + lib_path = _find_libcrypto() + + assert expected_lib == lib_path + + # On Big Sur, there's nothing else to fall back on. + with patch.object(glob, "glob", lambda _: []): + with pytest.raises(OSError): + lib_path = _find_libcrypto() + + +def test_find_libcrypto_unsupported(): + """ + Ensure that _find_libcrypto works correctly on an unsupported host OS. + """ + with patch.object(ctypes.util, "find_library", lambda a: None), patch.object( + glob, "glob", lambda a: [] + ), patch.object(sys, "platform", "unknown"), patch.object( + salt.utils.platform, "is_darwin", lambda: False + ), pytest.raises( + OSError + ): + _find_libcrypto() + + +def test_load_libcrypto(): + """ + Test _load_libcrypto generically. + """ + lib = _load_libcrypto() + assert isinstance(lib, ctypes.CDLL) + # Try to cover both pre and post OpenSSL 1.1. 
+ assert ( + hasattr(lib, "OpenSSL_version_num") + or hasattr(lib, "OPENSSL_init_crypto") + or hasattr(lib, "OPENSSL_no_config") + ) + + +def test_find_libcrypto_darwin_onedir(): + """ + Test _find_libcrypto on a macOS + libcryptos and defaulting to the versioned system libraries. + """ + available = [ + "/usr/lib/libcrypto.0.9.7.dylib", + "/usr/lib/libcrypto.0.9.8.dylib", + "/usr/lib/libcrypto.35.dylib", + "/usr/lib/libcrypto.41.dylib", + "/usr/lib/libcrypto.42.dylib", + "/usr/lib/libcrypto.44.dylib", + "/test/homebrew/prefix/opt/openssl/lib/libcrypto.dylib", + "/opt/local/lib/libcrypto.dylib", + "lib/libcrypto.dylib", + ] + + def test_glob(pattern): + return [lib for lib in available if fnmatch.fnmatch(lib, pattern)] + + with patch.object(glob, "glob", test_glob), patch.object( + salt.utils.platform, "is_darwin", lambda: True + ), patch.object(platform, "mac_ver", lambda: ("10.15.2", (), "")), patch.object( + sys, "platform", "macosx" + ): + lib_path = _find_libcrypto() + assert "lib/libcrypto.dylib" == lib_path diff --git a/tests/pytests/unit/utils/test_user.py b/tests/pytests/unit/utils/test_user.py new file mode 100644 index 000000000000..17c6b1551f5e --- /dev/null +++ b/tests/pytests/unit/utils/test_user.py @@ -0,0 +1,29 @@ +from types import SimpleNamespace + +import pytest + +from tests.support.mock import MagicMock, patch + +pytest.importorskip("grp") + +import grp + +import salt.utils.user + + +def test_get_group_list(): + getpwname = SimpleNamespace(pw_gid=1000) + getgrgid = MagicMock(side_effect=[SimpleNamespace(gr_name="remote")]) + group_lines = [ + ["games", "x", 50, []], + ["salt", "x", 1000, []], + ] + getgrall = [grp.struct_group(comps) for comps in group_lines] + with patch("os.getgrouplist", MagicMock(return_value=[50, 1000, 12000])), patch( + "pwd.getpwnam", MagicMock(return_value=getpwname) + ), patch("salt.utils.user._getgrall", MagicMock(return_value=getgrall)), patch( + "grp.getgrgid", getgrgid + ): + group_list = 
salt.utils.user.get_group_list("salt") + assert group_list == ["games", "remote", "salt"] + getgrgid.assert_called_once() diff --git a/tests/pytests/unit/utils/test_verify.py b/tests/pytests/unit/utils/test_verify.py deleted file mode 100644 index 9f98c00f2993..000000000000 --- a/tests/pytests/unit/utils/test_verify.py +++ /dev/null @@ -1,70 +0,0 @@ -import os - -import pytest - -import salt.utils.verify -from tests.support.mock import patch - - -@pytest.mark.skip_on_windows(reason="Not applicable for Windows.") -def test_verify_env_race_condition(): - def _stat(path): - """ - Helper function for mock_stat, we want to raise errors for specific paths, but not until we get into the proper path. - Until then, just return plain os.stat_result - """ - if path in ("/tmp/salt-dir/.file3", "/tmp/salt-dir/.dir3"): - raise AssertionError("The .file3 and .dir3 paths should never be called!") - - if path in ("/tmp/salt-dir/file1", "/tmp/salt-dir/dir1"): - raise FileNotFoundError( - "[Errno 2] No such file or directory: this exception should not be visible" - ) - - # we need to return at least different st_uid in order to trigger chown for these paths - if path in ("/tmp/salt-dir/file4", "/tmp/salt-dir/dir4"): - return os.stat_result([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) - - return os.stat_result([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) - - def _chown(path, uid, gid): - if path in ("/tmp/salt-dir/file4", "/tmp/salt-dir/dir4"): - raise FileNotFoundError( - "[Errno 2] No such file or directory: this exception should not be visible" - ) - - return - - with patch("os.chown", side_effect=_chown) as mock_chown, patch( - "os.stat", side_effect=_stat - ) as mock_stat, patch( - "salt.utils.verify._get_pwnam", return_value=(None, None, 0, 0) - ), patch( - "os.getuid", return_value=0 - ), patch( - "os.listdir", return_value=["subdir"] - ), patch( - "os.path.isdir", return_value=True - ), patch( - "salt.utils.path.os_walk", - return_value=[ - ( - "/tmp/salt-dir", - ["dir1", "dir2", ".dir3", 
"dir4"], - ["file1", "file2", ".file3", "file4"], - ) - ], - ): - - # verify this runs without issues, even though FNFE is raised - salt.utils.verify.verify_env(["/tmp/salt-dir"], "root", skip_extra=True) - - # and verify it got actually called with the valid paths - mock_stat.assert_any_call("/tmp/salt-dir/file1") - mock_stat.assert_any_call("/tmp/salt-dir/dir1") - - mock_stat.assert_any_call("/tmp/salt-dir/file4") - mock_stat.assert_any_call("/tmp/salt-dir/dir4") - - mock_chown.assert_any_call("/tmp/salt-dir/file4", 0, 0) - mock_chown.assert_any_call("/tmp/salt-dir/dir4", 0, 0) diff --git a/tests/pytests/unit/utils/test_versions.py b/tests/pytests/unit/utils/test_versions.py index eed780123056..417bb9e36321 100644 --- a/tests/pytests/unit/utils/test_versions.py +++ b/tests/pytests/unit/utils/test_versions.py @@ -77,6 +77,8 @@ def test_cmp_strict(v1, v2, wanted): # Added by us ("3.10.0-514.el7", "3.10.0-514.6.1.el7", 1), ("2.2.2", "2.12.1", -1), + ("2.24.0", "2.23.0.windows.1", 1), + ("2.23.0.windows.2", "2.23.0.windows.1", 1), ), ) def test_cmp(v1, v2, wanted): diff --git a/tests/pytests/unit/utils/test_vt.py b/tests/pytests/unit/utils/test_vt.py index 438a6eb09c03..c31b25e623ce 100644 --- a/tests/pytests/unit/utils/test_vt.py +++ b/tests/pytests/unit/utils/test_vt.py @@ -1,3 +1,4 @@ +import logging import os import signal @@ -43,10 +44,13 @@ def test_log_sanitize(test_cmd, caplog): cmd, log_stdout=True, log_stderr=True, + log_stdout_level="debug", + log_stderr_level="debug", log_sanitize=password, stream_stdout=False, stream_stderr=False, ) - ret = term.recv() + with caplog.at_level(logging.DEBUG): + ret = term.recv() assert password not in caplog.text assert "******" in caplog.text diff --git a/tests/pytests/unit/utils/test_win_network.py b/tests/pytests/unit/utils/test_win_network.py new file mode 100644 index 000000000000..12df51099529 --- /dev/null +++ b/tests/pytests/unit/utils/test_win_network.py @@ -0,0 +1,337 @@ +import pytest + +import 
salt.utils.win_network as win_network +from tests.support.mock import MagicMock, patch + +pytestmark = [pytest.mark.skip_unless_on_windows] + + +class PhysicalAddress: + def __init__(self, address): + self.address = address + + def ToString(self): + return str(self.address) + + +class Interface: + """ + Mocked interface object + """ + + def __init__( + self, + i_address="02D5F1DD31E0", + i_description="Dell GigabitEthernet", + i_id="{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", + i_name="Ethernet", + i_receive_only=False, + i_status=1, + i_type=6, + ): + self.PhysicalAddress = PhysicalAddress(i_address) + self.Description = i_description + self.Id = i_id + self.Name = i_name + self.NetworkInterfaceType = i_type + self.IsReceiveOnly = i_receive_only + self.OperationalStatus = i_status + + def GetPhysicalAddress(self): + return self.PhysicalAddress + + +@pytest.fixture +def mock_ip_base(): + return MagicMock( + return_value={ + "dns_enabled": False, + "dns_suffix": "", + "dynamic_dns_enabled": False, + } + ) + + +@pytest.fixture +def mock_unicast(): + return MagicMock( + return_value={ + "ip_addresses": [ + { + "address": "172.18.87.49", + "broadcast": "172.18.87.63", + "loopback": "127.0.0.1", + "netmask": "255.255.255.240", + "prefix_length": 28, + "prefix_origin": "Manual", + "suffix_origin": "Manual", + } + ], + "ipv6_addresses": [ + { + "address": "fe80::e8a4:1224:5548:2b81", + "interface_index": 32, + "prefix_length": 64, + "prefix_origin": "WellKnown", + "suffix_origin": "Router", + } + ], + } + ) + + +@pytest.fixture +def mock_gateway(): + return MagicMock( + return_value={ + "ip_gateways": ["192.168.0.1"], + "ipv6_gateways": ["fe80::208:a2ff:fe0b:de70"], + } + ) + + +@pytest.fixture +def mock_dns(): + return MagicMock( + return_value={ + "ip_dns": ["10.4.0.1", "10.1.0.1", "8.8.8.8"], + "ipv6_dns": ["2600:740a:1:304::1"], + } + ) + + +@pytest.fixture +def mock_multicast(): + return MagicMock( + return_value={ + "ip_multicast": [ + "224.0.0.1", + "224.0.0.251", + 
"224.0.0.252", + "230.230.230.230", + "239.0.0.250", + "239.255.255.250", + ], + "ipv6_multicast": [ + "ff01::1", + "ff02::1", + "ff02::c", + "ff02::fb", + "ff02::1:3", + "ff02::1:ff0f:4c48", + "ff02::1:ffa6:f6e6", + ], + } + ) + + +@pytest.fixture +def mock_anycast(): + return MagicMock(return_value={"ip_anycast": [], "ipv6_anycast": []}) + + +@pytest.fixture +def mock_wins(): + return MagicMock(return_value={"ip_wins": []}) + + +def test_get_interface_info_dot_net( + mock_ip_base, + mock_unicast, + mock_gateway, + mock_dns, + mock_multicast, + mock_anycast, + mock_wins, +): + expected = { + "Ethernet": { + "alias": "Ethernet", + "description": "Dell GigabitEthernet", + "dns_enabled": False, + "dns_suffix": "", + "dynamic_dns_enabled": False, + "id": "{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", + "ip_addresses": [ + { + "address": "172.18.87.49", + "broadcast": "172.18.87.63", + "loopback": "127.0.0.1", + "netmask": "255.255.255.240", + "prefix_length": 28, + "prefix_origin": "Manual", + "suffix_origin": "Manual", + } + ], + "ip_anycast": [], + "ip_dns": ["10.4.0.1", "10.1.0.1", "8.8.8.8"], + "ip_gateways": ["192.168.0.1"], + "ip_multicast": [ + "224.0.0.1", + "224.0.0.251", + "224.0.0.252", + "230.230.230.230", + "239.0.0.250", + "239.255.255.250", + ], + "ip_wins": [], + "ipv6_addresses": [ + { + "address": "fe80::e8a4:1224:5548:2b81", + "interface_index": 32, + "prefix_length": 64, + "prefix_origin": "WellKnown", + "suffix_origin": "Router", + } + ], + "ipv6_anycast": [], + "ipv6_dns": ["2600:740a:1:304::1"], + "ipv6_gateways": ["fe80::208:a2ff:fe0b:de70"], + "ipv6_multicast": [ + "ff01::1", + "ff02::1", + "ff02::c", + "ff02::fb", + "ff02::1:3", + "ff02::1:ff0f:4c48", + "ff02::1:ffa6:f6e6", + ], + "physical_address": "02:D5:F1:DD:31:E0", + "receive_only": False, + "status": "Up", + "type": "Ethernet", + } + } + + mock_int = MagicMock(return_value=[Interface()]) + with patch.object(win_network, "_get_network_interfaces", mock_int), patch.object( + win_network, 
"_get_ip_base_properties", mock_ip_base + ), patch.object(win_network, "_get_ip_unicast_info", mock_unicast), patch.object( + win_network, "_get_ip_gateway_info", mock_gateway + ), patch.object( + win_network, "_get_ip_dns_info", mock_dns + ), patch.object( + win_network, "_get_ip_multicast_info", mock_multicast + ), patch.object( + win_network, "_get_ip_anycast_info", mock_anycast + ), patch.object( + win_network, "_get_ip_wins_info", mock_wins + ): + + results = win_network.get_interface_info_dot_net() + + assert expected == results + + +def test_get_network_info( + mock_ip_base, + mock_unicast, + mock_gateway, + mock_dns, + mock_multicast, + mock_anycast, + mock_wins, +): + expected = { + "Dell GigabitEthernet": { + "hwaddr": "02:D5:F1:DD:31:E0", + "inet": [ + { + "address": "172.18.87.49", + "broadcast": "172.18.87.63", + "gateway": "192.168.0.1", + "label": "Dell GigabitEthernet", + "netmask": "255.255.255.240", + } + ], + "inet6": [ + { + "address": "fe80::e8a4:1224:5548:2b81", + "gateway": "fe80::208:a2ff:fe0b:de70", + "prefixlen": 64, + } + ], + "up": True, + } + } + mock_int = MagicMock(return_value=[Interface()]) + with patch.object(win_network, "_get_network_interfaces", mock_int), patch.object( + win_network, "_get_ip_base_properties", mock_ip_base + ), patch.object(win_network, "_get_ip_unicast_info", mock_unicast), patch.object( + win_network, "_get_ip_gateway_info", mock_gateway + ), patch.object( + win_network, "_get_ip_dns_info", mock_dns + ), patch.object( + win_network, "_get_ip_multicast_info", mock_multicast + ), patch.object( + win_network, "_get_ip_anycast_info", mock_anycast + ), patch.object( + win_network, "_get_ip_wins_info", mock_wins + ): + + results = win_network.get_interface_info() + + assert expected == results + + +def test__get_base_properties_tap_adapter(): + """ + Adapter Type 53 is apparently an undocumented type corresponding to + OpenVPN TAP Adapters and possibly other TAP Adapters. 
This test makes + sure the win_network util will catch that. + https://github.com/saltstack/salt/issues/56196 + https://github.com/saltstack/salt/issues/56275 + """ + i_face = Interface( + i_address="03DE4D0713FA", + i_description="Windows TAP Adapter", + i_id="{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", + i_name="Windows TAP Adapter", + i_receive_only=False, + i_status=1, + i_type=53, + ) + expected = { + "alias": "Windows TAP Adapter", + "description": "Windows TAP Adapter", + "id": "{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", + "receive_only": False, + "physical_address": "03:DE:4D:07:13:FA", + "status": "Up", + "type": "TAPAdapter", + } + results = win_network._get_base_properties(i_face=i_face) + assert expected == results + + +def test__get_base_properties_undefined_adapter(): + """ + The Adapter Type 53 may be an arbitrary number assigned by OpenVPN. + This will test the ability to avoid stack tracing on an undefined + adapter type. If one is encountered, just use the description. + """ + i_face = Interface( + i_address="03DE4D0713FA", + i_description="Undefined Adapter", + i_id="{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", + i_name="Undefined", + i_receive_only=False, + i_status=1, + i_type=50, + ) + expected = { + "alias": "Undefined", + "description": "Undefined Adapter", + "id": "{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", + "receive_only": False, + "physical_address": "03:DE:4D:07:13:FA", + "status": "Up", + "type": "Undefined Adapter", + } + results = win_network._get_base_properties(i_face=i_face) + assert expected == results + + +def test__get_network_interfaces_no_error(): + ret = win_network._get_network_interfaces() + assert len(ret) > 0 diff --git a/tests/pytests/unit/utils/test_win_reg.py b/tests/pytests/unit/utils/test_win_reg.py index fa7c01865532..206c40b3089f 100644 --- a/tests/pytests/unit/utils/test_win_reg.py +++ b/tests/pytests/unit/utils/test_win_reg.py @@ -338,6 +338,7 @@ def test_read_value_non_existing(): "Windows\\CurrentVersion" ), "vdata": 
None, + "vtype": None, "vname": "fake_name", "success": False, "hive": "HKLM", @@ -360,6 +361,7 @@ def test_read_value_non_existing_key(fake_key): expected = { "comment": "Cannot find key: HKLM\\{}".format(fake_key), "vdata": None, + "vtype": None, "vname": "fake_name", "success": False, "hive": "HKLM", @@ -375,6 +377,7 @@ def test_read_value_access_denied(fake_key): expected = { "comment": "Access is denied: HKLM\\{}".format(fake_key), "vdata": None, + "vtype": None, "vname": "fake_name", "success": False, "hive": "HKLM", diff --git a/tests/pytests/unit/utils/test_x509.py b/tests/pytests/unit/utils/test_x509.py index 25971af40d83..f13ac97fb33d 100644 --- a/tests/pytests/unit/utils/test_x509.py +++ b/tests/pytests/unit/utils/test_x509.py @@ -1053,6 +1053,7 @@ def test_create_invalidity_date(self, val, expected, critical): "inpt,cls,parsed", [ (("email", "me@example.com"), cx509.RFC822Name, "me@example.com"), + (("email", ".example.com"), cx509.RFC822Name, ".example.com"), ( ("email", "me@überexample.com"), cx509.RFC822Name, @@ -1068,8 +1069,16 @@ def test_create_invalidity_date(self, val, expected, critical): cx509.UniformResourceIdentifier, "https://www.xn--berexample-8db.com", ), + (("URI", "some/path/only"), cx509.UniformResourceIdentifier, "some/path/only"), (("DNS", "example.com"), cx509.DNSName, "example.com"), (("DNS", "überexample.com"), cx509.DNSName, "xn--berexample-8db.com"), + (("DNS", "*.überexample.com"), cx509.DNSName, "*.xn--berexample-8db.com"), + (("DNS", ".überexample.com"), cx509.DNSName, ".xn--berexample-8db.com"), + ( + ("DNS", "γνῶθι.σεαυτόν.gr"), + cx509.DNSName, + "xn--oxakdo9327a.xn--mxahzvhf4c.gr", + ), (("RID", "1.2.3.4"), cx509.RegisteredID, cx509.ObjectIdentifier("1.2.3.4")), ( ("IP", "13.37.13.37"), @@ -1186,9 +1195,108 @@ def test_create_invalidity_date(self, val, expected, critical): ), ], ), + ( + ("DNS", "some.invalid_doma.in"), + salt.exceptions.CommandExecutionError, + "at position 8.*not allowed$", + ), + ( + ("DNS", 
"some..invalid-doma.in"), + salt.exceptions.CommandExecutionError, + "Empty Label", + ), + ( + ("DNS", "invalid*.wild.card"), + salt.exceptions.CommandExecutionError, + "at position 8.*not allowed", + ), + ( + ("DNS", "invalid.*.wild.card"), + salt.exceptions.CommandExecutionError, + "at position 1.*not allowed", + ), + ( + ("DNS", "*..whats.this"), + salt.exceptions.CommandExecutionError, + "Empty label", + ), + ( + ("DNS", 42), + salt.exceptions.SaltInvocationError, + "Expected string value, got int", + ), + ( + ("DNS", ""), + salt.exceptions.CommandExecutionError, + "Empty domain", + ), + ( + ("DNS", "ἀνεῤῥίφθω.κύβος͵.gr"), + salt.exceptions.CommandExecutionError, + "not allowed at position 6 in 'κύβος͵'$", + ), + ( + ("DNS", "می\u200cخواهم\u200c.iran"), + salt.exceptions.CommandExecutionError, + "^Unknown codepoint adjacent to joiner.* at position 9", + ), + ( + ("DNS", ".*.wildcard-dot.test"), + salt.exceptions.CommandExecutionError, + "Wildcards and leading dots cannot be present together", + ), + ( + ("email", "invalid@*.mail.address"), + salt.exceptions.CommandExecutionError, + "Wildcards are not allowed in this context", + ), + ( + ("email", "invalid@.mail.address"), + salt.exceptions.CommandExecutionError, + "Leading dots are not allowed in this context", + ), + ( + ("email", "Invalid Email "), + salt.exceptions.CommandExecutionError, + "not allowed$", + ), + ( + ("IP", "this is not an IP address"), + salt.exceptions.CommandExecutionError, + "does not seem to be an IP address or network range.", + ), + ( + ("URI", "https://*.χάος.σκάλα.gr"), + salt.exceptions.CommandExecutionError, + "Wildcards are not allowed in this context", + ), + ( + ("URI", "https://.invalid.host"), + salt.exceptions.CommandExecutionError, + "Leading dots are not allowed in this context", + ), + ( + ("dirName", "Et tu, Brute?"), + salt.exceptions.CommandExecutionError, + "Failed parsing rfc4514 dirName string", + ), + ( + ("otherName", "otherName:1.2.3.4;UTF8:some other 
identifier"), + salt.exceptions.SaltInvocationError, + "otherName is currently not implemented", + ), + ( + ("invalidType", "L'état c'est moi!"), + salt.exceptions.CommandExecutionError, + "GeneralName type invalidtype is invalid", + ), ], ) def test_parse_general_names(inpt, cls, parsed): + if issubclass(cls, Exception): + with pytest.raises(cls, match=parsed): + x509._parse_general_names([inpt]) + return expected = cls(parsed) res = x509._parse_general_names([inpt]) if inpt[0] == "dirName": @@ -1197,6 +1305,99 @@ def test_parse_general_names(inpt, cls, parsed): assert res[0] == expected +@pytest.mark.parametrize( + "inpt,cls,parsed", + [ + (("email", "me@example.com"), cx509.RFC822Name, "me@example.com"), + ( + ("URI", "https://www.example.com"), + cx509.UniformResourceIdentifier, + "https://www.example.com", + ), + (("DNS", "example.com"), cx509.DNSName, "example.com"), + (("DNS", "*.example.com"), cx509.DNSName, "*.example.com"), + (("DNS", ".example.com"), cx509.DNSName, ".example.com"), + ( + ("DNS", "invalid*.wild.card"), + salt.exceptions.CommandExecutionError, + "at position 8.*not allowed", + ), + ( + ("DNS", "invalid.*.wild.card"), + salt.exceptions.CommandExecutionError, + "at position 1.*not allowed", + ), + ( + ("DNS", ".*.wildcard-dot.test"), + salt.exceptions.CommandExecutionError, + "Wildcards and leading dots cannot be present together", + ), + ( + ("DNS", "gott.würfelt.nicht"), + salt.exceptions.CommandExecutionError, + "Cannot encode non-ASCII strings", + ), + ( + ("DNS", "some.invalid_doma.in"), + salt.exceptions.CommandExecutionError, + "at position 8.*not allowed$", + ), + ( + ("DNS", "some..invalid-doma.in"), + salt.exceptions.CommandExecutionError, + "Empty Label", + ), + ( + ("DNS", 42), + salt.exceptions.SaltInvocationError, + "Expected string value, got int", + ), + ( + ("DNS", ""), + salt.exceptions.CommandExecutionError, + "Empty domain", + ), + ( + ("DNS", "*..whats.this"), + salt.exceptions.CommandExecutionError, + "Empty label", + 
), + ( + ("email", "invalid@*.mail.address"), + salt.exceptions.CommandExecutionError, + "Wildcards are not allowed in this context", + ), + ( + ("email", "invalid@.mail.address"), + salt.exceptions.CommandExecutionError, + "Leading dots are not allowed in this context", + ), + ( + ("email", "Invalid Email "), + salt.exceptions.CommandExecutionError, + "not allowed$", + ), + ( + ("URI", "https://.invalid.host"), + salt.exceptions.CommandExecutionError, + "Leading dots are not allowed in this context", + ), + ], +) +def test_parse_general_names_without_idna(inpt, cls, parsed): + with patch("salt.utils.x509.HAS_IDNA", False): + if issubclass(cls, Exception): + with pytest.raises(cls, match=parsed): + x509._parse_general_names([inpt]) + return + expected = cls(parsed) + res = x509._parse_general_names([inpt]) + if inpt[0] == "dirName": + assert res[0].value == expected + else: + assert res[0] == expected + + @pytest.mark.parametrize( "inpt", [ diff --git a/tests/pytests/unit/utils/verify/test_clean_path.py b/tests/pytests/unit/utils/verify/test_clean_path.py new file mode 100644 index 000000000000..062821eb7967 --- /dev/null +++ b/tests/pytests/unit/utils/verify/test_clean_path.py @@ -0,0 +1,17 @@ +""" +salt.utils.clean_path works as expected +""" + +import salt.utils.verify + + +def test_clean_path_valid(tmp_path): + path_a = str(tmp_path / "foo") + path_b = str(tmp_path / "foo" / "bar") + assert salt.utils.verify.clean_path(path_a, path_b) == path_b + + +def test_clean_path_invalid(tmp_path): + path_a = str(tmp_path / "foo") + path_b = str(tmp_path / "baz" / "bar") + assert salt.utils.verify.clean_path(path_a, path_b) == "" diff --git a/tests/pytests/unit/utils/verify/test_clean_path_link.py b/tests/pytests/unit/utils/verify/test_clean_path_link.py new file mode 100644 index 000000000000..8af20f5550a7 --- /dev/null +++ b/tests/pytests/unit/utils/verify/test_clean_path_link.py @@ -0,0 +1,66 @@ +""" +Ensure salt.utils.clean_path works with symlinked directories and 
files +""" +import ctypes + +import pytest + +import salt.utils.verify + + +class Symlink: + """ + symlink(source, link_name) Creates a symbolic link pointing to source named + link_name + """ + + def __init__(self): + self._csl = None + + def __call__(self, source, link_name): + if self._csl is None: + self._csl = ctypes.windll.kernel32.CreateSymbolicLinkW + self._csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32) + self._csl.restype = ctypes.c_ubyte + flags = 0 + if source is not None and source.is_dir(): + flags = 1 + + if self._csl(str(link_name), str(source), flags) == 0: + raise ctypes.WinError() + + +@pytest.fixture(scope="module") +def symlink(): + return Symlink() + + +@pytest.fixture +def setup_links(tmp_path, symlink): + to_path = tmp_path / "linkto" + from_path = tmp_path / "linkfrom" + if salt.utils.platform.is_windows(): + kwargs = {} + else: + kwargs = {"target_is_directory": True} + if salt.utils.platform.is_windows(): + symlink(to_path, from_path, **kwargs) + else: + from_path.symlink_to(to_path, **kwargs) + return to_path, from_path + + +def test_clean_path_symlinked_src(setup_links): + to_path, from_path = setup_links + test_path = from_path / "test" + expect_path = str(to_path / "test") + ret = salt.utils.verify.clean_path(str(from_path), str(test_path)) + assert ret == expect_path, "{} is not {}".format(ret, expect_path) + + +def test_clean_path_symlinked_tgt(setup_links): + to_path, from_path = setup_links + test_path = to_path / "test" + expect_path = str(to_path / "test") + ret = salt.utils.verify.clean_path(str(from_path), str(test_path)) + assert ret == expect_path, "{} is not {}".format(ret, expect_path) diff --git a/tests/pytests/unit/utils/verify/test_verify.py b/tests/pytests/unit/utils/verify/test_verify.py new file mode 100644 index 000000000000..5dcb90b08572 --- /dev/null +++ b/tests/pytests/unit/utils/verify/test_verify.py @@ -0,0 +1,297 @@ +import getpass +import logging +import os +import pathlib +import socket 
+import stat +import sys +import tempfile + +import pytest + +import salt.utils.files +import salt.utils.verify +from tests.support.mock import patch + +if sys.platform.startswith("win"): + import win32file +else: + import resource + +log = logging.getLogger(__name__) + + +@pytest.mark.skip_on_windows(reason="Not applicable for Windows.") +def test_verify_env_race_condition(): + def _stat(path): + """ + Helper function for mock_stat, we want to raise errors for specific paths, but not until we get into the proper path. + Until then, just return plain os.stat_result + """ + if path in ("/tmp/salt-dir/.file3", "/tmp/salt-dir/.dir3"): + raise AssertionError("The .file3 and .dir3 paths should never be called!") + + if path in ("/tmp/salt-dir/file1", "/tmp/salt-dir/dir1"): + raise FileNotFoundError( + "[Errno 2] No such file or directory: this exception should not be visible" + ) + + # we need to return at least different st_uid in order to trigger chown for these paths + if path in ("/tmp/salt-dir/file4", "/tmp/salt-dir/dir4"): + return os.stat_result([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11]) + + return os.stat_result([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]) + + def _chown(path, uid, gid): + if path in ("/tmp/salt-dir/file4", "/tmp/salt-dir/dir4"): + raise FileNotFoundError( + "[Errno 2] No such file or directory: this exception should not be visible" + ) + + return + + with patch("os.chown", side_effect=_chown) as mock_chown, patch( + "os.stat", side_effect=_stat + ) as mock_stat, patch( + "salt.utils.verify._get_pwnam", return_value=(None, None, 0, 0) + ), patch( + "os.getuid", return_value=0 + ), patch( + "os.listdir", return_value=["subdir"] + ), patch( + "os.path.isdir", return_value=True + ), patch( + "salt.utils.path.os_walk", + return_value=[ + ( + "/tmp/salt-dir", + ["dir1", "dir2", ".dir3", "dir4"], + ["file1", "file2", ".file3", "file4"], + ) + ], + ): + + # verify this runs without issues, even though FNFE is raised + salt.utils.verify.verify_env(["/tmp/salt-dir"], 
"root", skip_extra=True) + + # and verify it got actually called with the valid paths + mock_stat.assert_any_call("/tmp/salt-dir/file1") + mock_stat.assert_any_call("/tmp/salt-dir/dir1") + + mock_stat.assert_any_call("/tmp/salt-dir/file4") + mock_stat.assert_any_call("/tmp/salt-dir/dir4") + + mock_chown.assert_any_call("/tmp/salt-dir/file4", 0, 0) + mock_chown.assert_any_call("/tmp/salt-dir/dir4", 0, 0) + + +def test_valid_id_exception_handler(): + """ + Ensure we just return False if we pass in invalid or undefined paths. + Refs #8259 + """ + opts = {"pki_dir": "/tmp/whatever"} + assert not salt.utils.verify.valid_id(opts, None) + + +def test_valid_id_pathsep(): + """ + Path separators in id should make it invalid + """ + opts = {"pki_dir": "/tmp/whatever"} + # We have to test both path separators because os.path.normpath will + # convert forward slashes to backslashes on Windows. + for pathsep in ("/", "\\"): + assert not salt.utils.verify.valid_id(opts, pathsep.join(("..", "foobar"))) + + +def test_zmq_verify(): + assert salt.utils.verify.zmq_version() + + +def test_zmq_verify_insufficient(): + import zmq + + with patch.object(zmq, "__version__", "2.1.0"): + assert not salt.utils.verify.zmq_version() + + +def test_user(): + assert salt.utils.verify.check_user(getpass.getuser()) + + +def test_no_user(): + # Catch sys.stderr here since no logging is configured and + # check_user WILL write to sys.stderr + class FakeWriter: + def __init__(self): + self.output = "" + self.errors = "strict" + + def write(self, data): + self.output += data + + def flush(self): + pass + + stderr = sys.stderr + writer = FakeWriter() + sys.stderr = writer + try: + # Now run the test + if sys.platform.startswith("win"): + assert salt.utils.verify.check_user("nouser") + else: + with pytest.raises(SystemExit): + assert not salt.utils.verify.check_user("nouser") + finally: + # Restore sys.stderr + sys.stderr = stderr + if writer.output != 'CRITICAL: User not found: "nouser"\n': + # If 
there's a different error catch, write it to sys.stderr + sys.stderr.write(writer.output) + + +@pytest.mark.skip_on_windows(reason="No verify_env Windows") +def test_verify_env(tmp_path): + root_dir = tmp_path / "root" + var_dir = root_dir / "var" / "log" / "salt" + key_dir = root_dir / "key_dir" + salt.utils.verify.verify_env([var_dir], getpass.getuser(), root_dir=root_dir) + assert var_dir.exists() + dir_stat = os.stat(var_dir) + assert dir_stat.st_uid == os.getuid() + assert dir_stat.st_mode & stat.S_IRWXU == stat.S_IRWXU + assert dir_stat.st_mode & stat.S_IRWXG == 40 + assert dir_stat.st_mode & stat.S_IRWXO == 5 + + +@pytest.mark.requires_network(only_local_network=True) +def test_verify_socket(): + assert salt.utils.verify.verify_socket("", 18000, 18001) + if socket.has_ipv6: + # Only run if Python is built with IPv6 support; otherwise + # this will just fail. + try: + assert salt.utils.verify.verify_socket("::", 18000, 18001) + except OSError: + # Python has IPv6 enabled, but the system cannot create + # IPv6 sockets (otherwise the test would return a bool) + # - skip the test + # + # FIXME - possibly emit a message that the system does + # not support IPv6. + pass + + +def test_max_open_files(caplog): + with caplog.at_level(logging.DEBUG): + recorded_logs = caplog.record_tuples + logmsg_dbg = "This salt-master instance has accepted {0} minion keys." + logmsg_chk = ( + "The number of accepted minion keys({}) should be lower " + "than 1/4 of the max open files soft setting({}). According " + "to the system's hard limit, there's still a margin of {} " + "to raise the salt's max_open_files setting. Please consider " + "raising this value." + ) + logmsg_crash = ( + "The number of accepted minion keys({}) should be lower " + "than 1/4 of the max open files soft setting({}). " + "salt-master will crash pretty soon! According to the " + "system's hard limit, there's still a margin of {} to " + "raise the salt's max_open_files setting. 
Please consider " + "raising this value." + ) + if sys.platform.startswith("win"): + logmsg_crash = ( + "The number of accepted minion keys({}) should be lower " + "than 1/4 of the max open files soft setting({}). " + "salt-master will crash pretty soon! Please consider " + "raising this value." + ) + + if sys.platform.startswith("win"): + # Check the Windows API for more detail on this + # http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx + # and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html + mof_s = mof_h = win32file._getmaxstdio() + else: + mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE) + tempdir = tempfile.mkdtemp(prefix="fake-keys") + keys_dir = pathlib.Path(tempdir, "minions") + keys_dir.mkdir() + + mof_test = 256 + + if sys.platform.startswith("win"): + win32file._setmaxstdio(mof_test) + else: + resource.setrlimit(resource.RLIMIT_NOFILE, (mof_test, mof_h)) + + try: + prev = 0 + for newmax, level in ( + (24, None), + (66, "INFO"), + (127, "WARNING"), + (196, "CRITICAL"), + ): + + for n in range(prev, newmax): + kpath = pathlib.Path(keys_dir, str(n)) + with salt.utils.files.fopen(kpath, "w") as fp_: + fp_.write(str(n)) + + opts = {"max_open_files": newmax, "pki_dir": tempdir} + + salt.utils.verify.check_max_open_files(opts) + + if level is None: + # No log message is triggered, only the DEBUG one which + # tells us how many minion keys were accepted. 
+ assert [logmsg_dbg.format(newmax)] == caplog.messages + else: + assert logmsg_dbg.format(newmax) in caplog.messages + assert ( + logmsg_chk.format( + newmax, + mof_test, + mof_test - newmax + if sys.platform.startswith("win") + else mof_h - newmax, + ) + in caplog.messages + ) + prev = newmax + + newmax = mof_test + for n in range(prev, newmax): + kpath = pathlib.Path(keys_dir, str(n)) + with salt.utils.files.fopen(kpath, "w") as fp_: + fp_.write(str(n)) + + opts = {"max_open_files": newmax, "pki_dir": tempdir} + + salt.utils.verify.check_max_open_files(opts) + assert logmsg_dbg.format(newmax) in caplog.messages + assert ( + logmsg_crash.format( + newmax, + mof_test, + mof_test - newmax + if sys.platform.startswith("win") + else mof_h - newmax, + ) + in caplog.messages + ) + except OSError as err: + if err.errno == 24: + # Too many open files + pytest.skip("We've hit the max open files setting") + raise + finally: + if sys.platform.startswith("win"): + win32file._setmaxstdio(mof_h) + else: + resource.setrlimit(resource.RLIMIT_NOFILE, (mof_s, mof_h)) diff --git a/tests/pytests/unit/utils/verify/test_verify_logs.py b/tests/pytests/unit/utils/verify/test_verify_logs.py new file mode 100644 index 000000000000..f491c65656a5 --- /dev/null +++ b/tests/pytests/unit/utils/verify/test_verify_logs.py @@ -0,0 +1,76 @@ +import getpass +import os +import pathlib + +import pytest + +import salt.utils.verify +from tests.support.mock import MagicMock, patch + + +def test_verify_logs_filter(): + filtered = salt.utils.verify.verify_logs_filter( + ["udp://foo", "tcp://bar", "/tmp/foo", "file://tmp/bar"] + ) + assert filtered == ["/tmp/foo"], filtered + + +@pytest.mark.skip_on_windows(reason="Not applicable on Windows") +def test_verify_log_files_udp_scheme(): + salt.utils.verify.verify_log_files(["udp://foo"], getpass.getuser()) + assert not pathlib.Path(os.getcwd(), "udp:").is_dir() + + +@pytest.mark.skip_on_windows(reason="Not applicable on Windows") +def 
test_verify_log_files_tcp_scheme(): + salt.utils.verify.verify_log_files(["udp://foo"], getpass.getuser()) + assert not pathlib.Path(os.getcwd(), "tcp:").is_dir() + + +@pytest.mark.skip_on_windows(reason="Not applicable on Windows") +def test_verify_log_files_file_scheme(): + salt.utils.verify.verify_log_files(["file://{}"], getpass.getuser()) + assert not pathlib.Path(os.getcwd(), "file:").is_dir() + + +@pytest.mark.skip_on_windows(reason="Not applicable on Windows") +def test_verify_log_files(tmp_path): + path = tmp_path / "foo" / "bar.log" + assert not path.exists() + salt.utils.verify.verify_log_files([str(path)], getpass.getuser()) + assert path.exists() + + +def test_verify_log(): + """ + Test that verify_log works as expected + """ + message = "Insecure logging configuration detected! Sensitive data may be logged." + + mock_cheese = MagicMock() + with patch.object(salt.utils.verify.log, "warning", mock_cheese): + salt.utils.verify.verify_log({"log_level": "cheeseshop"}) + mock_cheese.assert_called_once_with(message) + + mock_trace = MagicMock() + with patch.object(salt.utils.verify.log, "warning", mock_trace): + salt.utils.verify.verify_log({"log_level": "trace"}) + mock_trace.assert_called_once_with(message) + + mock_none = MagicMock() + with patch.object(salt.utils.verify.log, "warning", mock_none): + salt.utils.verify.verify_log({}) + mock_none.assert_called_once_with(message) + + mock_info = MagicMock() + with patch.object(salt.utils.verify.log, "warning", mock_info): + salt.utils.verify.verify_log({"log_level": "info"}) + assert mock_info.call_count == 0 + + +def test_insecure_log(): + """ + test insecure_log that it returns accurate insecure log levels + """ + ret = salt.utils.verify.insecure_log() + assert ret == ["all", "debug", "garbage", "profile", "trace"] diff --git a/tests/pytests/unit/wheel/__init__.py b/tests/pytests/unit/wheel/__init__.py new file mode 100644 index 000000000000..e69de29bb2d1 diff --git 
a/tests/pytests/unit/wheel/test_file_roots.py b/tests/pytests/unit/wheel/test_file_roots.py new file mode 100644 index 000000000000..5c8c54f23a59 --- /dev/null +++ b/tests/pytests/unit/wheel/test_file_roots.py @@ -0,0 +1,267 @@ +import pathlib + +import pytest + +from salt.wheel import file_roots + + +def _make_temp_root_file(root, *subpaths, binary=False, dir_only=False): + """ + Creates a file under the specified subpaths of the given root with the filepath as its content. + """ + full_path = pathlib.Path(root, *subpaths) + full_path.parent.mkdir(exist_ok=True, parents=True) + if not dir_only: + if binary: + content = b"\x00" + full_path.write_bytes(content) + else: + content = str(full_path) + full_path.write_text(content) + + +@pytest.fixture +def base_root_1(tmp_path): + path = tmp_path / "base_root_1" + path.mkdir() + return path + + +@pytest.fixture +def base_root_2(tmp_path): + path = tmp_path / "base_root_2" + path.mkdir() + return path + + +@pytest.fixture +def prod_root_1(tmp_path): + path = tmp_path / "prod_root_1" + path.mkdir() + return path + + +@pytest.fixture +def prod_root_2(tmp_path): + path = tmp_path / "prod_root_2" + path.mkdir() + return path + + +@pytest.fixture +def populated_roots( + base_root_1, + base_root_2, + prod_root_1, + prod_root_2, +): + roots = { + "base": [str(base_root_1), str(base_root_2)], + "prod": [str(prod_root_1), str(prod_root_2)], + } + + _make_temp_root_file(base_root_1, "test_base_1_file") + _make_temp_root_file(base_root_1, "common_file") + _make_temp_root_file(base_root_1, "base_1_subdir", "test_base_1_file_in_subdir") + _make_temp_root_file(base_root_1, "base_1_subdir", "common_file") + _make_temp_root_file( + base_root_1, "base_1_subdir", "test_base_1_file_in_subdir_binary", binary=True + ) + _make_temp_root_file( + base_root_1, + "base_1_subdir", + "base_1_sub_subdir", + "test_base_1_file_in_sub_subdir", + ) + _make_temp_root_file(base_root_2, "test_base_2_file_1") + _make_temp_root_file(base_root_2, 
"test_base_2_file_2") + _make_temp_root_file(base_root_2, "common_file") + _make_temp_root_file(prod_root_1, "test_prod_2_file") + _make_temp_root_file(prod_root_1, "common_file") + _make_temp_root_file(prod_root_1, "prod_1_subdir", dir_only=True) + + return roots + + +@pytest.fixture +def base_list_env( + base_root_1, + base_root_2, +): + return { + str(base_root_1): { + "base_1_subdir": { + "base_1_sub_subdir": {"test_base_1_file_in_sub_subdir": "f"}, + "test_base_1_file_in_subdir_binary": "f", + "common_file": "f", + "test_base_1_file_in_subdir": "f", + }, + "common_file": "f", + "test_base_1_file": "f", + }, + str(base_root_2): { + "common_file": "f", + "test_base_2_file_2": "f", + "test_base_2_file_1": "f", + }, + } + + +@pytest.fixture +def prod_list_env( + prod_root_1, + prod_root_2, +): + return { + str(prod_root_1): {"common_file": "f", "test_prod_2_file": "f"}, + str(prod_root_2): {}, + } + + +@pytest.fixture +def configure_loader_modules(populated_roots): + return { + file_roots: { + "__opts__": {"file_roots": populated_roots}, + }, + } + + +def test_find(base_root_1, base_root_2): + file_name = "common_file" + expected = [ + {str(base_root_1 / file_name): "txt"}, + {str(base_root_2 / file_name): "txt"}, + ] + ret = file_roots.find(file_name) + assert ret == expected + + +def test_find_prod(prod_root_1): + file_name = "common_file" + expected = [{str(prod_root_1 / file_name): "txt"}] + ret = file_roots.find(file_name, saltenv="prod") + assert ret == expected + + +def test_find_in_subdir(base_root_1): + file_name = pathlib.Path("base_1_subdir", "test_base_1_file_in_subdir") + expected = [{str(base_root_1 / file_name): "txt"}] + ret = file_roots.find(str(file_name)) + assert ret == expected + + +def test_find_does_not_exist(): + file_name = "prod_1_subdir" + expected = [] + ret = file_roots.find(str(file_name), saltenv="prod") + assert ret == expected + + +def test_find_binary(base_root_1): + file_name = pathlib.Path("base_1_subdir", 
"test_base_1_file_in_subdir_binary") + expected = [{str(base_root_1 / file_name): "bin"}] + ret = file_roots.find(str(file_name)) + assert ret == expected + + +def test_list_env(base_list_env): + ret = file_roots.list_env() + assert ret == base_list_env + + +def test_list_env_prod(prod_list_env): + ret = file_roots.list_env(saltenv="prod") + assert ret == prod_list_env + + +def test_list_roots(base_list_env, prod_list_env): + expected = {"base": [base_list_env], "prod": [prod_list_env]} + ret = file_roots.list_roots() + assert ret == expected + + +def test_read(base_root_1, base_root_2): + file_name = "common_file" + root_1_file = str(base_root_1 / file_name) + root_2_file = str(base_root_2 / file_name) + expected = [{root_1_file: root_1_file}, {root_2_file: root_2_file}] + ret = file_roots.read(file_name) + assert ret == expected + + +def test_read_prod(prod_root_1): + file_name = "common_file" + root_1_file = str(prod_root_1 / file_name) + expected = [{root_1_file: root_1_file}] + ret = file_roots.read(file_name, saltenv="prod") + assert ret == expected + + +def test_read_binary(): + file_name = pathlib.Path("base_1_subdir", "test_base_1_file_in_subdir_binary") + ret = file_roots.read(str(file_name)) + assert ret == [] + + +def test_read_in_subdir(base_root_1): + file_name = pathlib.Path("base_1_subdir", "test_base_1_file_in_subdir") + subdir_file = str(base_root_1 / file_name) + expected = [{subdir_file: subdir_file}] + ret = file_roots.read(str(file_name)) + assert ret == expected + + +def test_write(base_root_1): + file_name = "testfile" + ret = file_roots.write(file_name, file_name) + assert f"Wrote data to file {str(base_root_1 / file_name)}" in ret + + +def test_write_index(base_root_2): + file_name = "testfile" + ret = file_roots.write(file_name, file_name, index=1) + assert f"Wrote data to file {str(base_root_2 / file_name)}" in ret + + +def test_write_prod(prod_root_2): + file_name = "testfile" + ret = file_roots.write(file_name, file_name, 
saltenv="prod", index=1) + assert f"Wrote data to file {str(prod_root_2 / file_name)}" in ret + + +def test_write_subdir(prod_root_1): + file_name = str(pathlib.Path("prod_1_subdir", "testfile")) + ret = file_roots.write(file_name, file_name, saltenv="prod") + assert f"Wrote data to file {str(prod_root_1 / file_name)}" in ret + + +def test_write_make_new_subdir(prod_root_2): + file_name = str(pathlib.Path("prod_2_subdir", "testfile")) + ret = file_roots.write(file_name, file_name, saltenv="prod", index=1) + assert f"Wrote data to file {str(prod_root_2 / file_name)}" in ret + + +def test_write_invalid_env(): + file_name = "testfile" + env = "not_an_env" + ret = file_roots.write(file_name, file_name, saltenv=env) + assert f"{env} is not present" in ret + + +def test_write_invalid_index(): + file_name = "testfile" + ret = file_roots.write(file_name, file_name, index=2) + assert "index 2 in environment base is not present" in ret + + +def test_write_invalid_absolute_path(base_root_1): + file_name = str(base_root_1 / "testfile") + ret = file_roots.write(file_name, file_name) + assert "is not relative to the environment" in ret + + +def test_write_invalid_path(): + file_name = str(pathlib.Path("..", "testfile")) + ret = file_roots.write(file_name, file_name) + assert "Invalid path: " in ret diff --git a/tests/support/ext/console.py b/tests/support/ext/console.py index 41e2dd466a04..8c54638fd5e4 100644 --- a/tests/support/ext/console.py +++ b/tests/support/ext/console.py @@ -1,4 +1,3 @@ -# vim: sw=4 ts=4 fenc=utf-8 """ getTerminalSize() - get width and height of console diff --git a/tests/support/helpers.py b/tests/support/helpers.py index 3556e08853b0..f3a73090fa72 100644 --- a/tests/support/helpers.py +++ b/tests/support/helpers.py @@ -33,6 +33,7 @@ import attr import pytest +import pytestskipmarkers.utils.platform from pytestshellutils.exceptions import ProcessFailed from pytestshellutils.utils import ports from pytestshellutils.utils.processes import ProcessResult @@ 
-1644,6 +1645,10 @@ def _default_venv_bin_dir(self): return pathlib.Path(self.venv_python).parent def __enter__(self): + if pytestskipmarkers.utils.platform.is_fips_enabled(): + pytest.skip( + "Test cannot currently create virtual environments on a FIPS enabled platform" + ) try: self._create_virtualenv() except subprocess.CalledProcessError: diff --git a/tests/support/mock.py b/tests/support/mock.py index 2256ad8f5da9..c050d0bf4e65 100644 --- a/tests/support/mock.py +++ b/tests/support/mock.py @@ -27,6 +27,7 @@ ANY, DEFAULT, FILTER_DIR, + AsyncMock, MagicMock, Mock, NonCallableMagicMock, diff --git a/pkg/tests/support/helpers.py b/tests/support/pkg.py similarity index 57% rename from pkg/tests/support/helpers.py rename to tests/support/pkg.py index 3e8adadefed7..3d0e086e077c 100644 --- a/pkg/tests/support/helpers.py +++ b/tests/support/pkg.py @@ -6,11 +6,9 @@ import pprint import re import shutil -import tarfile import textwrap import time -from typing import TYPE_CHECKING, Any, Dict, List -from zipfile import ZipFile +from typing import TYPE_CHECKING, Dict, List import attr import distro @@ -18,6 +16,7 @@ import psutil import pytest import requests +import saltfactories.cli from pytestshellutils.shell import DaemonImpl, Subprocess from pytestshellutils.utils.processes import ( ProcessResult, @@ -26,89 +25,68 @@ ) from pytestskipmarkers.utils import platform from saltfactories.bases import SystemdSaltDaemonImpl -from saltfactories.cli import call, key, salt +from saltfactories.cli import call, key from saltfactories.daemons import api, master, minion from saltfactories.utils import cli_scripts -try: - import crypt +import salt.utils.files +from tests.conftest import CODE_DIR +from tests.support.pytest.helpers import TestAccount - HAS_CRYPT = True -except ImportError: - HAS_CRYPT = False -try: - import pwd - - HAS_PWD = True -except ImportError: - HAS_PWD = False - -try: - import winreg - - HAS_WINREG = True -except ImportError: - HAS_WINREG = False - -TESTS_DIR = 
pathlib.Path(__file__).resolve().parent.parent -CODE_DIR = TESTS_DIR.parent -ARTIFACTS_DIR = CODE_DIR / "artifacts" +ARTIFACTS_DIR = CODE_DIR / "artifacts" / "pkg" log = logging.getLogger(__name__) @attr.s(kw_only=True, slots=True) class SaltPkgInstall: - conf_dir: pathlib.Path = attr.ib() - system_service: bool = attr.ib(default=False) - proc: Subprocess = attr.ib(init=False) - pkgs: List[str] = attr.ib(factory=list) - onedir: bool = attr.ib(default=False) - singlebin: bool = attr.ib(default=False) - compressed: bool = attr.ib(default=False) - hashes: Dict[str, Dict[str, Any]] = attr.ib() + pkg_system_service: bool = attr.ib(default=False) + proc: Subprocess = attr.ib(init=False, repr=False) + + # Paths root: pathlib.Path = attr.ib(default=None) run_root: pathlib.Path = attr.ib(default=None) ssm_bin: pathlib.Path = attr.ib(default=None) bin_dir: pathlib.Path = attr.ib(default=None) - # The artifact is an installer (exe, msi, pkg, rpm, deb) - installer_pkg: bool = attr.ib(default=False) + install_dir: pathlib.Path = attr.ib(init=False) + binary_paths: Dict[str, List[pathlib.Path]] = attr.ib(init=False) + config_path: str = attr.ib(init=False) + conf_dir: pathlib.Path = attr.ib() + + # Test selection flags upgrade: bool = attr.ib(default=False) - # install salt or not. 
This allows someone - # to test a currently installed version of salt + downgrade: bool = attr.ib(default=False) + classic: bool = attr.ib(default=False) + + # Installing flags no_install: bool = attr.ib(default=False) no_uninstall: bool = attr.ib(default=False) + # Distribution/system information distro_id: str = attr.ib(init=False) distro_codename: str = attr.ib(init=False) distro_name: str = attr.ib(init=False) distro_version: str = attr.ib(init=False) + + # Version information + prev_version: str = attr.ib() + use_prev_version: str = attr.ib() + artifact_version: str = attr.ib(init=False) + version: str = attr.ib(init=False) + + # Package (and management) metadata pkg_mngr: str = attr.ib(init=False) rm_pkg: str = attr.ib(init=False) + dbg_pkg: str = attr.ib(init=False) salt_pkgs: List[str] = attr.ib(init=False) - install_dir: pathlib.Path = attr.ib(init=False) - binary_paths: Dict[str, List[pathlib.Path]] = attr.ib(init=False) - classic: bool = attr.ib(default=False) - prev_version: str = attr.ib() - pkg_version: str = attr.ib(default="1") - repo_data: str = attr.ib(init=False) - major: str = attr.ib(init=False) - minor: str = attr.ib(init=False) - relenv: bool = attr.ib(default=True) + pkgs: List[str] = attr.ib(factory=list) file_ext: bool = attr.ib(default=None) + relenv: bool = attr.ib(default=True) @proc.default def _default_proc(self): return Subprocess() - @hashes.default - def _default_hashes(self): - return { - "BLAKE2B": {"file": None, "tool": "-blake2b512"}, - "SHA3_512": {"file": None, "tool": "-sha3-512"}, - "SHA512": {"file": None, "tool": "-sha512"}, - } - @distro_id.default def _default_distro_id(self): return distro.id().lower() @@ -119,16 +97,21 @@ def _default_distro_codename(self): @distro_name.default def _default_distro_name(self): - if distro.name(): - return distro.name().split()[0].lower() + name = distro.name() + if name: + if "vmware" in name.lower(): + return name.split()[1].lower() + return name.split()[0].lower() 
@distro_version.default def _default_distro_version(self): + if self.distro_name == "photon": + return distro.version().split(".")[0] return distro.version().lower() @pkg_mngr.default def _default_pkg_mngr(self): - if self.distro_id in ("centos", "redhat", "amzn", "fedora"): + if self.distro_id in ("centos", "redhat", "amzn", "fedora", "photon"): return "yum" elif self.distro_id in ("ubuntu", "debian"): ret = self.proc.run("apt-get", "update") @@ -137,11 +120,20 @@ def _default_pkg_mngr(self): @rm_pkg.default def _default_rm_pkg(self): - if self.distro_id in ("centos", "redhat", "amzn", "fedora"): + if self.distro_id in ("centos", "redhat", "amzn", "fedora", "photon"): return "remove" elif self.distro_id in ("ubuntu", "debian"): return "purge" + @dbg_pkg.default + def _default_dbg_pkg(self): + dbg_pkg = None + if self.distro_id in ("centos", "redhat", "amzn", "fedora", "photon"): + dbg_pkg = "salt-debuginfo" + elif self.distro_id in ("ubuntu", "debian"): + dbg_pkg = "salt-dbg" + return dbg_pkg + @salt_pkgs.default def _default_salt_pkgs(self): salt_pkgs = [ @@ -152,10 +144,13 @@ def _default_salt_pkgs(self): "salt-cloud", "salt-minion", ] - if self.distro_id in ("centos", "redhat", "amzn", "fedora"): + if self.distro_id in ("centos", "redhat", "amzn", "fedora", "photon"): salt_pkgs.append("salt") elif self.distro_id in ("ubuntu", "debian"): salt_pkgs.append("salt-common") + if packaging.version.parse(self.version) >= packaging.version.parse("3006.3"): + if self.dbg_pkg: + salt_pkgs.append(self.dbg_pkg) return salt_pkgs @install_dir.default @@ -165,203 +160,121 @@ def _default_install_dir(self): os.getenv("ProgramFiles"), "Salt Project", "Salt" ).resolve() elif platform.is_darwin(): - # TODO: Add mac install dir path install_dir = pathlib.Path("/opt", "salt") else: install_dir = pathlib.Path("/opt", "saltstack", "salt") return install_dir - @repo_data.default - def _default_repo_data(self): + @config_path.default + def _default_config_path(self): """ - Query to see 
the published Salt artifacts - from repo.json + Default location for salt configurations """ - url = "https://repo.saltproject.io/salt/onedir/repo.json" - ret = requests.get(url) - data = ret.json() - return data + if platform.is_windows(): + config_path = pathlib.Path("C://salt", "etc", "salt") + else: + config_path = pathlib.Path("/etc", "salt") + return config_path - def check_relenv(self, version): + @version.default + def _default_version(self): """ - Detects if we are using relenv - onedir build + The version to be installed at the start """ - relenv = False - if packaging.version.parse(version) >= packaging.version.parse("3006.0"): - relenv = True - return relenv + if not self.upgrade and not self.use_prev_version: + version = self.artifact_version + else: + version = self.prev_version + parsed = packaging.version.parse(version) + version = f"{parsed.major}.{parsed.minor}" + if self.distro_id in ("ubuntu", "debian"): + self.stop_services() + return version + + @artifact_version.default + def _default_artifact_version(self): + """ + The version of the local salt artifacts being tested, based on regex matching + """ + version = "" + artifacts = list(ARTIFACTS_DIR.glob("**/*.*")) + for artifact in artifacts: + version = re.search( + r"([0-9].*)(\-[0-9].fc|\-[0-9].el|\+ds|\_all|\_any|\_amd64|\_arm64|\-[0-9].am|(\-[0-9]-[a-z]*-[a-z]*[0-9_]*.|\-[0-9]*.*)(exe|msi|pkg|rpm|deb))", + artifact.name, + ) + if version: + version = version.groups()[0].replace("_", "-").replace("~", "") + version = version.split("-")[0] + break + if not version: + pytest.fail( + f"Failed to package artifacts in '{ARTIFACTS_DIR}'. 
" + f"Directory Contents:\n{pprint.pformat(artifacts)}" + ) + return version def update_process_path(self): # The installer updates the path for the system, but that doesn't # make it to this python session, so we need to update that os.environ["PATH"] = ";".join([str(self.install_dir), os.getenv("path")]) - # When the MSI installer is run from self.proc.run, it doesn't update - # the registry. When run from a normal command prompt it does. Until we - # figure that out, we will update the process path as above. This - # doesn't really check that the path is being set though... but I see - # no other way around this - # if HAS_WINREG: - # log.debug("Refreshing the path") - # # Get the updated system path from the registry - # path_key = winreg.OpenKeyEx( - # winreg.HKEY_LOCAL_MACHINE, - # r"SYSTEM\CurrentControlSet\Control\Session Manager\Environment", - # ) - # current_path = winreg.QueryValueEx(path_key, "path")[0] - # path_key.Close() - # # Update the path for the current running process - # os.environ["PATH"] = current_path - - def get_version(self, version_only=False): - """ - Return the version information - needed to install a previous version - of Salt. - """ - prev_version = self.prev_version - pkg_version = None - if not self.upgrade: - # working with local artifact - version = "" - for artifact in ARTIFACTS_DIR.glob("**/*.*"): - version = re.search( - r"([0-9].*)(\-[0-9].fc|\-[0-9].el|\+ds|\_all|\_any|\_amd64|\_arm64|\-[0-9].am|(\-[0-9]-[a-z]*-[a-z]*[0-9_]*.|\-[0-9]*.*)(tar.gz|tar.xz|zip|exe|msi|pkg|rpm|deb))", - artifact.name, - ) - if version: - version = version.groups()[0].replace("_", "-").replace("~", "") - version = version.split("-")[0] - # TODO: Remove this clause. 
This is to handle a versioning difficulty between pre-3006 - # dev versions and older salt versions on deb-based distros - if version.startswith("1:"): - version = version[2:] - break - major, minor = version.split(".", 1) - else: - if not prev_version: - # We did not pass in a version, lets detect the latest - # version information of a Salt artifact. - latest = list(self.repo_data["latest"].keys())[0] - version = self.repo_data["latest"][latest]["version"] - if "-" in version: - prev_version, pkg_version = version.split("-") - else: - prev_version, pkg_version = version, None - else: - # We passed in a version, but lets check if the pkg_version - # is defined. Relenv pkgs do not define a pkg build number - if "-" not in prev_version and not self.check_relenv( - version=prev_version - ): - pkg_numbers = [ - x for x in self.repo_data.keys() if prev_version in x - ] - pkg_version = 1 - for number in pkg_numbers: - number = int(number.split("-")[1]) - if number > pkg_version: - pkg_version = number - major, minor = prev_version.split(".") - if version_only: - return version - return major, minor, prev_version, pkg_version - def __attrs_post_init__(self): - self.major, self.minor, self.prev_version, self.pkg_version = self.get_version() - self.relenv = self.check_relenv(self.major) - file_ext_re = r"tar\.gz" + self.relenv = packaging.version.parse(self.version) >= packaging.version.parse( + "3006.0" + ) + + file_ext_re = "rpm|deb" if platform.is_darwin(): - file_ext_re = r"tar\.gz|pkg" + file_ext_re = "pkg" if platform.is_windows(): - file_ext_re = "zip|exe|msi" + file_ext_re = "exe|msi" + for f_path in ARTIFACTS_DIR.glob("**/*.*"): f_path = str(f_path) if re.search(f"salt-(.*).({file_ext_re})$", f_path, re.IGNORECASE): - # Compressed can be zip, tar.gz, exe, or pkg. 
All others are - # deb and rpm - self.compressed = True self.file_ext = os.path.splitext(f_path)[1].strip(".") - if self.file_ext == "gz": - if f_path.endswith("tar.gz"): - self.file_ext = "tar.gz" self.pkgs.append(f_path) if platform.is_windows(): self.root = pathlib.Path(os.getenv("LocalAppData")).resolve() - if self.file_ext == "zip": - with ZipFile(f_path, "r") as zip: - first = zip.infolist()[0] - if first.filename == "salt/ssm.exe": - self.onedir = True - self.bin_dir = self.root / "salt" / "salt" - self.run_root = self.bin_dir / "salt.exe" - self.ssm_bin = self.root / "salt" / "ssm.exe" - elif first.filename == "salt.exe": - self.singlebin = True - self.run_root = self.root / "salt.exe" - self.ssm_bin = self.root / "ssm.exe" - else: - log.error( - "Unexpected archive layout. First: %s", - first.filename, - ) - elif self.file_ext in ["exe", "msi"]: - self.compressed = False - self.onedir = True - self.installer_pkg = True + if self.file_ext in ["exe", "msi"]: self.root = self.install_dir.parent self.bin_dir = self.install_dir self.ssm_bin = self.install_dir / "ssm.exe" + self.run_root = self.bin_dir / "bin" / "salt.exe" + if not self.relenv and not self.classic: + self.ssm_bin = self.bin_dir / "bin" / "ssm.exe" else: log.error("Unexpected file extension: %s", self.file_ext) - else: - if platform.is_darwin(): - self.root = pathlib.Path(os.sep, "opt") - else: - self.root = pathlib.Path(os.sep, "usr", "local", "bin") - + if self.use_prev_version: + self.bin_dir = self.install_dir / "bin" + self.run_root = self.bin_dir / "salt.exe" + self.ssm_bin = self.bin_dir / "ssm.exe" + if self.file_ext == "msi" or self.relenv: + self.ssm_bin = self.install_dir / "ssm.exe" + if ( + self.install_dir / "salt-minion.exe" + ).exists() and not self.relenv: + log.debug( + f"Removing {(self.install_dir / 'salt-minion.exe')}" + ) + (self.install_dir / "salt-minion.exe").unlink() + + elif platform.is_darwin(): + self.root = pathlib.Path("/opt") if self.file_ext == "pkg": - 
self.compressed = False - self.onedir = True - self.installer_pkg = True self.bin_dir = self.root / "salt" / "bin" self.run_root = self.bin_dir / "run" - elif self.file_ext == "tar.gz": - with tarfile.open(f_path) as tar: - # The first item will be called salt - first = next(iter(tar.getmembers())) - if first.name == "salt" and first.isdir(): - self.onedir = True - self.bin_dir = self.root / "salt" / "run" - self.run_root = self.bin_dir / "run" - elif first.name == "salt" and first.isfile(): - self.singlebin = True - self.run_root = self.root / "salt" - else: - log.error( - "Unexpected archive layout. First: %s (isdir: %s, isfile: %s)", - first.name, - first.isdir(), - first.isfile(), - ) else: log.error("Unexpected file extension: %s", self.file_ext) - if re.search( - r"salt(.*)(x86_64|all|amd64|aarch64|arm64)\.(rpm|deb)$", f_path - ): - self.installer_pkg = True - self.pkgs.append(f_path) - if not self.pkgs: pytest.fail("Could not find Salt Artifacts") python_bin = self.install_dir / "bin" / "python3" if platform.is_windows(): python_bin = self.install_dir / "Scripts" / "python.exe" - if not self.compressed: - if platform.is_windows(): + if self.relenv: self.binary_paths = { "call": ["salt-call.exe"], "cp": ["salt-cp.exe"], @@ -369,11 +282,29 @@ def __attrs_post_init__(self): "pip": ["salt-pip.exe"], "python": [python_bin], } + elif self.classic: + self.binary_paths = { + "call": [self.install_dir / "salt-call.bat"], + "cp": [self.install_dir / "salt-cp.bat"], + "minion": [self.install_dir / "salt-minion.bat"], + "python": [self.bin_dir / "python.exe"], + } + self.binary_paths["pip"] = self.binary_paths["python"] + ["-m", "pip"] else: - if os.path.exists(self.install_dir / "bin" / "salt"): - install_dir = self.install_dir / "bin" - else: - install_dir = self.install_dir + self.binary_paths = { + "call": [str(self.run_root), "call"], + "cp": [str(self.run_root), "cp"], + "minion": [str(self.run_root), "minion"], + "pip": [str(self.run_root), "pip"], + "python": 
[str(self.run_root), "shell"], + } + + else: + if os.path.exists(self.install_dir / "bin" / "salt"): + install_dir = self.install_dir / "bin" + else: + install_dir = self.install_dir + if self.relenv: self.binary_paths = { "salt": [install_dir / "salt"], "api": [install_dir / "salt-api"], @@ -391,61 +322,56 @@ def __attrs_post_init__(self): "pip": [install_dir / "salt-pip"], "python": [python_bin], } - else: - if self.run_root and os.path.exists(self.run_root): - if platform.is_windows(): - self.binary_paths = { - "call": [str(self.run_root), "call"], - "cp": [str(self.run_root), "cp"], - "minion": [str(self.run_root), "minion"], - "pip": [str(self.run_root), "pip"], - "python": [python_bin], - } - else: - self.binary_paths = { - "salt": [str(self.run_root)], - "api": [str(self.run_root), "api"], - "call": [str(self.run_root), "call"], - "cloud": [str(self.run_root), "cloud"], - "cp": [str(self.run_root), "cp"], - "key": [str(self.run_root), "key"], - "master": [str(self.run_root), "master"], - "minion": [str(self.run_root), "minion"], - "proxy": [str(self.run_root), "proxy"], - "run": [str(self.run_root), "run"], - "ssh": [str(self.run_root), "ssh"], - "syndic": [str(self.run_root), "syndic"], - "spm": [str(self.run_root), "spm"], - "pip": [str(self.run_root), "pip"], - "python": [python_bin], - } else: - if platform.is_windows(): - self.binary_paths = { - "call": [self.install_dir / "salt-call.exe"], - "cp": [self.install_dir / "salt-cp.exe"], - "minion": [self.install_dir / "salt-minion.exe"], - "pip": [self.install_dir / "salt-pip.exe"], - "python": [python_bin], - } + self.binary_paths = { + "salt": [shutil.which("salt")], + "api": [shutil.which("salt-api")], + "call": [shutil.which("salt-call")], + "cloud": [shutil.which("salt-cloud")], + "cp": [shutil.which("salt-cp")], + "key": [shutil.which("salt-key")], + "master": [shutil.which("salt-master")], + "minion": [shutil.which("salt-minion")], + "proxy": [shutil.which("salt-proxy")], + "run": 
[shutil.which("salt-run")], + "ssh": [shutil.which("salt-ssh")], + "syndic": [shutil.which("salt-syndic")], + "spm": [shutil.which("spm")], + "python": [str(pathlib.Path("/usr/bin/python3"))], + } + if self.classic: + if platform.is_darwin(): + # `which` is not catching the right paths on downgrades, explicitly defining them here + self.binary_paths = { + "salt": [self.bin_dir / "salt"], + "api": [self.bin_dir / "salt-api"], + "call": [self.bin_dir / "salt-call"], + "cloud": [self.bin_dir / "salt-cloud"], + "cp": [self.bin_dir / "salt-cp"], + "key": [self.bin_dir / "salt-key"], + "master": [self.bin_dir / "salt-master"], + "minion": [self.bin_dir / "salt-minion"], + "proxy": [self.bin_dir / "salt-proxy"], + "run": [self.bin_dir / "salt-run"], + "ssh": [self.bin_dir / "salt-ssh"], + "syndic": [self.bin_dir / "salt-syndic"], + "spm": [self.bin_dir / "spm"], + "python": [str(self.bin_dir / "python3")], + "pip": [str(self.bin_dir / "pip3")], + } + else: + self.binary_paths["pip"] = [str(pathlib.Path("/usr/bin/pip3"))] + self.proc.run(*self.binary_paths["pip"], "install", "-U", "pip") + self.proc.run( + *self.binary_paths["pip"], "install", "-U", "pyopenssl" + ) else: - self.binary_paths = { - "salt": [self.install_dir / "salt"], - "api": [self.install_dir / "salt-api"], - "call": [self.install_dir / "salt-call"], - "cloud": [self.install_dir / "salt-cloud"], - "cp": [self.install_dir / "salt-cp"], - "key": [self.install_dir / "salt-key"], - "master": [self.install_dir / "salt-master"], - "minion": [self.install_dir / "salt-minion"], - "proxy": [self.install_dir / "salt-proxy"], - "run": [self.install_dir / "salt-run"], - "ssh": [self.install_dir / "salt-ssh"], - "syndic": [self.install_dir / "salt-syndic"], - "spm": [self.install_dir / "spm"], - "pip": [self.install_dir / "salt-pip"], - "python": [python_bin], - } + self.binary_paths["python"] = [shutil.which("salt"), "shell"] + if platform.is_darwin(): + self.binary_paths["pip"] = [self.run_root, "pip"] + 
self.binary_paths["spm"] = [shutil.which("salt-spm")] + else: + self.binary_paths["pip"] = [shutil.which("salt-pip")] @staticmethod def salt_factories_root_dir(system_service: bool = False) -> pathlib.Path: @@ -467,79 +393,10 @@ def _check_retcode(self, ret): assert ret.returncode == 0 return True - @property - def salt_hashes(self): - for _hash in self.hashes.keys(): - for fpath in ARTIFACTS_DIR.glob(f"**/*{_hash}*"): - fpath = str(fpath) - if re.search(f"{_hash}", fpath): - self.hashes[_hash]["file"] = fpath - - return self.hashes - - def _install_ssm_service(self): - # Register the services - # run_root and ssm_bin are configured in helper.py to point to the - # correct binary location - log.debug("Installing master service") - ret = self.proc.run( - str(self.ssm_bin), - "install", - "salt-master", - str(self.run_root), - "master", - "-c", - str(self.conf_dir), - ) - self._check_retcode(ret) - log.debug("Installing minion service") - ret = self.proc.run( - str(self.ssm_bin), - "install", - "salt-minion", - str(self.run_root), - "minion", - "-c", - str(self.conf_dir), - ) - self._check_retcode(ret) - log.debug("Installing api service") - ret = self.proc.run( - str(self.ssm_bin), - "install", - "salt-api", - str(self.run_root), - "api", - "-c", - str(self.conf_dir), - ) - self._check_retcode(ret) - - def _install_compressed(self, upgrade=False): - pkg = self.pkgs[0] - log.info("Installing %s", pkg) - if platform.is_windows(): - if pkg.endswith("zip"): - # Extract the files - log.debug("Extracting zip file") - with ZipFile(pkg, "r") as zip: - zip.extractall(path=self.root) - elif pkg.endswith("exe") or pkg.endswith("msi"): - log.error("Not a compressed package type: %s", pkg) - else: - log.error("Unknown package type: %s", pkg) - if self.system_service: - self._install_ssm_service() - elif platform.is_darwin(): - log.debug("Extracting tarball into %s", self.root) - with tarfile.open(pkg) as tar: # , "r:gz") - tar.extractall(path=str(self.root)) - else: - 
log.debug("Extracting tarball into %s", self.root) - with tarfile.open(pkg) as tar: # , "r:gz") - tar.extractall(path=str(self.root)) - - def _install_pkgs(self, upgrade=False): + def _install_pkgs(self, upgrade=False, downgrade=False): + if downgrade: + self.install_previous(downgrade=downgrade) + return True pkg = self.pkgs[0] if platform.is_windows(): if upgrade: @@ -554,29 +411,27 @@ def _install_pkgs(self, upgrade=False): elif pkg.endswith("msi"): # Install the package log.debug("Installing: %s", str(pkg)) - # START_MINION="" does not work as documented. The service is - # still starting. We need to fix this for RC2 - ret = self.proc.run( - "msiexec.exe", "/qn", "/i", str(pkg), 'START_MINION=""' - ) + # Write a batch file to run the installer. It is impossible to + # perform escaping of the START_MINION property that the MSI + # expects unless we do it via a batch file + batch_file = pathlib.Path(pkg).parent / "install_msi.cmd" + batch_content = f'msiexec /qn /i "{str(pkg)}" START_MINION=""\n' + with salt.utils.files.fopen(batch_file, "w") as fp: + fp.write(batch_content) + # Now run the batch file + ret = self.proc.run("cmd.exe", "/c", str(batch_file)) self._check_retcode(ret) else: log.error("Invalid package: %s", pkg) return False - # Stop the service installed by the installer. 
We only need this - # until we fix the issue where the MSI installer is starting the - # salt-minion service when it shouldn't - log.debug("Removing installed salt-minion service") - self.proc.run(str(self.ssm_bin), "stop", "salt-minion") - # Remove the service installed by the installer log.debug("Removing installed salt-minion service") self.proc.run(str(self.ssm_bin), "remove", "salt-minion", "confirm") self.update_process_path() elif platform.is_darwin(): - daemons_dir = pathlib.Path(os.sep, "Library", "LaunchDaemons") + daemons_dir = pathlib.Path("/Library", "LaunchDaemons") service_name = "com.saltstack.salt.minion" plist_file = daemons_dir / f"{service_name}.plist" log.debug("Installing: %s", str(pkg)) @@ -586,25 +441,52 @@ def _install_pkgs(self, upgrade=False): self.proc.run("launchctl", "disable", f"system/{service_name}") self.proc.run("launchctl", "bootout", "system", str(plist_file)) elif upgrade: + env = os.environ.copy() + extra_args = [] + if self.distro_id in ("ubuntu", "debian"): + env["DEBIAN_FRONTEND"] = "noninteractive" + extra_args = [ + "-o", + "DPkg::Options::=--force-confdef", + "-o", + "DPkg::Options::=--force-confold", + ] log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) - ret = self.proc.run(self.pkg_mngr, "upgrade", "-y", *self.pkgs) + args = extra_args + self.pkgs + upgrade_cmd = "upgrade" + if self.distro_id == "photon": + # tdnf does not detect nightly build versions to be higher version + # than release versions + upgrade_cmd = "install" + ret = self.proc.run( + self.pkg_mngr, + upgrade_cmd, + "-y", + *args, + _timeout=120, + env=env, + ) else: log.info("Installing packages:\n%s", pprint.pformat(self.pkgs)) ret = self.proc.run(self.pkg_mngr, "install", "-y", *self.pkgs) - if not (platform.is_darwin() or platform.is_windows()): + if not platform.is_darwin() and not platform.is_windows(): # Make sure we don't have any trailing references to old package file locations - assert "No such file or directory" not in 
ret.stdout + assert ret.returncode == 0 assert "/saltstack/salt/run" not in ret.stdout log.info(ret) self._check_retcode(ret) - def install(self, upgrade=False): - if self.compressed: - self._install_compressed(upgrade=upgrade) - else: - self._install_pkgs(upgrade=upgrade) - if self.distro_id in ("ubuntu", "debian"): - self.stop_services() + def package_python_version(self): + return self.proc.run( + str(self.binary_paths["python"][0]), + "-c", + "import sys; print('{}.{}'.format(*sys.version_info))", + ).stdout.strip() + + def install(self, upgrade=False, downgrade=False): + self._install_pkgs(upgrade=upgrade, downgrade=downgrade) + if self.distro_id in ("ubuntu", "debian"): + self.stop_services() def stop_services(self): """ @@ -613,28 +495,28 @@ def stop_services(self): settings we have set. This will also verify the expected services are up and running. """ + retval = True for service in ["salt-syndic", "salt-master", "salt-minion"]: check_run = self.proc.run("systemctl", "status", service) if check_run.returncode != 0: # The system was not started automatically and we # are expecting it to be on install log.debug("The service %s was not started on install.", service) - return False - stop_service = self.proc.run("systemctl", "stop", service) - self._check_retcode(stop_service) - return True + retval = False + else: + stop_service = self.proc.run("systemctl", "stop", service) + self._check_retcode(stop_service) + return retval - def install_previous(self): + def install_previous(self, downgrade=False): """ Install previous version. This is used for upgrade tests. 
""" - major_ver = self.major - minor_ver = self.minor - pkg_version = self.pkg_version - full_version = f"{self.major}.{self.minor}-{pkg_version}" - - min_ver = f"{major_ver}" + major_ver = packaging.version.parse(self.prev_version).major + relenv = packaging.version.parse(self.prev_version) >= packaging.version.parse( + "3006.0" + ) distro_name = self.distro_name if distro_name == "centos" or distro_name == "fedora": distro_name = "redhat" @@ -642,14 +524,30 @@ def install_previous(self): if self.classic: root_url = "py3/" - if self.distro_name in ["redhat", "centos", "amazon", "fedora"]: + if self.distro_name in [ + "redhat", + "centos", + "amazon", + "fedora", + "vmware", + "photon", + ]: + # Removing EPEL repo files for fp in pathlib.Path("/etc", "yum.repos.d").glob("epel*"): fp.unlink() gpg_key = "SALTSTACK-GPG-KEY.pub" if self.distro_version == "9": gpg_key = "SALTSTACK-GPG-KEY2.pub" + if relenv: + gpg_key = "SALT-PROJECT-GPG-PUBKEY-2023.pub" + if platform.is_aarch64(): - arch = "aarch64" + arch = "arm64" + # Starting with 3006.5, we prioritize the aarch64 repo paths for rpm-based distros + if packaging.version.parse( + self.prev_version + ) >= packaging.version.parse("3006.5"): + arch = "aarch64" else: arch = "x86_64" ret = self.proc.run( @@ -662,12 +560,34 @@ def install_previous(self): f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver}.repo", f"/etc/yum.repos.d/salt-{distro_name}.repo", ) - ret = self.proc.run(self.pkg_mngr, "clean", "expire-cache") + if self.distro_name == "photon": + # yum version on photon doesn't support expire-cache + ret = self.proc.run(self.pkg_mngr, "clean", "all") + else: + ret = self.proc.run(self.pkg_mngr, "clean", "expire-cache") self._check_retcode(ret) + cmd_action = "downgrade" if downgrade else "install" + pkgs_to_install = self.salt_pkgs.copy() + if self.distro_version == "8" and self.classic: + # centosstream 8 doesn't downgrade properly using the downgrade command for some 
reason + # So we explicitly install the correct version here + list_ret = self.proc.run( + self.pkg_mngr, "list", "--available", "salt" + ).stdout.split("\n") + list_ret = [_.strip() for _ in list_ret] + idx = list_ret.index("Available Packages") + old_ver = list_ret[idx + 1].split()[1] + pkgs_to_install = [f"{pkg}-{old_ver}" for pkg in pkgs_to_install] + if self.dbg_pkg: + # self.dbg_pkg does not exist on classic packages + dbg_exists = [x for x in pkgs_to_install if self.dbg_pkg in x] + if dbg_exists: + pkgs_to_install.remove(dbg_exists[0]) + cmd_action = "install" ret = self.proc.run( self.pkg_mngr, - "install", - *self.salt_pkgs, + cmd_action, + *pkgs_to_install, "-y", ) self._check_retcode(ret) @@ -678,85 +598,143 @@ def install_previous(self): ret = self.proc.run(self.pkg_mngr, "install", "apt-transport-https", "-y") self._check_retcode(ret) ## only classic 3005 has arm64 support - if self.major >= "3006" and platform.is_aarch64(): + if relenv and platform.is_aarch64(): arch = "arm64" elif platform.is_aarch64() and self.classic: arch = "arm64" else: arch = "amd64" pathlib.Path("/etc/apt/keyrings").mkdir(parents=True, exist_ok=True) + gpg_dest = "salt-archive-keyring.gpg" + gpg_key = gpg_dest + if relenv: + gpg_key = "SALT-PROJECT-GPG-PUBKEY-2023.gpg" + download_file( - f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver}/salt-archive-keyring.gpg", - "/etc/apt/keyrings/salt-archive-keyring.gpg", + f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver}/{gpg_key}", + f"/etc/apt/keyrings/{gpg_dest}", ) - with open( + with salt.utils.files.fopen( pathlib.Path("/etc", "apt", "sources.list.d", "salt.list"), "w" ) as fp: fp.write( - f"deb [signed-by=/etc/apt/keyrings/salt-archive-keyring.gpg arch={arch}] " + f"deb [signed-by=/etc/apt/keyrings/{gpg_dest} arch={arch}] " f"https://repo.saltproject.io/{root_url}{distro_name}/{self.distro_version}/{arch}/{major_ver} {self.distro_codename} 
main" ) - ret = self.proc.run(self.pkg_mngr, "update") self._check_retcode(ret) - ret = self.proc.run( + + cmd = [ self.pkg_mngr, "install", *self.salt_pkgs, "-y", - ) - self._check_retcode(ret) + ] + + if downgrade: + pref_file = pathlib.Path("/etc", "apt", "preferences.d", "salt.pref") + pref_file.parent.mkdir(exist_ok=True) + pref_file.write_text( + textwrap.dedent( + """\ + Package: salt* + Pin: origin "repo.saltproject.io" + Pin-Priority: 1001 + """ + ) + ) + cmd.append("--allow-downgrades") + env = os.environ.copy() + env["DEBIAN_FRONTEND"] = "noninteractive" + extra_args = [ + "-o", + "DPkg::Options::=--force-confdef", + "-o", + "DPkg::Options::=--force-confold", + ] + ret = self.proc.run(self.pkg_mngr, "update", *extra_args, env=env) + + cmd.extend(extra_args) + + ret = self.proc.run(*cmd, env=env) + # Pre-relenv packages down get downgraded to cleanly programmatically + # They work manually, and the install tests after downgrades will catch problems with the install + # Let's not check the returncode if this is the case + if not ( + downgrade + and packaging.version.parse(self.prev_version) + < packaging.version.parse("3006.0") + ): + self._check_retcode(ret) + if downgrade: + pref_file.unlink() self.stop_services() elif platform.is_windows(): - self.onedir = True - self.installer_pkg = True self.bin_dir = self.install_dir / "bin" - self.run_root = self.bin_dir / f"salt.exe" + self.run_root = self.bin_dir / "salt.exe" self.ssm_bin = self.bin_dir / "ssm.exe" - if self.file_ext == "msi": + if self.file_ext == "msi" or relenv: self.ssm_bin = self.install_dir / "ssm.exe" if not self.classic: - win_pkg = f"salt-{full_version}-windows-amd64.{self.file_ext}" - win_pkg_url = f"https://repo.saltproject.io/salt/py3/windows/{full_version}/{win_pkg}" + if not relenv: + win_pkg = ( + f"salt-{self.prev_version}-1-windows-amd64.{self.file_ext}" + ) + else: + if self.file_ext == "msi": + win_pkg = ( + f"Salt-Minion-{self.prev_version}-Py3-AMD64.{self.file_ext}" + ) + elif 
self.file_ext == "exe": + win_pkg = f"Salt-Minion-{self.prev_version}-Py3-AMD64-Setup.{self.file_ext}" + win_pkg_url = f"https://repo.saltproject.io/salt/py3/windows/{major_ver}/{win_pkg}" else: if self.file_ext == "msi": - win_pkg = f"Salt-Minion-{min_ver}-1-Py3-AMD64.{self.file_ext}" + win_pkg = ( + f"Salt-Minion-{self.prev_version}-Py3-AMD64.{self.file_ext}" + ) elif self.file_ext == "exe": - win_pkg = f"Salt-Minion-{min_ver}-1-Py3-AMD64-Setup.{self.file_ext}" + win_pkg = f"Salt-Minion-{self.prev_version}-Py3-AMD64-Setup.{self.file_ext}" win_pkg_url = f"https://repo.saltproject.io/windows/{win_pkg}" pkg_path = pathlib.Path(r"C:\TEMP", win_pkg) pkg_path.parent.mkdir(exist_ok=True) - ret = requests.get(win_pkg_url) + download_file(win_pkg_url, pkg_path) - with open(pkg_path, "wb") as fp: - fp.write(ret.content) if self.file_ext == "msi": - ret = self.proc.run( - "msiexec.exe", "/qn", "/i", str(pkg_path), 'START_MINION=""' - ) + # Write a batch file to run the installer. It is impossible to + # perform escaping of the START_MINION property that the MSI + # expects unless we do it via a batch file + batch_file = pkg_path.parent / "install_msi.cmd" + batch_content = f'msiexec /qn /i {str(pkg_path)} START_MINION=""' + with salt.utils.files.fopen(batch_file, "w") as fp: + fp.write(batch_content) + # Now run the batch file + ret = self.proc.run("cmd.exe", "/c", str(batch_file)) self._check_retcode(ret) else: ret = self.proc.run(pkg_path, "/start-minion=0", "/S") self._check_retcode(ret) - # Stop the service installed by the installer - log.debug("Removing installed salt-minion service") - self.proc.run(str(self.ssm_bin), "stop", "salt-minion") - log.debug("Removing installed salt-minion service") ret = self.proc.run(str(self.ssm_bin), "remove", "salt-minion", "confirm") self._check_retcode(ret) - if self.system_service: + if self.pkg_system_service: self._install_system_service() elif platform.is_darwin(): if self.classic: - mac_pkg = 
f"salt-{min_ver}.{minor_ver}-1-py3-x86_64.pkg" + mac_pkg = f"salt-{self.prev_version}-py3-x86_64.pkg" mac_pkg_url = f"https://repo.saltproject.io/osx/{mac_pkg}" else: - mac_pkg = f"salt-{min_ver}.{minor_ver}-1-macos-x86_64.pkg" - mac_pkg_url = f"https://repo.saltproject.io/salt/py3/macos/{major_ver}.{minor_ver}-1/{mac_pkg}" + if not relenv: + mac_pkg = f"salt-{self.prev_version}-1-macos-x86_64.pkg" + else: + mac_pkg = f"salt-{self.prev_version}-py3-x86_64.pkg" + mac_pkg_url = ( + f"https://repo.saltproject.io/salt/py3/macos/{major_ver}/{mac_pkg}" + ) + mac_pkg_path = f"/tmp/{mac_pkg}" if not os.path.exists(mac_pkg_path): download_file( @@ -767,28 +745,21 @@ def install_previous(self): ret = self.proc.run("installer", "-pkg", mac_pkg_path, "-target", "/") self._check_retcode(ret) - def _uninstall_compressed(self): + def uninstall(self): + pkg = self.pkgs[0] if platform.is_windows(): - if self.system_service: - # Uninstall the services - log.debug("Uninstalling master service") - self.proc.run(str(self.ssm_bin), "stop", "salt-master") - self.proc.run(str(self.ssm_bin), "remove", "salt-master", "confirm") - log.debug("Uninstalling minion service") - self.proc.run(str(self.ssm_bin), "stop", "salt-minion") - self.proc.run(str(self.ssm_bin), "remove", "salt-minion", "confirm") - log.debug("Uninstalling api service") - self.proc.run(str(self.ssm_bin), "stop", "salt-api") - self.proc.run(str(self.ssm_bin), "remove", "salt-api", "confirm") - log.debug("Removing the Salt Service Manager") - if self.ssm_bin: - try: - self.ssm_bin.unlink() - except PermissionError: - atexit.register(self.ssm_bin.unlink) - if platform.is_darwin(): + log.info("Uninstalling %s", pkg) + if pkg.endswith("exe"): + uninst = self.install_dir / "uninst.exe" + ret = self.proc.run(uninst, "/S") + self._check_retcode(ret) + elif pkg.endswith("msi"): + ret = self.proc.run("msiexec.exe", "/qn", "/x", pkg) + self._check_retcode(ret) + + elif platform.is_darwin(): # From here: 
https://stackoverflow.com/a/46118276/4581998 - daemons_dir = pathlib.Path(os.sep, "Library", "LaunchDaemons") + daemons_dir = pathlib.Path("/Library", "LaunchDaemons") for service in ("minion", "master", "api", "syndic"): service_name = f"com.saltstack.salt.{service}" plist_file = daemons_dir / f"{service_name}.plist" @@ -832,82 +803,19 @@ def _uninstall_compressed(self): # Remove receipt self.proc.run("pkgutil", "--forget", "com.saltstack.salt") - if self.singlebin: - log.debug("Deleting the salt binary: %s", self.run_root) - if self.run_root: - try: - self.run_root.unlink() - except PermissionError: - atexit.register(self.run_root.unlink) - else: log.debug("Deleting the onedir directory: %s", self.root / "salt") shutil.rmtree(str(self.root / "salt")) - - def _uninstall_pkgs(self): - pkg = self.pkgs[0] - if platform.is_windows(): - log.info("Uninstalling %s", pkg) - if pkg.endswith("exe"): - uninst = self.install_dir / "uninst.exe" - ret = self.proc.run(uninst, "/S") - self._check_retcode(ret) - elif pkg.endswith("msi"): - ret = self.proc.run("msiexec.exe", "/qn", "/x", pkg) - self._check_retcode(ret) - - elif platform.is_darwin(): - self._uninstall_compressed() else: log.debug("Un-Installing packages:\n%s", pprint.pformat(self.salt_pkgs)) ret = self.proc.run(self.pkg_mngr, self.rm_pkg, "-y", *self.salt_pkgs) self._check_retcode(ret) - def uninstall(self): - if self.compressed: - self._uninstall_compressed() - else: - self._uninstall_pkgs() - - def assert_uninstalled(self): - """ - Assert that the paths in /opt/saltstack/ were correctly - removed or not removed - """ - return - if platform.is_windows(): - # I'm not sure where the /opt/saltstack path is coming from - # This is the path we're using to test windows - opt_path = pathlib.Path(os.getenv("LocalAppData"), "salt", "pypath") - else: - opt_path = pathlib.Path(os.sep, "opt", "saltstack", "salt", "pypath") - if not opt_path.exists(): - if platform.is_windows(): - assert not opt_path.parent.exists() - else: - 
assert not opt_path.parent.parent.exists() - else: - opt_path_contents = list(opt_path.rglob("*")) - if not opt_path_contents: - pytest.fail( - f"The path '{opt_path}' exists but there are no files in it." - ) - else: - for path in list(opt_path_contents): - if path.name in (".installs.json", "__pycache__"): - opt_path_contents.remove(path) - if opt_path_contents: - pytest.fail( - "The test left some files behind: {}".format( - ", ".join([str(p) for p in opt_path_contents]) - ) - ) - def write_launchd_conf(self, service): service_name = f"com.saltstack.salt.{service}" ret = self.proc.run("launchctl", "list", service_name) # 113 means it couldn't find a service with that name if ret.returncode == 113: - daemons_dir = pathlib.Path(os.sep, "Library", "LaunchDaemons") + daemons_dir = pathlib.Path("/Library", "LaunchDaemons") plist_file = daemons_dir / f"{service_name}.plist" # Make sure we're using this plist file if plist_file.exists(): @@ -915,8 +823,7 @@ def write_launchd_conf(self, service): plist_file.unlink() log.debug("Creating plist file for service: %s", service) - contents = textwrap.dedent( - f"""\ + contents = f"""\ @@ -928,9 +835,12 @@ def write_launchd_conf(self, service): KeepAlive ProgramArguments - - {self.run_root} - {service} + """ + for part in self.binary_paths[service]: + contents += ( + f"""\n {part}\n""" + ) + contents += f"""\ -c {self.conf_dir} @@ -947,8 +857,7 @@ def write_launchd_conf(self, service): """ - ) - plist_file.write_text(contents, encoding="utf-8") + plist_file.write_text(textwrap.dedent(contents), encoding="utf-8") contents = plist_file.read_text() log.debug("Created '%s'. Contents:\n%s", plist_file, contents) @@ -959,7 +868,7 @@ def write_systemd_conf(self, service, binary): ret = self.proc.run("systemctl", "daemon-reload") self._check_retcode(ret) ret = self.proc.run("systemctl", "status", service) - if ret.returncode in (3, 4): + if ret.returncode == 4: log.warning( "No systemd unit file was found for service %s. 
Creating one.", service ) @@ -983,9 +892,7 @@ def write_systemd_conf(self, service, binary): binary = shutil.which(binary[0]) or binary[0] elif isinstance(binary, list): binary = " ".join(binary) - unit_path = pathlib.Path( - os.sep, "etc", "systemd", "system", f"{service}.service" - ) + unit_path = pathlib.Path(f"/etc/systemd/system/{service}.service") contents = contents.format( service=service, tgt=binary, conf_dir=self.conf_dir ) @@ -1009,15 +916,17 @@ def __enter__(self): def __exit__(self, *_): if not self.no_uninstall: self.uninstall() - self.assert_uninstalled() class PkgSystemdSaltDaemonImpl(SystemdSaltDaemonImpl): + # pylint: disable=access-member-before-definition def get_service_name(self): if self._service_name is None: self._service_name = self.factory.script_name return self._service_name + # pylint: enable=access-member-before-definition + @attr.s(kw_only=True) class PkgLaunchdSaltDaemonImpl(PkgSystemdSaltDaemonImpl): @@ -1026,7 +935,7 @@ class PkgLaunchdSaltDaemonImpl(PkgSystemdSaltDaemonImpl): @plist_file.default def _default_plist_file(self): - daemons_dir = pathlib.Path(os.sep, "Library", "LaunchDaemons") + daemons_dir = pathlib.Path("/Library", "LaunchDaemons") return daemons_dir / f"{self.get_service_name()}.plist" def get_service_name(self): @@ -1096,12 +1005,12 @@ def _terminate(self): # We completely override the parent class method because we're not using # the self._terminal property, it's a launchd service if self._process is None: # pragma: no cover + # pylint: disable=access-member-before-definition if TYPE_CHECKING: # Make mypy happy assert self._terminal_result - return ( - self._terminal_result - ) # pylint: disable=access-member-before-definition + return self._terminal_result + # pylint: enable=access-member-before-definition atexit.unregister(self.terminate) log.info("Stopping %s", self.factory) @@ -1109,12 +1018,10 @@ def _terminate(self): # Collect any child processes information before terminating the process with 
contextlib.suppress(psutil.NoSuchProcess): for child in psutil.Process(pid).children(recursive=True): - if ( - child not in self._children - ): # pylint: disable=access-member-before-definition - self._children.append( - child - ) # pylint: disable=access-member-before-definition + # pylint: disable=access-member-before-definition + if child not in self._children: + self._children.append(child) + # pylint: enable=access-member-before-definition if self._process.is_running(): # pragma: no cover cmdline = _get_cmdline(self._process) @@ -1153,10 +1060,12 @@ def _terminate(self): slow_stop=self.factory.slow_stop, ) + # pylint: disable=access-member-before-definition if self._terminal_stdout is not None: - self._terminal_stdout.close() # pylint: disable=access-member-before-definition + self._terminal_stdout.close() if self._terminal_stderr is not None: - self._terminal_stderr.close() # pylint: disable=access-member-before-definition + self._terminal_stderr.close() + # pylint: enable=access-member-before-definition stdout = stderr = "" try: self._terminal_result = ProcessResult( @@ -1231,12 +1140,12 @@ def _terminate(self): # We completely override the parent class method because we're not using the # self._terminal property, it's a systemd service if self._process is None: # pragma: no cover + # pylint: disable=access-member-before-definition if TYPE_CHECKING: # Make mypy happy assert self._terminal_result - return ( - self._terminal_result - ) # pylint: disable=access-member-before-definition + return self._terminal_result + # pylint: enable=access-member-before-definition atexit.unregister(self.terminate) log.info("Stopping %s", self.factory) @@ -1244,12 +1153,10 @@ def _terminate(self): # Collect any child processes information before terminating the process with contextlib.suppress(psutil.NoSuchProcess): for child in psutil.Process(pid).children(recursive=True): - if ( - child not in self._children - ): # pylint: disable=access-member-before-definition - 
self._children.append( - child - ) # pylint: disable=access-member-before-definition + # pylint: disable=access-member-before-definition + if child not in self._children: + self._children.append(child) + # pylint: enable=access-member-before-definition if self._process.is_running(): # pragma: no cover cmdline = _get_cmdline(self._process) @@ -1289,10 +1196,12 @@ def _terminate(self): slow_stop=self.factory.slow_stop, ) + # pylint: disable=access-member-before-definition if self._terminal_stdout is not None: - self._terminal_stdout.close() # pylint: disable=access-member-before-definition + self._terminal_stdout.close() if self._terminal_stderr is not None: - self._terminal_stderr.close() # pylint: disable=access-member-before-definition + self._terminal_stderr.close() + # pylint: enable=access-member-before-definition stdout = stderr = "" try: self._terminal_result = ProcessResult( @@ -1313,11 +1222,7 @@ class PkgMixin: salt_pkg_install: SaltPkgInstall = attr.ib() def get_script_path(self): - if self.salt_pkg_install.compressed or ( - platform.is_darwin() - and self.salt_pkg_install.classic - and self.salt_pkg_install.upgrade - ): + if platform.is_darwin() and self.salt_pkg_install.classic: if self.salt_pkg_install.run_root and os.path.exists( self.salt_pkg_install.run_root ): @@ -1328,23 +1233,8 @@ def get_script_path(self): return str(self.salt_pkg_install.install_dir / self.script_name) return super().get_script_path() - def get_base_script_args(self): - base_script_args = [] - if self.salt_pkg_install.run_root and os.path.exists( - self.salt_pkg_install.run_root - ): - if self.salt_pkg_install.compressed: - if self.script_name == "spm": - base_script_args.append(self.script_name) - elif self.script_name != "salt": - base_script_args.append(self.script_name.split("salt-")[-1]) - base_script_args.extend(super().get_base_script_args()) - return base_script_args - def cmdline(self, *args, **kwargs): _cmdline = super().cmdline(*args, **kwargs) - if 
self.salt_pkg_install.compressed is False: - return _cmdline if _cmdline[0] == self.python_executable: _cmdline.pop(0) return _cmdline @@ -1353,7 +1243,7 @@ def cmdline(self, *args, **kwargs): @attr.s(kw_only=True) class DaemonPkgMixin(PkgMixin): def __attrs_post_init__(self): - if not platform.is_windows() and self.salt_pkg_install.system_service: + if not platform.is_windows() and self.salt_pkg_install.pkg_system_service: if platform.is_darwin(): self.write_launchd_conf() else: @@ -1381,7 +1271,7 @@ def __attrs_post_init__(self): DaemonPkgMixin.__attrs_post_init__(self) def _get_impl_class(self): - if self.system_install and self.salt_pkg_install.system_service: + if self.system_service and self.salt_pkg_install.pkg_system_service: if platform.is_windows(): return PkgSsmSaltDaemonImpl if platform.is_darwin(): @@ -1410,17 +1300,21 @@ def salt_api_daemon(self, **kwargs): factory_class=SaltApi, salt_pkg_install=self.salt_pkg_install, **kwargs ) - def salt_key_cli(self, **factory_class_kwargs): + def salt_key_cli(self, factory_class=None, **factory_class_kwargs): + if not factory_class: + factory_class = SaltKey + factory_class_kwargs["salt_pkg_install"] = self.salt_pkg_install return super().salt_key_cli( - factory_class=SaltKey, - salt_pkg_install=self.salt_pkg_install, + factory_class=factory_class, **factory_class_kwargs, ) - def salt_cli(self, **factory_class_kwargs): + def salt_cli(self, factory_class=None, **factory_class_kwargs): + if not factory_class: + factory_class = SaltCli + factory_class_kwargs["salt_pkg_install"] = self.salt_pkg_install return super().salt_cli( - factory_class=SaltCli, - salt_pkg_install=self.salt_pkg_install, + factory_class=factory_class, **factory_class_kwargs, ) @@ -1462,7 +1356,7 @@ def __attrs_post_init__(self): DaemonPkgMixin.__attrs_post_init__(self) def _get_impl_class(self): - if self.system_install and self.salt_pkg_install.system_service: + if self.system_service and self.salt_pkg_install.pkg_system_service: if 
platform.is_windows(): return PkgSsmSaltDaemonImpl if platform.is_darwin(): @@ -1478,10 +1372,12 @@ def write_systemd_conf(self): "salt-minion", self.salt_pkg_install.binary_paths["minion"] ) - def salt_call_cli(self, **factory_class_kwargs): + def salt_call_cli(self, factory_class=None, **factory_class_kwargs): + if not factory_class: + factory_class = SaltCall + factory_class_kwargs["salt_pkg_install"] = self.salt_pkg_install return super().salt_call_cli( - factory_class=SaltCall, - salt_pkg_install=self.salt_pkg_install, + factory_class=factory_class, **factory_class_kwargs, ) @@ -1498,7 +1394,7 @@ def __attrs_post_init__(self): DaemonPkgMixin.__attrs_post_init__(self) def _get_impl_class(self): - if self.system_install and self.salt_pkg_install.system_service: + if self.system_service and self.salt_pkg_install.pkg_system_service: if platform.is_windows(): return PkgSsmSaltDaemonImpl if platform.is_darwin(): @@ -1528,14 +1424,14 @@ def __attrs_post_init__(self): @attr.s(kw_only=True, slots=True) -class SaltCli(PkgMixin, salt.SaltCli): +class SaltCli(PkgMixin, saltfactories.cli.salt.SaltCli): """ Subclassed just to tweak the binary paths if needed. 
""" def __attrs_post_init__(self): self.script_name = "salt" - salt.SaltCli.__attrs_post_init__(self) + saltfactories.cli.salt.SaltCli.__attrs_post_init__(self) @attr.s(kw_only=True, slots=True) @@ -1549,82 +1445,10 @@ def __attrs_post_init__(self): key.SaltKey.__attrs_post_init__(self) -@attr.s(kw_only=True, slots=True) -class TestUser: - """ - Add a test user - """ - - salt_call_cli = attr.ib() - - username = attr.ib(default="saltdev") - # Must follow Windows Password Complexity requirements - password = attr.ib(default="P@ssW0rd") - _pw_record = attr.ib(init=False, repr=False, default=None) - - def salt_call_local(self, *args): - ret = self.salt_call_cli.run("--local", *args) - if ret.returncode != 0: - log.error(ret) - assert ret.returncode == 0 - return ret.data - - def add_user(self): - log.debug("Adding system account %r", self.username) - if platform.is_windows(): - self.salt_call_local("user.add", self.username, self.password) - else: - self.salt_call_local("user.add", self.username) - hash_passwd = crypt.crypt(self.password, crypt.mksalt(crypt.METHOD_SHA512)) - self.salt_call_local("shadow.set_password", self.username, hash_passwd) - assert self.username in self.salt_call_local("user.list_users") - - def remove_user(self): - log.debug("Removing system account %r", self.username) - if platform.is_windows(): - self.salt_call_local( - "user.delete", self.username, "purge=True", "force=True" - ) - else: - self.salt_call_local("user.delete", self.username, "remove=True") - - @property - def pw_record(self): - if self._pw_record is None and HAS_PWD: - self._pw_record = pwd.getpwnam(self.username) - return self._pw_record - - @property - def uid(self): - if HAS_PWD: - return self.pw_record.pw_uid - return None - - @property - def gid(self): - if HAS_PWD: - return self.pw_record.pw_gid - return None - - @property - def env(self): - environ = os.environ.copy() - environ["LOGNAME"] = environ["USER"] = self.username - environ["HOME"] = self.pw_record.pw_dir - return 
environ - - def __enter__(self): - self.add_user() - return self - - def __exit__(self, *_): - self.remove_user() - - @attr.s(kw_only=True, slots=True) class ApiRequest: - salt_api: SaltApi = attr.ib(repr=False) - test_account: TestUser = attr.ib(repr=False) + port: int = attr.ib(repr=False) + account: TestAccount = attr.ib(repr=False) session: requests.Session = attr.ib(init=False, repr=False) api_uri: str = attr.ib(init=False) auth_data: Dict[str, str] = attr.ib(init=False) @@ -1635,13 +1459,13 @@ def _default_session(self): @api_uri.default def _default_api_uri(self): - return f"http://localhost:{self.salt_api.config['rest_cherrypy']['port']}" + return f"http://localhost:{self.port}" @auth_data.default def _default_auth_data(self): return { - "username": self.test_account.username, - "password": self.test_account.password, + "username": self.account.username, + "password": self.account.password, "eauth": "auto", "out": "json", } @@ -1661,44 +1485,12 @@ def __exit__(self, *args): @pytest.helpers.register -def remove_stale_minion_key(master, minion_id): - key_path = os.path.join(master.config["pki_dir"], "minions", minion_id) - if os.path.exists(key_path): - os.unlink(key_path) - else: - log.debug("The minion(id=%r) key was not found at %s", minion_id, key_path) - - -@pytest.helpers.register -def remove_stale_master_key(master): - keys_path = os.path.join(master.config["pki_dir"], "master") - for key_name in ("master.pem", "master.pub"): - key_path = os.path.join(keys_path, key_name) - if os.path.exists(key_path): - os.unlink(key_path) - else: - log.debug( - "The master(id=%r) %s key was not found at %s", - master.id, - key_name, - key_path, - ) - key_path = os.path.join(master.config["pki_dir"], "minion", "minion_master.pub") - if os.path.exists(key_path): - os.unlink(key_path) - else: - log.debug( - "The master(id=%r) minion_master.pub key was not found at %s", - master.id, - key_path, - ) - - -def download_file(url, dest): +def download_file(url, dest, 
auth=None): # NOTE the stream=True parameter below - with requests.get(url, stream=True) as r: + with requests.get(url, stream=True, auth=auth) as r: r.raise_for_status() - with open(dest, "wb") as f: + with salt.utils.files.fopen(dest, "wb") as f: for chunk in r.iter_content(chunk_size=8192): if chunk: f.write(chunk) + return dest diff --git a/tests/support/pytest/helpers.py b/tests/support/pytest/helpers.py index 441e4ea3af9b..fa12784c04ec 100644 --- a/tests/support/pytest/helpers.py +++ b/tests/support/pytest/helpers.py @@ -174,6 +174,31 @@ def remove_stale_minion_key(master, minion_id): log.debug("The minion(id=%r) key was not found at %s", minion_id, key_path) +@pytest.helpers.register +def remove_stale_master_key(master): + keys_path = os.path.join(master.config["pki_dir"], "master") + for key_name in ("master.pem", "master.pub"): + key_path = os.path.join(keys_path, key_name) + if os.path.exists(key_path): + os.unlink(key_path) + else: + log.debug( + "The master(id=%r) %s key was not found at %s", + master.id, + key_name, + key_path, + ) + key_path = os.path.join(master.config["pki_dir"], "minion", "minion_master.pub") + if os.path.exists(key_path): + os.unlink(key_path) + else: + log.debug( + "The master(id=%r) minion_master.pub key was not found at %s", + master.id, + key_path, + ) + + @pytest.helpers.register def remove_stale_proxy_minion_cache_file(proxy_minion, minion_id=None): cachefile = os.path.join( diff --git a/tests/support/pytest/mysql.py b/tests/support/pytest/mysql.py index 218c38686e7b..ac3b6601d7fe 100644 --- a/tests/support/pytest/mysql.py +++ b/tests/support/pytest/mysql.py @@ -3,6 +3,7 @@ import attr import pytest +from pytestskipmarkers.utils import platform from saltfactories.utils import random_string # This `pytest.importorskip` here actually works because this module @@ -61,7 +62,7 @@ def get_credentials(self, **kwargs): def get_test_versions(): test_versions = [] - name = "mysql/mysql-server" + name = "mysql-server" for version in 
("5.5", "5.6", "5.7", "8.0"): test_versions.append( MySQLImage( @@ -71,7 +72,7 @@ def get_test_versions(): ) ) name = "mariadb" - for version in ("10.3", "10.4", "10.5", "10.6"): + for version in ("10.3", "10.4", "10.5"): test_versions.append( MySQLImage( name=name, @@ -80,7 +81,7 @@ def get_test_versions(): ) ) name = "percona" - for version in ("5.5", "5.6", "5.7", "8.0"): + for version in ("5.6", "5.7", "8.0"): test_versions.append( MySQLImage( name=name, @@ -102,6 +103,10 @@ def mysql_image(request): @pytest.fixture(scope="module") def create_mysql_combo(mysql_image): + if platform.is_fips_enabled(): + if mysql_image.name in ("mysql-server", "percona") and mysql_image.tag == "8.0": + pytest.skip(f"These tests fail on {mysql_image.name}:{mysql_image.tag}") + return MySQLCombo( mysql_name=mysql_image.name, mysql_version=mysql_image.tag, diff --git a/tests/support/pytest/transport.py b/tests/support/pytest/transport.py index eaa8adc8bd45..d71e8fe02543 100644 --- a/tests/support/pytest/transport.py +++ b/tests/support/pytest/transport.py @@ -130,10 +130,10 @@ def _run(self, loop): while True: curr_time = time.time() if time.time() > self.hard_timeout: - log.error("Hard timeout reaced in test collector!") + log.error("Hard timeout reached in test collector!") break if curr_time - last_msg >= self.timeout: - log.error("Receive timeout reaced in test collector!") + log.error("Receive timeout reached in test collector!") break try: payload = yield self._recv() diff --git a/tests/support/virt.py b/tests/support/virt.py index 8f7e74f4e693..213b88b4d9ca 100644 --- a/tests/support/virt.py +++ b/tests/support/virt.py @@ -1,3 +1,5 @@ +import logging +import sys import time import uuid @@ -7,6 +9,8 @@ from tests.conftest import CODE_DIR +log = logging.getLogger(__name__) + @attr.s(kw_only=True, slots=True) class SaltVirtMinionContainerFactory(SaltMinion): @@ -54,7 +58,6 @@ def __attrs_post_init__(self): self.container_run_kwargs["volumes"].update( { str(CODE_DIR): {"bind": 
"/salt", "mode": "z"}, - str(CODE_DIR): {"bind": str(CODE_DIR), "mode": "z"}, } ) self.container_run_kwargs["working_dir"] = str(CODE_DIR) @@ -65,6 +68,7 @@ def __attrs_post_init__(self): self.container_start_check(self._check_script_path_exists) for port in (self.sshd_port, self.libvirt_tcp_port, self.libvirt_tls_port): self.check_ports[port] = port + self.before_start(self._install_salt_in_container) def _check_script_path_exists(self, timeout_at): while time.time() <= timeout_at: @@ -77,3 +81,37 @@ def _check_script_path_exists(self, timeout_at): else: return False return True + + def _install_salt_in_container(self): + ret = self.run("bash", "-c", "echo $SALT_PY_VERSION") + assert ret.returncode == 0 + if not ret.stdout: + log.warning( + "The 'SALT_PY_VERSION' environment variable is not set on the container" + ) + salt_py_version = 3 + ret = self.run( + "python3", + "-c", + "import sys; sys.stderr.write('{}.{}'.format(*sys.version_info))", + ) + assert ret.returncode == 0 + if not ret.stdout: + requirements_py_version = "{}.{}".format(*sys.version_info) + else: + requirements_py_version = ret.stdout.strip() + else: + salt_py_version = requirements_py_version = ret.stdout.strip() + + self.python_executable = f"python{salt_py_version}" + + ret = self.run( + self.python_executable, + "-m", + "pip", + "install", + f"--constraint=/salt/requirements/static/ci/py{requirements_py_version}/linux.txt", + "/salt", + ) + log.debug("Install Salt in the container: %s", ret) + assert ret.returncode == 0 diff --git a/tests/unit/modules/test_boto3_elasticsearch.py b/tests/unit/modules/test_boto3_elasticsearch.py index 6b82c0abba73..0e60a9e07461 100644 --- a/tests/unit/modules/test_boto3_elasticsearch.py +++ b/tests/unit/modules/test_boto3_elasticsearch.py @@ -28,6 +28,10 @@ # https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12 REQUIRED_BOTO3_VERSION = "1.2.1" +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + def __virtual__(): """ diff 
--git a/tests/unit/modules/test_boto3_route53.py b/tests/unit/modules/test_boto3_route53.py index 9d4214719421..5e7332fbb355 100644 --- a/tests/unit/modules/test_boto3_route53.py +++ b/tests/unit/modules/test_boto3_route53.py @@ -25,6 +25,10 @@ # https://github.com/boto/boto/commit/33ac26b416fbb48a60602542b4ce15dcc7029f12 REQUIRED_BOTO3_VERSION = "1.2.1" +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + def __virtual__(): """ diff --git a/tests/unit/modules/test_boto_apigateway.py b/tests/unit/modules/test_boto_apigateway.py index 5f3d2a498227..e6bb33a47dc2 100644 --- a/tests/unit/modules/test_boto_apigateway.py +++ b/tests/unit/modules/test_boto_apigateway.py @@ -23,6 +23,10 @@ except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/modules/test_boto_cloudtrail.py b/tests/unit/modules/test_boto_cloudtrail.py index de31ff955a0c..3b6488b31297 100644 --- a/tests/unit/modules/test_boto_cloudtrail.py +++ b/tests/unit/modules/test_boto_cloudtrail.py @@ -22,6 +22,10 @@ except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import # the boto_cloudtrail module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_boto_cloudwatch_event.py b/tests/unit/modules/test_boto_cloudwatch_event.py index 82d158104aa2..4d37747b8f7f 100644 --- a/tests/unit/modules/test_boto_cloudwatch_event.py +++ b/tests/unit/modules/test_boto_cloudwatch_event.py @@ -22,6 +22,10 @@ except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import log = logging.getLogger(__name__) diff --git a/tests/unit/modules/test_boto_cognitoidentity.py b/tests/unit/modules/test_boto_cognitoidentity.py index 1e213a169ac4..51ae9075a0ba 100644 --- 
a/tests/unit/modules/test_boto_cognitoidentity.py +++ b/tests/unit/modules/test_boto_cognitoidentity.py @@ -21,6 +21,10 @@ except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/modules/test_boto_dynamodb.py b/tests/unit/modules/test_boto_dynamodb.py deleted file mode 100644 index 571fad5b1baf..000000000000 --- a/tests/unit/modules/test_boto_dynamodb.py +++ /dev/null @@ -1,81 +0,0 @@ -import salt.modules.boto_dynamodb as boto_dynamodb -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -ARN = "arn:aws:dynamodb:us-east-1:012345678901:table/my-table" -TAGS = {"foo": "bar", "hello": "world"} -TAGS_AS_LIST = [{"Key": "foo", "Value": "bar"}, {"Key": "hello", "Value": "world"}] - - -class DummyConn: - def __init__(self): - self.list_tags_of_resource = MagicMock( - return_value={"Tags": TAGS_AS_LIST, "NextToken": None} - ) - self.tag_resource = MagicMock(return_value=True) - self.untag_resource = MagicMock(return_value=True) - - -class BotoDynamoDBTestCase(TestCase, LoaderModuleMockMixin): - """ - TestCase for salt.modules.boto_elb module - """ - - def setup_loader_modules(self): - return {boto_dynamodb: {"__opts__": {}, "__utils__": {}}} - - def test_list_tags_of_resource(self): - """ - Test that the correct API call is made and correct return format is - returned. - """ - conn = DummyConn() - utils = {"boto3.get_connection": MagicMock(return_value=conn)} - with patch.dict(boto_dynamodb.__utils__, utils): - ret = boto_dynamodb.list_tags_of_resource(resource_arn=ARN) - - assert ret == TAGS, ret - conn.list_tags_of_resource.assert_called_once_with( - ResourceArn=ARN, NextToken="" - ) - - def test_tag_resource(self): - """ - Test that the correct API call is made and correct return format is - returned. 
- """ - conn = DummyConn() - utils = {"boto3.get_connection": MagicMock(return_value=conn)} - with patch.dict(boto_dynamodb.__utils__, utils): - ret = boto_dynamodb.tag_resource(resource_arn=ARN, tags=TAGS) - - assert ret is True, ret - # Account for differing dict iteration order among Python versions by - # being more explicit in asserts. - assert len(conn.tag_resource.mock_calls) == 1 - call = conn.tag_resource.mock_calls[0] - # No positional args - assert not call.args - # Make sure there aren't any additional kwargs beyond what we expect - assert len(call.kwargs) == 2 - assert call.kwargs["ResourceArn"] == ARN - # Make sure there aren't any additional tags beyond what we expect - assert len(call.kwargs["Tags"]) == 2 - for tag_dict in TAGS_AS_LIST: - assert tag_dict in call.kwargs["Tags"] - - def test_untag_resource(self): - """ - Test that the correct API call is made and correct return format is - returned. - """ - conn = DummyConn() - utils = {"boto3.get_connection": MagicMock(return_value=conn)} - with patch.dict(boto_dynamodb.__utils__, utils): - ret = boto_dynamodb.untag_resource(resource_arn=ARN, tag_keys=sorted(TAGS)) - - assert ret is True, ret - conn.untag_resource.assert_called_once_with( - ResourceArn=ARN, TagKeys=sorted(TAGS) - ) diff --git a/tests/unit/modules/test_boto_elasticsearch_domain.py b/tests/unit/modules/test_boto_elasticsearch_domain.py index 5c5845aa25b5..e0329df5cec6 100644 --- a/tests/unit/modules/test_boto_elasticsearch_domain.py +++ b/tests/unit/modules/test_boto_elasticsearch_domain.py @@ -21,6 +21,10 @@ except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/modules/test_boto_iot.py b/tests/unit/modules/test_boto_iot.py index 7c96244ce089..8c61d86dd9b3 100644 --- a/tests/unit/modules/test_boto_iot.py +++ b/tests/unit/modules/test_boto_iot.py @@ -23,6 +23,10 @@ except ImportError: HAS_BOTO = False +pytestmark = [ 
+ pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import # the boto_iot module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_boto_lambda.py b/tests/unit/modules/test_boto_lambda.py index d32dc9345b67..157e559207d9 100644 --- a/tests/unit/modules/test_boto_lambda.py +++ b/tests/unit/modules/test_boto_lambda.py @@ -26,6 +26,10 @@ except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module # the boto_lambda module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_boto_s3_bucket.py b/tests/unit/modules/test_boto_s3_bucket.py index 8e418a8293c3..90d868d11416 100644 --- a/tests/unit/modules/test_boto_s3_bucket.py +++ b/tests/unit/modules/test_boto_s3_bucket.py @@ -22,6 +22,10 @@ except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module,unused-import # the boto_s3_bucket module relies on the connect_to_region() method diff --git a/tests/unit/modules/test_config.py b/tests/unit/modules/test_config.py deleted file mode 100644 index d150ee7f4d40..000000000000 --- a/tests/unit/modules/test_config.py +++ /dev/null @@ -1,162 +0,0 @@ -import fnmatch - -import salt.modules.config as config -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import patch -from tests.support.unit import TestCase - -DEFAULTS = { - "test.option.foo": "value of test.option.foo in DEFAULTS", - "test.option.bar": "value of test.option.bar in DEFAULTS", - "test.option.baz": "value of test.option.baz in DEFAULTS", - "test.option": "value of test.option in DEFAULTS", -} - - -class TestModulesConfig(TestCase, LoaderModuleMockMixin): - - no_match = "test.option.nope" - opt_name = "test.option.foo" - wildcard_opt_name = "test.option.b*" - - def setup_loader_modules(self): - return { - 
config: { - "__opts__": { - "test.option.foo": "value of test.option.foo in __opts__", - "test.option.bar": "value of test.option.bar in __opts__", - "test.option.baz": "value of test.option.baz in __opts__", - }, - "__pillar__": { - "test.option.foo": "value of test.option.foo in __pillar__", - "test.option.bar": "value of test.option.bar in __pillar__", - "test.option.baz": "value of test.option.baz in __pillar__", - "master": { - "test.option.foo": "value of test.option.foo in master", - "test.option.bar": "value of test.option.bar in master", - "test.option.baz": "value of test.option.baz in master", - }, - }, - "__grains__": { - "test.option.foo": "value of test.option.foo in __grains__", - "test.option.bar": "value of test.option.bar in __grains__", - "test.option.baz": "value of test.option.baz in __grains__", - }, - } - } - - def _wildcard_match(self, data): - return {x: data[x] for x in fnmatch.filter(data, self.wildcard_opt_name)} - - def test_defaults_only_name(self): - with patch.dict(config.DEFAULTS, DEFAULTS): - opt_name = "test.option" - opt = config.option(opt_name) - self.assertEqual(opt, config.DEFAULTS[opt_name]) - - def test_no_match(self): - """ - Make sure that the defa - """ - with patch.dict(config.DEFAULTS, DEFAULTS): - ret = config.option(self.no_match) - assert ret == "", ret - - default = "wat" - ret = config.option(self.no_match, default=default) - assert ret == default, ret - - ret = config.option(self.no_match, wildcard=True) - assert ret == {}, ret - - default = {"foo": "bar"} - ret = config.option(self.no_match, default=default, wildcard=True) - assert ret == default, ret - - # Should be no match since wildcard=False - ret = config.option(self.wildcard_opt_name) - assert ret == "", ret - - def test_omits(self): - with patch.dict(config.DEFAULTS, DEFAULTS): - - # ********** OMIT NOTHING ********** - - # Match should be in __opts__ dict - ret = config.option(self.opt_name) - assert ret == config.__opts__[self.opt_name], ret - - # 
Wildcard match - ret = config.option(self.wildcard_opt_name, wildcard=True) - assert ret == self._wildcard_match(config.__opts__), ret - - # ********** OMIT __opts__ ********** - - # Match should be in __grains__ dict - ret = config.option(self.opt_name, omit_opts=True) - assert ret == config.__grains__[self.opt_name], ret - - # Wildcard match - ret = config.option(self.wildcard_opt_name, omit_opts=True, wildcard=True) - assert ret == self._wildcard_match(config.__grains__), ret - - # ********** OMIT __opts__, __grains__ ********** - - # Match should be in __pillar__ dict - ret = config.option(self.opt_name, omit_opts=True, omit_grains=True) - assert ret == config.__pillar__[self.opt_name], ret - - # Wildcard match - ret = config.option( - self.wildcard_opt_name, omit_opts=True, omit_grains=True, wildcard=True - ) - assert ret == self._wildcard_match(config.__pillar__), ret - - # ********** OMIT __opts__, __grains__, __pillar__ ********** - - # Match should be in master opts - ret = config.option( - self.opt_name, omit_opts=True, omit_grains=True, omit_pillar=True - ) - assert ret == config.__pillar__["master"][self.opt_name], ret - - # Wildcard match - ret = config.option( - self.wildcard_opt_name, - omit_opts=True, - omit_grains=True, - omit_pillar=True, - wildcard=True, - ) - assert ret == self._wildcard_match(config.__pillar__["master"]), ret - - # ********** OMIT ALL THE THINGS ********** - - # Match should be in master opts - ret = config.option( - self.opt_name, - omit_opts=True, - omit_grains=True, - omit_pillar=True, - omit_master=True, - ) - assert ret == config.DEFAULTS[self.opt_name], ret - - # Wildcard match - ret = config.option( - self.wildcard_opt_name, - omit_opts=True, - omit_grains=True, - omit_pillar=True, - omit_master=True, - wildcard=True, - ) - assert ret == self._wildcard_match(config.DEFAULTS), ret - - # Match should be in master opts - ret = config.option(self.opt_name, omit_all=True) - assert ret == config.DEFAULTS[self.opt_name], ret - 
- # Wildcard match - ret = config.option(self.wildcard_opt_name, omit_all=True, wildcard=True) - assert ret == self._wildcard_match(config.DEFAULTS), ret diff --git a/tests/unit/modules/test_deb_postgres.py b/tests/unit/modules/test_deb_postgres.py deleted file mode 100644 index 37e276fdffda..000000000000 --- a/tests/unit/modules/test_deb_postgres.py +++ /dev/null @@ -1,184 +0,0 @@ -import salt.modules.deb_postgres as deb_postgres -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import Mock, patch -from tests.support.unit import TestCase - -LSCLUSTER = """\ -8.4 main 5432 online postgres /srv/8.4/main \ - /var/log/postgresql/postgresql-8.4-main.log -9.1 main 5433 online postgres /srv/9.1/main \ - /var/log/postgresql/postgresql-9.1-main.log -""" - - -class PostgresClusterTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - self.cmd_run_all_mock = Mock(return_value={"stdout": LSCLUSTER}) - self.addCleanup(delattr, self, "cmd_run_all_mock") - patcher = patch( - "salt.utils.path.which", Mock(return_value="/usr/bin/pg_createcluster") - ) - patcher.start() - self.addCleanup(patcher.stop) - return { - deb_postgres: { - "__salt__": { - "config.option": Mock(), - "cmd.run_all": self.cmd_run_all_mock, - "file.chown": Mock(), - "file.remove": Mock(), - } - } - } - - def test_cluster_create(self): - deb_postgres.cluster_create( - "9.3", - "main", - port="5432", - locale="fr_FR", - encoding="UTF-8", - datadir="/opt/postgresql", - ) - cmdstr = ( - "/usr/bin/pg_createcluster " - "--port 5432 --locale fr_FR --encoding UTF-8 " - "--datadir /opt/postgresql " - "9.3 main" - ) - self.assertEqual(cmdstr, self.cmd_run_all_mock.call_args[0][0]) - - def test_cluster_create_with_initdb_options(self): - deb_postgres.cluster_create( - "11", - "main", - port="5432", - locale="fr_FR", - encoding="UTF-8", - datadir="/opt/postgresql", - allow_group_access=True, - data_checksums=True, - wal_segsize="32", - ) - cmdstr = ( - 
"/usr/bin/pg_createcluster " - "--port 5432 --locale fr_FR --encoding UTF-8 " - "--datadir /opt/postgresql " - "11 main " - "-- " - "--allow-group-access " - "--data-checksums " - "--wal-segsize 32" - ) - self.assertEqual(cmdstr, self.cmd_run_all_mock.call_args[0][0]) - - def test_cluster_create_with_float(self): - deb_postgres.cluster_create( - 9.3, - "main", - port="5432", - locale="fr_FR", - encoding="UTF-8", - datadir="/opt/postgresql", - ) - cmdstr = ( - "/usr/bin/pg_createcluster " - "--port 5432 --locale fr_FR --encoding UTF-8 " - "--datadir /opt/postgresql " - "9.3 main" - ) - self.assertEqual(cmdstr, self.cmd_run_all_mock.call_args[0][0]) - - -class PostgresLsClusterTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - self.cmd_run_all_mock = Mock(return_value={"stdout": LSCLUSTER}) - self.addCleanup(delattr, self, "cmd_run_all_mock") - patcher = patch( - "salt.utils.path.which", Mock(return_value="/usr/bin/pg_lsclusters") - ) - patcher.start() - self.addCleanup(patcher.stop) - return { - deb_postgres: { - "__salt__": { - "config.option": Mock(), - "cmd.run_all": self.cmd_run_all_mock, - "file.chown": Mock(), - "file.remove": Mock(), - } - } - } - - def test_parse_pg_lsclusters(self): - stdout = LSCLUSTER - self.maxDiff = None - self.assertDictEqual( - { - "8.4/main": { - "port": 5432, - "status": "online", - "user": "postgres", - "datadir": "/srv/8.4/main", - "log": "/var/log/postgresql/postgresql-8.4-main.log", - }, - "9.1/main": { - "port": 5433, - "status": "online", - "user": "postgres", - "datadir": "/srv/9.1/main", - "log": "/var/log/postgresql/postgresql-9.1-main.log", - }, - }, - deb_postgres._parse_pg_lscluster(stdout), - ) - - def test_cluster_list(self): - return_list = deb_postgres.cluster_list() - self.assertEqual( - "/usr/bin/pg_lsclusters --no-header", self.cmd_run_all_mock.call_args[0][0] - ) - return_dict = deb_postgres.cluster_list(verbose=True) - self.assertIsInstance(return_dict, dict) - - def 
test_cluster_exists(self): - self.assertTrue(deb_postgres.cluster_exists("8.4") is True) - self.assertTrue(deb_postgres.cluster_exists("8.4", "main") is True) - self.assertFalse(deb_postgres.cluster_exists("3.4", "main")) - - -class PostgresDeleteClusterTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - self.cmd_run_all_mock = Mock(return_value={"stdout": LSCLUSTER}) - self.addCleanup(delattr, self, "cmd_run_all_mock") - patcher = patch( - "salt.utils.path.which", Mock(return_value="/usr/bin/pg_dropcluster") - ) - patcher.start() - self.addCleanup(patcher.stop) - return { - deb_postgres: { - "__salt__": { - "config.option": Mock(), - "cmd.run_all": self.cmd_run_all_mock, - "file.chown": Mock(), - "file.remove": Mock(), - } - } - } - - def test_cluster_delete(self): - deb_postgres.cluster_remove("9.3", "main") - self.assertEqual( - "/usr/bin/pg_dropcluster 9.3 main", self.cmd_run_all_mock.call_args[0][0] - ) - deb_postgres.cluster_remove("9.3", "main", stop=True) - self.assertEqual( - "/usr/bin/pg_dropcluster --stop 9.3 main", - self.cmd_run_all_mock.call_args[0][0], - ) - deb_postgres.cluster_remove(9.3, "main", stop=True) - self.assertEqual( - "/usr/bin/pg_dropcluster --stop 9.3 main", - self.cmd_run_all_mock.call_args[0][0], - ) diff --git a/tests/unit/modules/test_debian_ip.py b/tests/unit/modules/test_debian_ip.py deleted file mode 100644 index 4934f7e52947..000000000000 --- a/tests/unit/modules/test_debian_ip.py +++ /dev/null @@ -1,1198 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import tempfile - -import jinja2.exceptions -import pytest - -import salt.modules.debian_ip as debian_ip -import salt.utils.files -import salt.utils.platform -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -try: - from salt.utils.odict import OrderedDict as odict -except ImportError: - from collections import OrderedDict as odict - -# Big pile of 
interface data for unit tests -# To skip, search for 'DebianIpTestCase' -# fmt: off -test_interfaces = [ - # Structure - #{'iface_name': 'ethX', 'iface_type': 'eth', 'enabled': True, - # 'skip_test': bool(), # True to disable this test - # 'build_interface': dict(), # data read from sls - # 'get_interface(): OrderedDict(), # data read from interfaces file - # 'return': list()}, # jinja-rendered data - - # IPv4-only interface; single address - {'iface_name': 'eth1', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'proto': 'static', - 'ipaddr': '192.168.4.9', - 'netmask': '255.255.255.0', - 'gateway': '192.168.4.1', - 'enable_ipv6': False, - 'noifupdown': True, - }, - 'get_interface': odict([('eth1', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'static'), - ('filename', None), - ('address', '192.168.4.9'), - ('netmask', '255.255.255.0'), - ('gateway', '192.168.4.1'), - ])), - ]))]))]), - 'return': [ - 'auto eth1\n', - 'iface eth1 inet static\n', - ' address 192.168.4.9\n', - ' netmask 255.255.255.0\n', - ' gateway 192.168.4.1\n', - '\n']}, - - # IPv6-only; single address - {'iface_name': 'eth2', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'ipv6proto': 'static', - 'ipv6ipaddr': '2001:db8:dead:beef::3', - 'ipv6netmask': '64', - 'ipv6gateway': '2001:db8:dead:beef::1', - 'enable_ipv6': True, - 'noifupdown': True, - }, - 'get_interface': odict([('eth2', odict([('enabled', True), ('data', odict([ - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'static'), - ('filename', None), - ('address', '2001:db8:dead:beef::3'), - ('netmask', 64), - ('gateway', '2001:db8:dead:beef::1'), - ])), - ]))]))]), - 'return': [ - 'auto eth2\n', - 'iface eth2 inet6 static\n', - ' address 2001:db8:dead:beef::3\n', - ' netmask 64\n', - ' gateway 2001:db8:dead:beef::1\n', - '\n']}, - - # IPv6-only; multiple addrs; no gw; first addr from ipv6addr - {'iface_name': 'eth3', 'iface_type': 'eth', 'enabled': True, 
- 'build_interface': { - 'ipv6proto': 'static', - 'ipv6ipaddr': '2001:db8:dead:beef::5/64', - 'ipv6ipaddrs': [ - '2001:db8:dead:beef::7/64', - '2001:db8:dead:beef::8/64', - '2001:db8:dead:beef::9/64'], - 'enable_ipv6': True, - 'noifupdown': True, - }, - 'get_interface': odict([('eth3', odict([('enabled', True), ('data', odict([ - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'static'), - ('filename', None), - ('address', '2001:db8:dead:beef::5/64'), - ('addresses', [ - '2001:db8:dead:beef::7/64', - '2001:db8:dead:beef::8/64', - '2001:db8:dead:beef::9/64', - ]), - ])), - ]))]))]), - 'return': [ - 'auto eth3\n', - 'iface eth3 inet6 static\n', - ' address 2001:db8:dead:beef::5/64\n', - ' address 2001:db8:dead:beef::7/64\n', - ' address 2001:db8:dead:beef::8/64\n', - ' address 2001:db8:dead:beef::9/64\n', - '\n']}, - - # IPv6-only; multiple addresses - {'iface_name': 'eth4', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'ipv6proto': 'static', - 'ipv6ipaddrs': [ - '2001:db8:dead:beef::5/64', - '2001:db8:dead:beef::7/64', - '2001:db8:dead:beef::8/64', - '2001:db8:dead:beef::9/64'], - 'ipv6gateway': '2001:db8:dead:beef::1', - 'enable_ipv6': True, - 'noifupdown': True, - }, - 'get_interface': odict([('eth4', odict([('enabled', True), ('data', odict([ - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'static'), - ('filename', None), - ('address', '2001:db8:dead:beef::5/64'), - ('addresses', [ - '2001:db8:dead:beef::7/64', - '2001:db8:dead:beef::8/64', - '2001:db8:dead:beef::9/64', - ]), - ('gateway', '2001:db8:dead:beef::1'), - ])), - ]))]))]), - 'return': [ - 'auto eth4\n', - 'iface eth4 inet6 static\n', - ' address 2001:db8:dead:beef::5/64\n', - ' address 2001:db8:dead:beef::7/64\n', - ' address 2001:db8:dead:beef::8/64\n', - ' address 2001:db8:dead:beef::9/64\n', - ' gateway 2001:db8:dead:beef::1\n', - '\n']}, - - # IPv4 and IPv6 settings with v4 disabled - {'iface_name': 'eth5', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { 
- 'proto': 'static', - 'ipaddr': '192.168.4.9', - 'netmask': '255.255.255.0', - 'gateway': '192.168.4.1', - 'ipv6proto': 'static', - 'ipv6ipaddr': '2001:db8:dead:beef::3', - 'ipv6netmask': '64', - 'ipv6gateway': '2001:db8:dead:beef::1', - 'enable_ipv4': False, - 'noifupdown': True, - }, - 'get_interface': odict([('eth5', odict([('enabled', True), ('data', odict([ - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'static'), - ('filename', None), - ('address', '2001:db8:dead:beef::3'), - ('netmask', 64), - ('gateway', '2001:db8:dead:beef::1'), - ])), - ]))]))]), - 'return': [ - 'auto eth5\n', - 'iface eth5 inet6 static\n', - ' address 2001:db8:dead:beef::3\n', - ' netmask 64\n', - ' gateway 2001:db8:dead:beef::1\n', - '\n']}, - - # IPv4 and IPv6 settings with v6 disabled - {'iface_name': 'eth6', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'proto': 'static', - 'ipaddr': '192.168.4.9', - 'netmask': '255.255.255.0', - 'gateway': '192.168.4.1', - 'ipv6proto': 'static', - 'ipv6ipaddr': '2001:db8:dead:beef::3', - 'ipv6netmask': '64', - 'ipv6gateway': '2001:db8:dead:beef::1', - 'enable_ipv6': False, - 'noifupdown': True, - }, - 'get_interface': odict([('eth6', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'static'), - ('filename', None), - ('address', '192.168.4.9'), - ('netmask', '255.255.255.0'), - ('gateway', '192.168.4.1'), - ])), - ]))]))]), - 'return': [ - 'auto eth6\n', - 'iface eth6 inet static\n', - ' address 192.168.4.9\n', - ' netmask 255.255.255.0\n', - ' gateway 192.168.4.1\n', - '\n']}, - - # IPv4 and IPv6; shared/overridden settings - {'iface_name': 'eth7', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'proto': 'static', - 'ipaddr': '192.168.4.9', - 'netmask': '255.255.255.0', - 'gateway': '192.168.4.1', - 'ipv6proto': 'static', - 'ipv6ipaddr': '2001:db8:dead:beef::3', - 'ipv6netmask': '64', - 'ipv6gateway': '2001:db8:dead:beef::1', - 'ttl': '18', # shared - 
'ipv6ttl': '15', # overridden for v6 - 'mtu': '1480', # shared - 'enable_ipv6': True, - 'noifupdown': True, - }, - 'get_interface': odict([('eth7', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'static'), - ('filename', None), - ('address', '192.168.4.9'), - ('netmask', '255.255.255.0'), - ('gateway', '192.168.4.1'), - ('ttl', 18), - ('mtu', 1480), - ])), - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'static'), - ('filename', None), - ('address', '2001:db8:dead:beef::3'), - ('netmask', 64), - ('gateway', '2001:db8:dead:beef::1'), - ('ttl', 15), - ('mtu', 1480), - ])), - ]))]))]), - 'return': [ - 'auto eth7\n', - 'iface eth7 inet static\n', - ' address 192.168.4.9\n', - ' netmask 255.255.255.0\n', - ' gateway 192.168.4.1\n', - ' ttl 18\n', - ' mtu 1480\n', - 'iface eth7 inet6 static\n', - ' address 2001:db8:dead:beef::3\n', - ' netmask 64\n', - ' gateway 2001:db8:dead:beef::1\n', - ' ttl 15\n', - ' mtu 1480\n', - '\n']}, - - # Slave iface - {'iface_name': 'eth8', 'iface_type': 'slave', 'enabled': True, - 'build_interface': { - 'master': 'bond0', - 'noifupdown': True, - }, - 'get_interface': odict([('eth8', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'manual'), - ('filename', None), - ('bonding', odict([ - ('master', 'bond0'), - ])), - ('bonding_keys', ['master']), - ])), - ]))]))]), - 'return': [ - 'auto eth8\n', - 'iface eth8 inet manual\n', - ' bond-master bond0\n', - '\n']}, - - # Bond; with address IPv4 and IPv6 address; slaves as string - {'iface_name': 'bond9', 'iface_type': 'bond', 'enabled': True, - 'build_interface': { - 'proto': 'static', - 'ipaddr': '10.1.0.14', - 'netmask': '255.255.255.0', - 'gateway': '10.1.0.1', - 'ipv6proto': 'static', - 'ipv6ipaddr': '2001:db8:dead:c0::3', - 'ipv6netmask': '64', - 'ipv6gateway': '2001:db8:dead:c0::1', - 'mode': '802.3ad', - 'slaves': 'eth4 eth5', - 'enable_ipv6': True, - 'noifupdown': True, - }, - 
'get_interface': odict([('bond9', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'static'), - ('filename', None), - ('address', '10.1.0.14'), - ('netmask', '255.255.255.0'), - ('gateway', '10.1.0.1'), - ('bonding', odict([ - ('ad_select', '0'), - ('downdelay', '200'), - ('lacp_rate', '0'), - ('miimon', '100'), - ('mode', '4'), - ('slaves', 'eth4 eth5'), - ('updelay', '0'), - ('use_carrier', 'on'), - ])), - ('bonding_keys', [ - 'ad_select', - 'downdelay', - 'lacp_rate', - 'miimon', - 'mode', - 'slaves', - 'updelay', - 'use_carrier', - ]), - ])), - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'static'), - ('filename', None), - ('address', '2001:db8:dead:c0::3'), - ('netmask', 64), - ('gateway', '2001:db8:dead:c0::1'), - ('bonding', odict([ - ('ad_select', '0'), - ('downdelay', '200'), - ('lacp_rate', '0'), - ('miimon', '100'), - ('mode', '4'), - ('slaves', 'eth4 eth5'), - ('updelay', '0'), - ('use_carrier', 'on'), - ])), - ('bonding_keys', [ - 'ad_select', - 'downdelay', - 'lacp_rate', - 'miimon', - 'mode', - 'slaves', - 'updelay', - 'use_carrier', - ]), - ])), - ]))]))]), - 'return': [ - 'auto bond9\n', - 'iface bond9 inet static\n', - ' address 10.1.0.14\n', - ' netmask 255.255.255.0\n', - ' gateway 10.1.0.1\n', - ' bond-ad_select 0\n', - ' bond-downdelay 200\n', - ' bond-lacp_rate 0\n', - ' bond-miimon 100\n', - ' bond-mode 4\n', - ' bond-slaves eth4 eth5\n', - ' bond-updelay 0\n', - ' bond-use_carrier on\n', - 'iface bond9 inet6 static\n', - ' address 2001:db8:dead:c0::3\n', - ' netmask 64\n', - ' gateway 2001:db8:dead:c0::1\n', - ' bond-ad_select 0\n', - ' bond-downdelay 200\n', - ' bond-lacp_rate 0\n', - ' bond-miimon 100\n', - ' bond-mode 4\n', - ' bond-slaves eth4 eth5\n', - ' bond-updelay 0\n', - ' bond-use_carrier on\n', - '\n']}, - - # Bond; with address IPv4 and IPv6 address; slaves as list - {'iface_name': 'bond10', 'iface_type': 'bond', 'enabled': True, - 'build_interface': { - 'proto': 
'static', - 'ipaddr': '10.1.0.14', - 'netmask': '255.255.255.0', - 'gateway': '10.1.0.1', - 'ipv6proto': 'static', - 'ipv6ipaddr': '2001:db8:dead:c0::3', - 'ipv6netmask': '64', - 'ipv6gateway': '2001:db8:dead:c0::1', - 'mode': '802.3ad', - 'slaves': ['eth4', 'eth5'], - 'enable_ipv6': True, - 'noifupdown': True, - }, - 'get_interface': odict([('bond10', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'static'), - ('filename', None), - ('address', '10.1.0.14'), - ('netmask', '255.255.255.0'), - ('gateway', '10.1.0.1'), - ('bonding', odict([ - ('ad_select', '0'), - ('downdelay', '200'), - ('lacp_rate', '0'), - ('miimon', '100'), - ('mode', '4'), - ('slaves', 'eth4 eth5'), - ('updelay', '0'), - ('use_carrier', 'on'), - ])), - ('bonding_keys', [ - 'ad_select', - 'downdelay', - 'lacp_rate', - 'miimon', - 'mode', - 'slaves', - 'updelay', - 'use_carrier', - ]), - ])), - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'static'), - ('filename', None), - ('address', '2001:db8:dead:c0::3'), - ('netmask', 64), - ('gateway', '2001:db8:dead:c0::1'), - ('bonding', odict([ - ('ad_select', '0'), - ('downdelay', '200'), - ('lacp_rate', '0'), - ('miimon', '100'), - ('mode', '4'), - ('slaves', 'eth4 eth5'), - ('updelay', '0'), - ('use_carrier', 'on'), - ])), - ('bonding_keys', [ - 'ad_select', - 'downdelay', - 'lacp_rate', - 'miimon', - 'mode', - 'slaves', - 'updelay', - 'use_carrier', - ]), - ])), - ]))]))]), - 'return': [ - 'auto bond10\n', - 'iface bond10 inet static\n', - ' address 10.1.0.14\n', - ' netmask 255.255.255.0\n', - ' gateway 10.1.0.1\n', - ' bond-ad_select 0\n', - ' bond-downdelay 200\n', - ' bond-lacp_rate 0\n', - ' bond-miimon 100\n', - ' bond-mode 4\n', - ' bond-slaves eth4 eth5\n', - ' bond-updelay 0\n', - ' bond-use_carrier on\n', - 'iface bond10 inet6 static\n', - ' address 2001:db8:dead:c0::3\n', - ' netmask 64\n', - ' gateway 2001:db8:dead:c0::1\n', - ' bond-ad_select 0\n', - ' bond-downdelay 200\n', - ' 
bond-lacp_rate 0\n', - ' bond-miimon 100\n', - ' bond-mode 4\n', - ' bond-slaves eth4 eth5\n', - ' bond-updelay 0\n', - ' bond-use_carrier on\n', - '\n']}, - - # Bond VLAN; with IPv4 address - {'iface_name': 'bond0.11', 'iface_type': 'vlan', 'enabled': True, - 'build_interface': { - 'proto': 'static', - 'ipaddr': '10.7.0.8', - 'netmask': '255.255.255.0', - 'gateway': '10.7.0.1', - 'slaves': 'eth6 eth7', - 'mode': '802.3ad', - 'enable_ipv6': False, - 'noifupdown': True, - }, - 'get_interface': odict([('bond0.11', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'static'), - ('filename', None), - ('vlan_raw_device', 'bond1'), - ('address', '10.7.0.8'), - ('netmask', '255.255.255.0'), - ('gateway', '10.7.0.1'), - ('mode', '802.3ad'), - ])), - ]))]))]), - 'return': [ - 'auto bond0.11\n', - 'iface bond0.11 inet static\n', - ' vlan-raw-device bond1\n', - ' address 10.7.0.8\n', - ' netmask 255.255.255.0\n', - ' gateway 10.7.0.1\n', - ' mode 802.3ad\n', - '\n']}, - - # Bond; without address - {'iface_name': 'bond0.12', 'iface_type': 'vlan', 'enabled': True, - 'build_interface': { - 'proto': 'static', - 'slaves': 'eth6 eth7', - 'mode': '802.3ad', - 'enable_ipv6': False, - 'noifupdown': True, - }, - 'get_interface': odict([('bond0.12', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'static'), - ('filename', None), - ('vlan_raw_device', 'bond1'), - ('mode', '802.3ad'), - ])), - ]))]))]), - 'return': [ - 'auto bond0.12\n', - 'iface bond0.12 inet static\n', - ' vlan-raw-device bond1\n', - ' mode 802.3ad\n', - '\n']}, - - # Bridged interface - {'iface_name': 'br0', 'iface_type': 'bridge', 'enabled': True, - 'build_interface': { - 'proto': 'static', - 'ipaddr': '192.168.4.10', - 'netmask': '255.255.255.0', - 'gateway': '192.168.4.1', - 'bridge_ports': 'eth1', - 'enable_ipv6': False, - 'noifupdown': True, - }, - 'get_interface': odict([('br0', odict([('enabled', True), ('data', 
odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'static'), - ('filename', None), - ('address', '192.168.4.10'), - ('netmask', '255.255.255.0'), - ('gateway', '192.168.4.1'), - ('bridging', odict([ - ('ports', 'eth1'), - ])), - ('bridging_keys', ['ports']), - ])), - ]))]))]), - 'return': [ - 'auto br0\n', - 'iface br0 inet static\n', - ' address 192.168.4.10\n', - ' netmask 255.255.255.0\n', - ' gateway 192.168.4.1\n', - ' bridge_ports eth1\n', - '\n']}, - - - # DNS NS as list - {'iface_name': 'eth13', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'proto': 'static', - 'ipaddr': '192.168.4.9', - 'netmask': '255.255.255.0', - 'gateway': '192.168.4.1', - 'enable_ipv6': False, - 'noifupdown': True, - 'dns': ['8.8.8.8', '8.8.4.4'], - }, - 'get_interface': odict([('eth13', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'static'), - ('filename', None), - ('address', '192.168.4.9'), - ('netmask', '255.255.255.0'), - ('gateway', '192.168.4.1'), - ('dns_nameservers', ['8.8.8.8', '8.8.4.4']), - ])), - ]))]))]), - 'return': [ - 'auto eth13\n', - 'iface eth13 inet static\n', - ' address 192.168.4.9\n', - ' netmask 255.255.255.0\n', - ' gateway 192.168.4.1\n', - ' dns-nameservers 8.8.8.8 8.8.4.4\n', - '\n']}, - - # DNS NS as string - {'iface_name': 'eth14', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'proto': 'static', - 'ipaddr': '192.168.4.9', - 'netmask': '255.255.255.0', - 'gateway': '192.168.4.1', - 'enable_ipv6': False, - 'noifupdown': True, - 'dns': '8.8.8.8 8.8.4.4', - }, - 'get_interface': odict([('eth14', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'static'), - ('filename', None), - ('address', '192.168.4.9'), - ('netmask', '255.255.255.0'), - ('gateway', '192.168.4.1'), - ('dns_nameservers', ['8.8.8.8', '8.8.4.4']), - ])), - ]))]))]), - 'return': [ - 'auto eth14\n', - 'iface eth14 inet static\n', - ' address 
192.168.4.9\n', - ' netmask 255.255.255.0\n', - ' gateway 192.168.4.1\n', - ' dns-nameservers 8.8.8.8 8.8.4.4\n', - '\n']}, - - # Loopback; with IPv4 and IPv6 address - {'iface_name': 'lo15', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'proto': 'loopback', - 'ipaddr': '192.168.4.9', - 'netmask': '255.255.255.0', - 'gateway': '192.168.4.1', - 'enable_ipv6': True, - 'ipv6proto': 'loopback', - 'ipv6ipaddr': 'fc00::1', - 'ipv6netmask': '128', - 'ipv6_autoconf': False, - 'noifupdown': True, - }, - 'get_interface': odict([('lo15', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'loopback'), - ('filename', None), - ('address', '192.168.4.9'), - ('netmask', '255.255.255.0'), - ('gateway', '192.168.4.1'), - ])), - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'loopback'), - ('filename', None), - ('address', 'fc00::1'), - ('netmask', 128), - ])), - ]))]))]), - 'return': [ - 'auto lo15\n', - 'iface lo15 inet loopback\n', - ' address 192.168.4.9\n', - ' netmask 255.255.255.0\n', - ' gateway 192.168.4.1\n', - 'iface lo15 inet6 loopback\n', - ' address fc00::1\n', - ' netmask 128\n', - '\n']}, - - # Loopback; with only IPv6 address; enabled=False - {'iface_name': 'lo16', 'iface_type': 'eth', 'enabled': False, - 'build_interface': { - 'enable_ipv6': True, - 'ipv6proto': 'loopback', - 'ipv6ipaddr': 'fc00::1', - 'ipv6netmask': '128', - 'ipv6_autoconf': False, - 'noifupdown': True, - }, - 'get_interface': odict([('lo16', odict([('data', odict([ - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'loopback'), - ('filename', None), - ('address', 'fc00::1'), - ('netmask', 128), - ])), - ]))]))]), - 'return': [ - 'iface lo16 inet6 loopback\n', - ' address fc00::1\n', - ' netmask 128\n', - '\n']}, - - # Loopback; without address - {'iface_name': 'lo17', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'proto': 'loopback', - 'enable_ipv6': False, - 'noifupdown': True, - }, - 'get_interface': 
odict([('lo17', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'loopback'), - ('filename', None), - ])), - ]))]))]), - 'return': [ - 'auto lo17\n', - 'iface lo17 inet loopback\n', - '\n']}, - - # IPv4=DHCP; IPv6=Static; with IPv6 netmask - {'iface_name': 'eth18', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'proto': 'dhcp', - 'enable_ipv6': True, - 'ipv6proto': 'static', - 'ipv6ipaddr': '2001:db8:dead:c0::3', - 'ipv6netmask': '64', - 'ipv6gateway': '2001:db8:dead:c0::1', - 'noifupdown': True, - }, - 'get_interface': odict([('eth18', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'dhcp'), - ('filename', None), - ])), - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'static'), - ('filename', None), - ('address', '2001:db8:dead:c0::3'), - ('netmask', 64), - ('gateway', '2001:db8:dead:c0::1'), - ])), - ]))]))]), - 'return': [ - 'auto eth18\n', - 'iface eth18 inet dhcp\n', - 'iface eth18 inet6 static\n', - ' address 2001:db8:dead:c0::3\n', - ' netmask 64\n', - ' gateway 2001:db8:dead:c0::1\n', - '\n']}, - - # IPv4=DHCP; IPv6=Static; without IPv6 netmask - {'iface_name': 'eth19', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'proto': 'dhcp', - 'enable_ipv6': True, - 'ipv6proto': 'static', - 'ipv6ipaddr': '2001:db8:dead:c0::3/64', - 'ipv6gateway': '2001:db8:dead:c0::1', - 'noifupdown': True, - }, - 'get_interface': odict([('eth19', odict([('enabled', True), ('data', odict([ - ('inet', odict([ - ('addrfam', 'inet'), - ('proto', 'dhcp'), - ('filename', None), - ])), - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'static'), - ('filename', None), - ('address', '2001:db8:dead:c0::3/64'), - ('gateway', '2001:db8:dead:c0::1'), - ])), - ]))]))]), - 'return': [ - 'auto eth19\n', - 'iface eth19 inet dhcp\n', - 'iface eth19 inet6 static\n', - ' address 2001:db8:dead:c0::3/64\n', - ' gateway 2001:db8:dead:c0::1\n', - '\n']}, - - # 
IPv6-only; static with autoconf and accept_ra forced - {'iface_name': 'eth20', 'iface_type': 'eth', 'enabled': True, - 'build_interface': { - 'ipv6proto': 'static', - 'ipv6ipaddr': '2001:db8:dead:beef::3/64', - 'ipv6gateway': '2001:db8:dead:beef::1', - 'enable_ipv6': True, - 'autoconf': 1, - 'accept_ra': 2, - 'noifupdown': True, - }, - 'get_interface': odict([('eth20', odict([('enabled', True), ('data', odict([ - ('inet6', odict([ - ('addrfam', 'inet6'), - ('proto', 'static'), - ('filename', None), - ('autoconf', 1), - ('address', '2001:db8:dead:beef::3/64'), - ('gateway', '2001:db8:dead:beef::1'), - ('accept_ra', 2), - ])), - ]))]))]), - 'return': [ - 'auto eth20\n', - 'iface eth20 inet6 static\n', - ' autoconf 1\n', - ' address 2001:db8:dead:beef::3/64\n', - ' gateway 2001:db8:dead:beef::1\n', - ' accept_ra 2\n', - '\n']}, - ] -# fmt: on - - -@pytest.mark.skip_on_windows(reason="Do not run these tests on Windows") -class DebianIpTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.debian_ip - """ - - def setup_loader_modules(self): - return {debian_ip: {}} - - # 'build_bond' function tests: 3 - - def test_build_bond(self): - """ - Test if it create a bond script in /etc/modprobe.d with the passed - settings and load the bonding kernel module. 
- """ - with patch( - "salt.modules.debian_ip._parse_settings_bond", MagicMock(return_value={}) - ), patch("salt.modules.debian_ip._write_file", MagicMock(return_value=True)): - mock = MagicMock(return_value=1) - with patch.dict(debian_ip.__grains__, {"osrelease": mock}): - mock = MagicMock(return_value=True) - with patch.dict( - debian_ip.__salt__, {"kmod.load": mock, "pkg.install": mock} - ): - self.assertEqual(debian_ip.build_bond("bond0"), "") - - def test_error_message_iface_should_process_non_str_expected(self): - values = [1, True, False, "no-kaboom"] - iface = "ethtest" - option = "test" - msg = debian_ip._error_msg_iface(iface, option, values) - self.assertTrue(msg.endswith("[1|True|False|no-kaboom]"), msg) - - def test_error_message_network_should_process_non_str_expected(self): - values = [1, True, False, "no-kaboom"] - msg = debian_ip._error_msg_network("fnord", values) - self.assertTrue(msg.endswith("[1|True|False|no-kaboom]"), msg) - - def test_build_bond_exception(self): - """ - Test if it create a bond script in /etc/modprobe.d with the passed - settings and load the bonding kernel module. - """ - with patch( - "salt.modules.debian_ip._parse_settings_bond", MagicMock(return_value={}) - ): - mock = MagicMock(return_value=1) - with patch.dict(debian_ip.__grains__, {"osrelease": mock}): - mock = MagicMock( - side_effect=jinja2.exceptions.TemplateNotFound("error") - ) - with patch.object(jinja2.Environment, "get_template", mock): - self.assertEqual(debian_ip.build_bond("bond0"), "") - - def test_build_bond_data(self): - """ - Test if it create a bond script in /etc/modprobe.d with the passed - settings and load the bonding kernel module. 
- """ - with patch( - "salt.modules.debian_ip._parse_settings_bond", MagicMock(return_value={}) - ), patch("salt.modules.debian_ip._read_temp", MagicMock(return_value=True)): - mock = MagicMock(return_value=1) - with patch.dict(debian_ip.__grains__, {"osrelease": mock}): - self.assertTrue(debian_ip.build_bond("bond0", test="True")) - - # 'build_routes' function tests: 2 - - def test_build_routes(self): - """ - Test if it add route scripts for a network interface using up commands. - """ - with patch( - "salt.modules.debian_ip._parse_routes", - MagicMock(return_value={"routes": []}), - ), patch( - "salt.modules.debian_ip._write_file_routes", MagicMock(return_value=True) - ), patch( - "salt.modules.debian_ip._read_file", MagicMock(return_value="salt") - ): - self.assertEqual(debian_ip.build_routes("eth0"), "saltsalt") - - def test_build_routes_exception(self): - """ - Test if it add route scripts for a network interface using up commands. - """ - with patch( - "salt.modules.debian_ip._parse_routes", - MagicMock(return_value={"routes": []}), - ): - self.assertTrue(debian_ip.build_routes("eth0", test="True")) - - mock = MagicMock(side_effect=jinja2.exceptions.TemplateNotFound("err")) - with patch.object(jinja2.Environment, "get_template", mock): - self.assertEqual(debian_ip.build_routes("eth0"), "") - - # 'down' function tests: 1 - - def test_down(self): - """ - Test if it shutdown a network interface - """ - self.assertEqual(debian_ip.down("eth0", "slave"), None) - - mock = MagicMock(return_value="Salt") - with patch.dict(debian_ip.__salt__, {"cmd.run": mock}): - self.assertEqual(debian_ip.down("eth0", "eth"), "Salt") - - # 'get_bond' function tests: 1 - - def test_get_bond(self): - """ - Test if it return the content of a bond script - """ - self.assertEqual(debian_ip.get_bond("bond0"), "") - - # '_parse_interfaces' function tests: 1 - - def test_parse_interfaces(self): - """ - Test if it returns the correct data for parsed configuration file - """ - with 
tempfile.NamedTemporaryFile(mode="r", delete=True) as tfile: - for iface in test_interfaces: - iname = iface["iface_name"] - if iface.get("skip_test", False): - continue - with salt.utils.files.fopen(str(tfile.name), "w") as fh: - fh.writelines(iface["return"]) - for inet in ["inet", "inet6"]: - if inet in iface["get_interface"][iname]["data"]: - iface["get_interface"][iname]["data"][inet]["filename"] = str( - tfile.name - ) - self.assertDictEqual( - debian_ip._parse_interfaces([str(tfile.name)]), - iface["get_interface"], - ) - - # 'get_interface' function tests: 1 - - def test_get_interface(self): - """ - Test if it return the contents of an interface script - """ - for iface in test_interfaces: - if iface.get("skip_test", False): - continue - with patch.object( - debian_ip, - "_parse_interfaces", - MagicMock(return_value=iface["get_interface"]), - ): - self.assertListEqual( - debian_ip.get_interface(iface["iface_name"]), iface["return"] - ) - - # 'build_interface' function tests: 1 - - def test_build_interface(self): - """ - Test if it builds an interface script for a network interface. 
- """ - with patch( - "salt.modules.debian_ip._write_file_ifaces", MagicMock(return_value="salt") - ): - self.assertEqual( - debian_ip.build_interface("eth0", "eth", "enabled"), - ["s\n", "a\n", "l\n", "t\n"], - ) - - self.assertTrue( - debian_ip.build_interface("eth0", "eth", "enabled", test="True") - ) - - with patch.object( - debian_ip, "_parse_settings_eth", MagicMock(return_value={"routes": []}) - ): - for eth_t in ["bridge", "slave", "bond"]: - self.assertRaises( - AttributeError, - debian_ip.build_interface, - "eth0", - eth_t, - "enabled", - ) - - self.assertTrue( - debian_ip.build_interface("eth0", "eth", "enabled", test="True") - ) - - with tempfile.NamedTemporaryFile(mode="r", delete=True) as tfile: - with patch("salt.modules.debian_ip._DEB_NETWORK_FILE", str(tfile.name)): - for iface in test_interfaces: - if iface.get("skip_test", False): - continue - # Skip tests that require __salt__['pkg.install']() - if iface["iface_type"] in ["bridge", "pppoe", "vlan"]: - continue - self.assertListEqual( - debian_ip.build_interface( - iface=iface["iface_name"], - iface_type=iface["iface_type"], - enabled=iface["enabled"], - interface_file=tfile.name, - **iface["build_interface"] - ), - iface["return"], - ) - - # 'up' function tests: 1 - - def test_up(self): - """ - Test if it start up a network interface - """ - self.assertEqual(debian_ip.down("eth0", "slave"), None) - - mock = MagicMock(return_value="Salt") - with patch.dict(debian_ip.__salt__, {"cmd.run": mock}): - self.assertEqual(debian_ip.up("eth0", "eth"), "Salt") - - # 'get_network_settings' function tests: 1 - - def test_get_network_settings(self): - """ - Test if it return the contents of the global network script. 
- """ - with patch.dict( - debian_ip.__grains__, {"osfullname": "Ubuntu", "osrelease": "14"} - ), patch( - "salt.modules.debian_ip._parse_hostname", - MagicMock(return_value="SaltStack"), - ), patch( - "salt.modules.debian_ip._parse_domainname", - MagicMock(return_value="saltstack.com"), - ): - mock_avai = MagicMock(return_value=True) - with patch.dict( - debian_ip.__salt__, - {"service.available": mock_avai, "service.status": mock_avai}, - ): - self.assertEqual( - debian_ip.get_network_settings(), - [ - "NETWORKING=yes\n", - "HOSTNAME=SaltStack\n", - "DOMAIN=saltstack.com\n", - ], - ) - - mock = MagicMock( - side_effect=jinja2.exceptions.TemplateNotFound("error") - ) - with patch.object(jinja2.Environment, "get_template", mock): - self.assertEqual(debian_ip.get_network_settings(), "") - - # 'get_routes' function tests: 1 - - def test_get_routes(self): - """ - Test if it return the routes for the interface - """ - with patch("salt.modules.debian_ip._read_file", MagicMock(return_value="salt")): - self.assertEqual(debian_ip.get_routes("eth0"), "saltsalt") - - # 'apply_network_settings' function tests: 1 - - @pytest.mark.slow_test - def test_apply_network_settings(self): - """ - Test if it apply global network configuration. - """ - mock = MagicMock(return_value=True) - with patch.dict( - debian_ip.__salt__, - {"network.mod_hostname": mock, "service.stop": mock, "service.start": mock}, - ): - self.assertEqual(debian_ip.apply_network_settings(), True) - - # 'build_network_settings' function tests: 1 - - def test_build_network_settings(self): - """ - Test if it build the global network script. 
- """ - with patch( - "salt.modules.debian_ip._parse_network_settings", - MagicMock( - return_value={ - "networking": "yes", - "hostname": "Salt.saltstack.com", - "domainname": "saltstack.com", - "search": "test.saltstack.com", - } - ), - ), patch( - "salt.modules.debian_ip._write_file_network", MagicMock(return_value=True) - ): - with patch.dict( - debian_ip.__grains__, {"osfullname": "Ubuntu", "osrelease": "14"} - ): - mock = MagicMock(return_value=True) - with patch.dict( - debian_ip.__salt__, - { - "service.available": mock, - "service.disable": mock, - "service.enable": mock, - }, - ): - self.assertEqual( - debian_ip.build_network_settings(), - [ - "NETWORKING=yes\n", - "HOSTNAME=Salt\n", - "DOMAIN=saltstack.com\n", - "SEARCH=test.saltstack.com\n", - ], - ) - - mock = MagicMock( - side_effect=jinja2.exceptions.TemplateNotFound("error") - ) - with patch.object(jinja2.Environment, "get_template", mock): - self.assertEqual(debian_ip.build_network_settings(), "") - - with patch.dict( - debian_ip.__grains__, {"osfullname": "Ubuntu", "osrelease": "10"} - ): - mock = MagicMock(return_value=True) - with patch.dict( - debian_ip.__salt__, - { - "service.available": mock, - "service.disable": mock, - "service.enable": mock, - }, - ): - mock = MagicMock( - side_effect=jinja2.exceptions.TemplateNotFound("error") - ) - with patch.object(jinja2.Environment, "get_template", mock): - self.assertEqual(debian_ip.build_network_settings(), "") - - with patch.object( - debian_ip, "_read_temp", MagicMock(return_value=True) - ): - self.assertTrue(debian_ip.build_network_settings(test="True")) diff --git a/tests/unit/modules/test_dig.py b/tests/unit/modules/test_dig.py deleted file mode 100644 index ca2ccefd9312..000000000000 --- a/tests/unit/modules/test_dig.py +++ /dev/null @@ -1,193 +0,0 @@ -""" - :codeauthor: Nicole Thomas -""" -import pytest - -import salt.modules.dig as dig -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch 
-from tests.support.unit import TestCase - -_SPF_VALUES = { - "dig +short xmission.com TXT": { - "pid": 27282, - "retcode": 0, - "stderr": "", - "stdout": '"v=spf1 a mx include:_spf.xmission.com ?all"', - }, - "dig +short _spf.xmission.com TXT": { - "pid": 27282, - "retcode": 0, - "stderr": "", - "stdout": '"v=spf1 a mx ip4:198.60.22.0/24 ip4:166.70.13.0/24 ~all"', - }, - "dig +short xmission-redirect.com TXT": { - "pid": 27282, - "retcode": 0, - "stderr": "", - "stdout": "v=spf1 redirect=_spf.xmission.com", - }, - "dig +short foo.com TXT": { - "pid": 27282, - "retcode": 0, - "stderr": "", - "stdout": "v=spf1 ip4:216.73.93.70/31 ip4:216.73.93.72/31 ~all", - }, -} - - -def _spf_side_effect(key, python_shell=False): - return _SPF_VALUES.get( - " ".join(key), {"pid": 27310, "retcode": 0, "stderr": "", "stdout": ""} - ) - - -@pytest.mark.skipif(dig.__virtual__() is False, reason="Dig must be installed") -class DigTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - return {dig: {}} - - def test_check_ip(self): - self.assertTrue(dig.check_ip("127.0.0.1"), msg="Not a valid ip address") - - def test_check_ip_ipv6(self): - self.assertTrue( - dig.check_ip("1111:2222:3333:4444:5555:6666:7777:8888"), - msg="Not a valid ip address", - ) - - def test_check_ip_ipv6_valid(self): - self.assertTrue(dig.check_ip("2607:fa18:0:3::4")) - - def test_check_ip_neg(self): - self.assertFalse( - dig.check_ip("-127.0.0.1"), msg="Did not detect negative value as invalid" - ) - - def test_check_ip_empty(self): - self.assertFalse(dig.check_ip(""), msg="Did not detect empty value as invalid") - - def test_a(self): - dig_mock = MagicMock( - return_value={ - "pid": 3656, - "retcode": 0, - "stderr": "", - "stdout": ( - "74.125.193.104\n" - "74.125.193.105\n" - "74.125.193.99\n" - "74.125.193.106\n" - "74.125.193.103\n" - "74.125.193.147" - ), - } - ) - with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): - self.assertEqual( - dig.A("www.google.com"), - [ - 
"74.125.193.104", - "74.125.193.105", - "74.125.193.99", - "74.125.193.106", - "74.125.193.103", - "74.125.193.147", - ], - ) - - def test_ptr(self): - dig_mock = MagicMock( - return_value={ - "pid": 3657, - "retcode": 0, - "stderr": "", - "stdout": ("dns.google."), - } - ) - with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): - self.assertEqual( - dig.ptr("8.8.8.8"), - [ - "dns.google.", - ], - ) - - def test_aaaa(self): - dig_mock = MagicMock( - return_value={ - "pid": 25451, - "retcode": 0, - "stderr": "", - "stdout": "2607:f8b0:400f:801::1014", - } - ) - with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): - self.assertEqual(dig.AAAA("www.google.com"), ["2607:f8b0:400f:801::1014"]) - - def test_ns(self): - with patch("salt.modules.dig.A", MagicMock(return_value=["ns4.google.com."])): - dig_mock = MagicMock( - return_value={ - "pid": 26136, - "retcode": 0, - "stderr": "", - "stdout": "ns4.google.com.", - } - ) - with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): - self.assertEqual(dig.NS("google.com"), ["ns4.google.com."]) - - def test_spf(self): - dig_mock = MagicMock(side_effect=_spf_side_effect) - with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): - self.assertEqual(dig.SPF("foo.com"), ["216.73.93.70/31", "216.73.93.72/31"]) - - def test_spf_redir(self): - """ - Test for SPF records which use the 'redirect' SPF mechanism - https://en.wikipedia.org/wiki/Sender_Policy_Framework#Mechanisms - """ - dig_mock = MagicMock(side_effect=_spf_side_effect) - with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): - self.assertEqual( - dig.SPF("xmission-redirect.com"), ["198.60.22.0/24", "166.70.13.0/24"] - ) - - def test_spf_include(self): - """ - Test for SPF records which use the 'include' SPF mechanism - https://en.wikipedia.org/wiki/Sender_Policy_Framework#Mechanisms - """ - dig_mock = MagicMock(side_effect=_spf_side_effect) - with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): - self.assertEqual( - dig.SPF("xmission.com"), 
["198.60.22.0/24", "166.70.13.0/24"] - ) - - def test_mx(self): - dig_mock = MagicMock( - return_value={ - "pid": 27780, - "retcode": 0, - "stderr": "", - "stdout": ( - "10 aspmx.l.google.com.\n" - "20 alt1.aspmx.l.google.com.\n" - "40 alt3.aspmx.l.google.com.\n" - "50 alt4.aspmx.l.google.com.\n" - "30 alt2.aspmx.l.google.com." - ), - } - ) - with patch.dict(dig.__salt__, {"cmd.run_all": dig_mock}): - self.assertEqual( - dig.MX("google.com"), - [ - ["10", "aspmx.l.google.com."], - ["20", "alt1.aspmx.l.google.com."], - ["40", "alt3.aspmx.l.google.com."], - ["50", "alt4.aspmx.l.google.com."], - ["30", "alt2.aspmx.l.google.com."], - ], - ) diff --git a/tests/unit/modules/test_dnsutil.py b/tests/unit/modules/test_dnsutil.py deleted file mode 100644 index 7d2a2f61b3ce..000000000000 --- a/tests/unit/modules/test_dnsutil.py +++ /dev/null @@ -1,145 +0,0 @@ -""" - :codeauthor: Nicole Thomas -""" - -import logging - -import pytest - -import salt.modules.dnsutil as dnsutil -import salt.utils.stringutils -from tests.support.mock import MagicMock, mock_open, patch -from tests.support.unit import TestCase - -log = logging.getLogger(__name__) - -mock_hosts_file = salt.utils.stringutils.to_str( - "##\n" - "# Host Database\n" - "#\n" - "# localhost is used to configure the loopback interface\n" - "# when the system is booting. Do not change this entry.\n" - "##\n" - "127.0.0.1 localhost\n" - "255.255.255.255 broadcasthost\n" - "::1 localhost\n" - "fe80::1%lo0 localhost" -) - -mock_hosts_file_rtn = { - "::1": ["localhost"], - "255.255.255.255": ["broadcasthost"], - "127.0.0.1": ["localhost"], - "fe80::1%lo0": ["localhost"], -} - -mock_soa_zone = salt.utils.stringutils.to_str( - "$TTL 3D\n" - "@ IN SOA land-5.com. root.land-5.com. (\n" - "199609203 ; Serial\n" - "28800 ; Refresh\n" - "7200 ; Retry\n" - "604800 ; Expire\n" - "86400) ; Minimum TTL\n" - "NS land-5.com.\n\n" - "1 PTR localhost." 
-) - -mock_writes_list = salt.utils.data.decode( - [ - "##\n", - "# Host Database\n", - "#\n", - "# localhost is used to configure the loopback interface\n", - "# when the system is booting. Do not change this entry.\n", - "##\n", - "127.0.0.1 localhost", - "\n", - "255.255.255.255 broadcasthost", - "\n", - "::1 localhost", - "\n", - "fe80::1%lo0 localhost", - "\n", - ], - to_str=True, -) - - -class DNSUtilTestCase(TestCase): - def test_parse_hosts(self): - with patch("salt.utils.files.fopen", mock_open(read_data=mock_hosts_file)): - self.assertEqual( - dnsutil.parse_hosts(), - { - "::1": ["localhost"], - "255.255.255.255": ["broadcasthost"], - "127.0.0.1": ["localhost"], - "fe80::1%lo0": ["localhost"], - }, - ) - - def test_hosts_append(self): - with patch( - "salt.utils.files.fopen", mock_open(read_data=mock_hosts_file) - ) as m_open, patch( - "salt.modules.dnsutil.parse_hosts", - MagicMock(return_value=mock_hosts_file_rtn), - ): - dnsutil.hosts_append("/etc/hosts", "127.0.0.1", "ad1.yuk.co,ad2.yuk.co") - writes = m_open.write_calls() - # We should have called .write() only once, with the expected - # content - num_writes = len(writes) - assert num_writes == 1, num_writes - expected = salt.utils.stringutils.to_str( - "\n127.0.0.1 ad1.yuk.co ad2.yuk.co" - ) - assert writes[0] == expected, writes[0] - - def test_hosts_remove(self): - to_remove = "ad1.yuk.co" - new_mock_file = mock_hosts_file + "\n127.0.0.1 " + to_remove + "\n" - with patch( - "salt.utils.files.fopen", mock_open(read_data=new_mock_file) - ) as m_open: - dnsutil.hosts_remove("/etc/hosts", to_remove) - writes = m_open.write_calls() - assert writes == mock_writes_list, writes - - @pytest.mark.skip(reason="Waiting on bug report fixes") - def test_parse_zone(self): - with patch("salt.utils.files.fopen", mock_open(read_data=mock_soa_zone)): - log.debug(mock_soa_zone) - log.debug(dnsutil.parse_zone("/var/lib/named/example.com.zone")) - - def test_to_seconds_hour(self): - self.assertEqual( - 
dnsutil._to_seconds("4H"), - 14400, - msg="Did not detect valid hours as invalid", - ) - - def test_to_seconds_day(self): - self.assertEqual( - dnsutil._to_seconds("1D"), 86400, msg="Did not detect valid day as invalid" - ) - - def test_to_seconds_week(self): - self.assertEqual( - dnsutil._to_seconds("2W"), - 604800, - msg="Did not set time greater than one week to one week", - ) - - def test_to_seconds_empty(self): - self.assertEqual( - dnsutil._to_seconds(""), 604800, msg="Did not set empty time to one week" - ) - - def test_to_seconds_large(self): - self.assertEqual( - dnsutil._to_seconds("604801"), - 604800, - msg="Did not set time greater than one week to one week", - ) diff --git a/tests/unit/modules/test_dpkg_lowpkg.py b/tests/unit/modules/test_dpkg_lowpkg.py deleted file mode 100644 index a97519f4891f..000000000000 --- a/tests/unit/modules/test_dpkg_lowpkg.py +++ /dev/null @@ -1,359 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - - -import logging -import os - -import salt.modules.dpkg_lowpkg as dpkg -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -DPKG_ERROR_MSG = """dpkg-query: package 'httpd' is not installed -Use dpkg --contents (= dpkg-deb --contents) to list archive files contents. -""" - -DPKG_L_OUTPUT = { - "hostname": """\ -/. 
-/bin -/bin/hostname -/usr -/usr/share -/usr/share/doc -/usr/share/doc/hostname -/usr/share/doc/hostname/changelog.gz -/usr/share/doc/hostname/copyright -/usr/share/man -/usr/share/man/man1 -/usr/share/man/man1/hostname.1.gz -/bin/dnsdomainname -/bin/domainname -/bin/nisdomainname -/bin/ypdomainname -/usr/share/man/man1/dnsdomainname.1.gz -/usr/share/man/man1/domainname.1.gz -/usr/share/man/man1/nisdomainname.1.gz -/usr/share/man/man1/ypdomainname.1.gz -""" -} - - -class DpkgTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.dpkg - """ - - def setUp(self): - dpkg_lowpkg_logger = logging.getLogger("salt.modules.dpkg_lowpkg") - self.level = dpkg_lowpkg_logger.level - dpkg_lowpkg_logger.setLevel(logging.FATAL) - - def tearDown(self): - logging.getLogger("salt.modules.dpkg_lowpkg").setLevel(self.level) - - def dpkg_L_side_effect(self, cmd, **kwargs): - self.assertEqual(cmd[:2], ["dpkg", "-L"]) - package = cmd[2] - return DPKG_L_OUTPUT[package] - - def setup_loader_modules(self): - return {dpkg: {}} - - # 'unpurge' function tests: 2 - - def test_unpurge(self): - """ - Test if it change package selection for each package - specified to 'install' - """ - mock = MagicMock(return_value=[]) - with patch.dict(dpkg.__salt__, {"pkg.list_pkgs": mock, "cmd.run": mock}): - self.assertDictEqual(dpkg.unpurge("curl"), {}) - - def test_unpurge_empty_package(self): - """ - Test if it change package selection for each package - specified to 'install' - """ - self.assertDictEqual(dpkg.unpurge(), {}) - - # 'list_pkgs' function tests: 1 - - def test_list_pkgs(self): - """ - Test if it lists the packages currently installed - """ - mock = MagicMock( - return_value={ - "retcode": 0, - "stderr": "", - "stdout": "installed\thostname\t3.21", - } - ) - with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}): - self.assertDictEqual(dpkg.list_pkgs("hostname"), {"hostname": "3.21"}) - - mock = MagicMock( - return_value={ - "retcode": 1, - "stderr": "dpkg-query: no packages 
found matching httpd", - "stdout": "", - } - ) - with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}): - self.assertEqual( - dpkg.list_pkgs("httpd"), - "Error: dpkg-query: no packages found matching httpd", - ) - - # 'file_list' function tests: 1 - - def test_file_list(self): - """ - Test if it lists the files that belong to a package. - """ - dpkg_query_mock = MagicMock( - return_value={"retcode": 0, "stderr": "", "stdout": "installed\thostname"} - ) - dpkg_L_mock = MagicMock(side_effect=self.dpkg_L_side_effect) - with patch.dict( - dpkg.__salt__, {"cmd.run_all": dpkg_query_mock, "cmd.run": dpkg_L_mock} - ): - self.assertDictEqual( - dpkg.file_list("hostname"), - { - "errors": [], - "files": [ - "/.", - "/bin", - "/bin/dnsdomainname", - "/bin/domainname", - "/bin/hostname", - "/bin/nisdomainname", - "/bin/ypdomainname", - "/usr", - "/usr/share", - "/usr/share/doc", - "/usr/share/doc/hostname", - "/usr/share/doc/hostname/changelog.gz", - "/usr/share/doc/hostname/copyright", - "/usr/share/man", - "/usr/share/man/man1", - "/usr/share/man/man1/dnsdomainname.1.gz", - "/usr/share/man/man1/domainname.1.gz", - "/usr/share/man/man1/hostname.1.gz", - "/usr/share/man/man1/nisdomainname.1.gz", - "/usr/share/man/man1/ypdomainname.1.gz", - ], - }, - ) - - mock = MagicMock( - return_value={"retcode": 1, "stderr": DPKG_ERROR_MSG, "stdout": ""} - ) - with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}): - self.assertEqual(dpkg.file_list("httpd"), "Error: " + DPKG_ERROR_MSG) - - # 'file_dict' function tests: 1 - - def test_file_dict(self): - """ - Test if it lists the files that belong to a package, grouped by package - """ - dpkg_query_mock = MagicMock( - return_value={"retcode": 0, "stderr": "", "stdout": "installed\thostname"} - ) - dpkg_L_mock = MagicMock(side_effect=self.dpkg_L_side_effect) - with patch.dict( - dpkg.__salt__, {"cmd.run_all": dpkg_query_mock, "cmd.run": dpkg_L_mock} - ): - expected = { - "errors": [], - "packages": { - "hostname": [ - "/.", - "/bin", - 
"/bin/hostname", - "/usr", - "/usr/share", - "/usr/share/doc", - "/usr/share/doc/hostname", - "/usr/share/doc/hostname/changelog.gz", - "/usr/share/doc/hostname/copyright", - "/usr/share/man", - "/usr/share/man/man1", - "/usr/share/man/man1/hostname.1.gz", - "/bin/dnsdomainname", - "/bin/domainname", - "/bin/nisdomainname", - "/bin/ypdomainname", - "/usr/share/man/man1/dnsdomainname.1.gz", - "/usr/share/man/man1/domainname.1.gz", - "/usr/share/man/man1/nisdomainname.1.gz", - "/usr/share/man/man1/ypdomainname.1.gz", - ] - }, - } - self.assertDictEqual(dpkg.file_dict("hostname"), expected) - - mock = MagicMock( - return_value={"retcode": 1, "stderr": DPKG_ERROR_MSG, "stdout": ""} - ) - with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}): - self.assertEqual(dpkg.file_dict("httpd"), "Error: " + DPKG_ERROR_MSG) - - def test_bin_pkg_info_spaces(self): - """ - Test the bin_pkg_info function - """ - file_proto_mock = MagicMock(return_value=True) - with patch.dict(dpkg.__salt__, {"config.valid_fileproto": file_proto_mock}): - cache_mock = MagicMock(return_value="/path/to/some/package.deb") - with patch.dict(dpkg.__salt__, {"cp.cache_file": cache_mock}): - dpkg_info_mock = MagicMock( - return_value={ - "retcode": 0, - "stderr": "", - "stdout": ( - " new Debian package, version 2.0\n" - " size 123456 bytes: control archive: 4029 bytes.\n" - " Package : package_name\n" - " Version : 1.0\n" - " Section : section_name\n" - " Priority : priority\n" - " Architecture : all\n" - " Description : some package\n" - ), - } - ) - with patch.dict(dpkg.__salt__, {"cmd.run_all": dpkg_info_mock}): - self.assertEqual( - dpkg.bin_pkg_info("package.deb")["name"], "package_name" - ) - - def test_bin_pkg_info_no_spaces(self): - """ - Test the bin_pkg_info function - """ - file_proto_mock = MagicMock(return_value=True) - with patch.dict(dpkg.__salt__, {"config.valid_fileproto": file_proto_mock}): - cache_mock = MagicMock(return_value="/path/to/some/package.deb") - with patch.dict(dpkg.__salt__, 
{"cp.cache_file": cache_mock}): - dpkg_info_mock = MagicMock( - return_value={ - "retcode": 0, - "stderr": "", - "stdout": ( - " new Debian package, version 2.0\n" - " size 123456 bytes: control archive: 4029 bytes.\n" - " Package: package_name\n" - " Version: 1.0\n" - " Section: section_name\n" - " Priority: priority\n" - " Architecture: all\n" - " Description: some package\n" - ), - } - ) - with patch.dict(dpkg.__salt__, {"cmd.run_all": dpkg_info_mock}): - self.assertEqual( - dpkg.bin_pkg_info("package.deb")["name"], "package_name" - ) - - def test_info(self): - """ - Test package info - """ - mock = MagicMock( - return_value={ - "retcode": 0, - "stderr": "", - "stdout": os.linesep.join( - [ - "package:bash", - "revision:", - "architecture:amd64", - "maintainer:Ubuntu Developers" - " ", - "summary:", - "source:bash", - "version:4.4.18-2ubuntu1", - "section:shells", - "installed_size:1588", - "size:", - "MD5:", - "SHA1:", - "SHA256:", - "origin:", - "homepage:http://tiswww.case.edu/php/chet/bash/bashtop.html", - "status:ii ", - "description:GNU Bourne Again SHell", - " Bash is an sh-compatible command language interpreter that" - " executes", - " commands read from the standard input or from a file. 
Bash" - " also", - " incorporates useful features from the Korn and C shells (ksh" - " and csh).", - " .", - " Bash is ultimately intended to be a conformant implementation" - " of the", - " IEEE POSIX Shell and Tools specification (IEEE Working Group" - " 1003.2).", - " .", - " The Programmable Completion Code, by Ian Macdonald, is now" - " found in", - " the bash-completion package.", - "", - "*/~^\\*", # pylint: disable=W1401 - ] - ), - } - ) - - with patch.dict(dpkg.__salt__, {"cmd.run_all": mock}), patch.dict( - dpkg.__grains__, {"os": "Ubuntu", "osrelease_info": (18, 4)} - ), patch("salt.utils.path.which", MagicMock(return_value=False)), patch( - "os.path.exists", MagicMock(return_value=False) - ), patch( - "os.path.getmtime", MagicMock(return_value=1560199259.0) - ): - self.assertDictEqual( - dpkg.info("bash"), - { - "bash": { - "architecture": "amd64", - "description": os.linesep.join( - [ - "GNU Bourne Again SHell", - " Bash is an sh-compatible command language interpreter" - " that executes", - " commands read from the standard input or from a file." - " Bash also", - " incorporates useful features from the Korn and C" - " shells (ksh and csh).", - " .", - " Bash is ultimately intended to be a conformant" - " implementation of the", - " IEEE POSIX Shell and Tools specification (IEEE" - " Working Group 1003.2).", - " .", - " The Programmable Completion Code, by Ian Macdonald," - " is now found in", - " the bash-completion package." 
+ os.linesep, - ] - ), - "homepage": "http://tiswww.case.edu/php/chet/bash/bashtop.html", - "maintainer": ( - "Ubuntu Developers " - ), - "package": "bash", - "section": "shells", - "source": "bash", - "status": "ii", - "version": "4.4.18-2ubuntu1", - } - }, - ) diff --git a/tests/unit/modules/test_drbd.py b/tests/unit/modules/test_drbd.py deleted file mode 100644 index 1c2fd5f5966c..000000000000 --- a/tests/unit/modules/test_drbd.py +++ /dev/null @@ -1,191 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - - -import salt.modules.drbd as drbd -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class DrbdTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.drbd - """ - - def setup_loader_modules(self): - return {drbd: {}} - - # 'overview' function tests: 1 - - def test_overview(self): - """ - Test if it shows status of the DRBD devices - """ - ret = { - "connection state": "True", - "device": "Stack", - "fs": "None", - "local disk state": "UpToDate", - "local role": "master", - "minor number": "Salt", - "mountpoint": "True", - "partner disk state": "UpToDate", - "partner role": "minion", - "percent": "888", - "remains": "666", - "total size": "50", - "used": "50", - } - mock = MagicMock( - return_value=( - "Salt:Stack True master/minion UpToDate/UpToDate True None 50 50 666 888" - ) - ) - with patch.dict(drbd.__salt__, {"cmd.run": mock}): - self.assertDictEqual(drbd.overview(), ret) - - ret = { - "connection state": "True", - "device": "Stack", - "local disk state": "UpToDate", - "local role": "master", - "minor number": "Salt", - "partner disk state": "partner", - "partner role": "minion", - "synched": "5050", - "synchronisation: ": "syncbar", - } - mock = MagicMock( - return_value=( - "Salt:Stack True master/minion UpToDate/partner syncbar None 50 50" - ) - ) - with patch.dict(drbd.__salt__, {"cmd.run": mock}): - 
self.assertDictEqual(drbd.overview(), ret) - - def test_status(self): - """ - Test if it shows status of the DRBD resources via drbdadm - """ - ret = [ - { - "local role": "Primary", - "local volumes": [{"disk": "UpToDate"}], - "peer nodes": [ - { - "peer volumes": [ - { - "done": "96.47", - "peer-disk": "Inconsistent", - "replication": "SyncSource", - } - ], - "peernode name": "opensuse-node2", - "role": "Secondary", - } - ], - "resource name": "single", - } - ] - - mock = MagicMock( - return_value=""" -single role:Primary - disk:UpToDate - opensuse-node2 role:Secondary - replication:SyncSource peer-disk:Inconsistent done:96.47 -""" - ) - - with patch.dict(drbd.__salt__, {"cmd.run": mock}): - try: # python2 - self.assertItemsEqual(drbd.status(), ret) - except AttributeError: # python3 - self.assertCountEqual(drbd.status(), ret) - - ret = [ - { - "local role": "Primary", - "local volumes": [ - {"disk": "UpToDate", "volume": "0"}, - {"disk": "UpToDate", "volume": "1"}, - ], - "peer nodes": [ - { - "peer volumes": [ - {"peer-disk": "UpToDate", "volume": "0"}, - {"peer-disk": "UpToDate", "volume": "1"}, - ], - "peernode name": "node2", - "role": "Secondary", - }, - { - "peer volumes": [ - {"peer-disk": "UpToDate", "volume": "0"}, - {"peer-disk": "UpToDate", "volume": "1"}, - ], - "peernode name": "node3", - "role": "Secondary", - }, - ], - "resource name": "test", - }, - { - "local role": "Primary", - "local volumes": [ - {"disk": "UpToDate", "volume": "0"}, - {"disk": "UpToDate", "volume": "1"}, - ], - "peer nodes": [ - { - "peer volumes": [ - {"peer-disk": "UpToDate", "volume": "0"}, - {"peer-disk": "UpToDate", "volume": "1"}, - ], - "peernode name": "node2", - "role": "Secondary", - }, - { - "peer volumes": [ - {"peer-disk": "UpToDate", "volume": "0"}, - {"peer-disk": "UpToDate", "volume": "1"}, - ], - "peernode name": "node3", - "role": "Secondary", - }, - ], - "resource name": "res", - }, - ] - - mock = MagicMock( - return_value=""" -res role:Primary - volume:0 
disk:UpToDate - volume:1 disk:UpToDate - node2 role:Secondary - volume:0 peer-disk:UpToDate - volume:1 peer-disk:UpToDate - node3 role:Secondary - volume:0 peer-disk:UpToDate - volume:1 peer-disk:UpToDate - -test role:Primary - volume:0 disk:UpToDate - volume:1 disk:UpToDate - node2 role:Secondary - volume:0 peer-disk:UpToDate - volume:1 peer-disk:UpToDate - node3 role:Secondary - volume:0 peer-disk:UpToDate - volume:1 peer-disk:UpToDate - -""" - ) - with patch.dict(drbd.__salt__, {"cmd.run": mock}): - try: # python2 - self.assertItemsEqual(drbd.status(), ret) - except AttributeError: # python3 - self.assertCountEqual(drbd.status(), ret) diff --git a/tests/unit/modules/test_haproxyconn.py b/tests/unit/modules/test_haproxyconn.py deleted file mode 100644 index 9b248c2a2f13..000000000000 --- a/tests/unit/modules/test_haproxyconn.py +++ /dev/null @@ -1,199 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - - -import salt.modules.haproxyconn as haproxyconn -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.unit import TestCase - - -class Mockcmds: - """ - Mock of cmds - """ - - def __init__(self): - self.backend = None - self.server = None - self.weight = None - - def listServers(self, backend): - """ - Mock of listServers method - """ - self.backend = backend - return ( - "Name: server01 Status: UP Weight: 1 bIn: 22 bOut: 12\n" - "Name: server02 Status: MAINT Weight: 2 bIn: 0 bOut: 0" - ) - - def enableServer(self, server, backend): - """ - Mock of enableServer method - """ - self.backend = backend - self.server = server - return "server enabled" - - def disableServer(self, server, backend): - """ - Mock of disableServer method - """ - self.backend = backend - self.server = server - return "server disabled" - - def getWeight(self, server, backend, weight=0): - """ - Mock of getWeight method - """ - self.backend = backend - self.server = server - self.weight = weight - return "server weight" - - @staticmethod - def showFrontends(): - """ - Mock 
of showFrontends method - """ - return "frontend-alpha\nfrontend-beta\nfrontend-gamma" - - @staticmethod - def showBackends(): - """ - Mock of showBackends method - """ - return "backend-alpha\nbackend-beta\nbackend-gamma" - - -class Mockhaproxy: - """ - Mock of haproxy - """ - - def __init__(self): - self.cmds = Mockcmds() - - -class MockHaConn: - """ - Mock of HaConn - """ - - def __init__(self, socket=None): - self.ha_cmd = None - - def sendCmd(self, ha_cmd, objectify=False): - """ - Mock of sendCmd method - """ - self.ha_cmd = ha_cmd - self.objectify = objectify - return ha_cmd - - -class HaproxyConnTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.haproxyconn - """ - - def setup_loader_modules(self): - return {haproxyconn: {"haproxy": Mockhaproxy(), "_get_conn": MockHaConn}} - - # 'list_servers' function tests: 1 - - def test_list_servers(self): - """ - Test list_servers - """ - self.assertTrue(haproxyconn.list_servers("mysql")) - - # 'enable_server' function tests: 1 - - def test_enable_server(self): - """ - Test enable_server - """ - self.assertTrue(haproxyconn.enable_server("web1.salt.com", "www")) - - # 'disable_server' function tests: 1 - - def test_disable_server(self): - """ - Test disable_server - """ - self.assertTrue(haproxyconn.disable_server("db1.salt.com", "mysql")) - - # 'get_weight' function tests: 1 - - def test_get_weight(self): - """ - Test get the weight of a server - """ - self.assertTrue(haproxyconn.get_weight("db1.salt.com", "mysql")) - - # 'set_weight' function tests: 1 - - def test_set_weight(self): - """ - Test setting the weight of a given server - """ - self.assertTrue(haproxyconn.set_weight("db1.salt.com", "mysql", weight=11)) - - # 'show_frontends' function tests: 1 - - def test_show_frontends(self): - """ - Test print all frontends received from the HAProxy socket - """ - self.assertTrue(haproxyconn.show_frontends()) - - def test_list_frontends(self): - """ - Test listing all frontends - """ - 
self.assertEqual( - sorted(haproxyconn.list_frontends()), - sorted(["frontend-alpha", "frontend-beta", "frontend-gamma"]), - ) - - # 'show_backends' function tests: 1 - - def test_show_backends(self): - """ - Test print all backends received from the HAProxy socket - """ - self.assertTrue(haproxyconn.show_backends()) - - def test_list_backends(self): - """ - Test listing of all backends - """ - self.assertEqual( - sorted(haproxyconn.list_backends()), - sorted(["backend-alpha", "backend-beta", "backend-gamma"]), - ) - - def test_get_backend(self): - """ - Test get_backend and compare returned value - """ - expected_data = { - "server01": {"status": "UP", "weight": 1, "bin": 22, "bout": 12}, - "server02": {"status": "MAINT", "weight": 2, "bin": 0, "bout": 0}, - } - self.assertDictEqual(haproxyconn.get_backend("test"), expected_data) - - def test_wait_state_true(self): - """ - Test a successful wait for state - """ - self.assertTrue(haproxyconn.wait_state("test", "server01")) - - def test_wait_state_false(self): - """ - Test a failed wait for state, with a timeout of 0 - """ - self.assertFalse(haproxyconn.wait_state("test", "server02", "up", 0)) diff --git a/tests/unit/modules/test_hashutil.py b/tests/unit/modules/test_hashutil.py deleted file mode 100644 index bb3ac46d0a9d..000000000000 --- a/tests/unit/modules/test_hashutil.py +++ /dev/null @@ -1,63 +0,0 @@ -import os - -import salt.config -import salt.loader -from tests.support.case import ModuleCase -from tests.support.mixins import RUNTIME_VARS - - -class HashutilTestCase(ModuleCase): - the_string = "get salted" - the_string_base64 = "Z2V0IHNhbHRlZA==\n" - the_string_md5 = "2aacf29e92feaf528fb738bcf9d647ac" - the_string_sha256 = ( - "d49859ccbc854fa68d800b5734efc70d72383e6479d545468bc300263164ff33" - ) - the_string_sha512 = "a8c174a7941c64a068e686812a2fafd7624c840fde800f5965fbeca675f2f6e37061ffe41e17728c919bdea290eab7a21e13c04ae71661955a87f2e0e04bb045" - the_string_hmac = 
"eBWf9bstXg+NiP5AOwppB5HMvZiYMPzEM9W5YMm/AmQ=" - the_string_hmac_compute = ( - "78159ff5bb2d5e0f8d88fe403b0a690791ccbd989830fcc433d5b960c9bf0264" - ) - the_string_github = "sha1=b06aa56bdf4935eec82c4e53e83ed03f03fdb32d" - - def setUp(self): - minion_opts = salt.config.minion_config( - os.path.join(RUNTIME_VARS.TMP_CONF_DIR, "minion") - ) - self.hashutil = salt.loader.raw_mod(minion_opts, "hashutil", None) - - def test_base64_encodestring(self): - ret = self.hashutil["hashutil.base64_encodestring"](self.the_string) - self.assertEqual(ret, self.the_string_base64) - - def test_base64_decodestring(self): - ret = self.hashutil["hashutil.base64_decodestring"](self.the_string_base64) - self.assertEqual(ret, self.the_string) - - def test_md5_digest(self): - ret = self.hashutil["hashutil.md5_digest"](self.the_string) - self.assertEqual(ret, self.the_string_md5) - - def test_sha256_digest(self): - ret = self.hashutil["hashutil.sha256_digest"](self.the_string) - self.assertEqual(ret, self.the_string_sha256) - - def test_sha512_digest(self): - ret = self.hashutil["hashutil.sha512_digest"](self.the_string) - self.assertEqual(ret, self.the_string_sha512) - - def test_hmac_signature(self): - ret = self.hashutil["hashutil.hmac_signature"]( - self.the_string, "shared secret", self.the_string_hmac - ) - self.assertTrue(ret) - - def test_hmac_compute(self): - ret = self.hashutil["hashutil.hmac_compute"](self.the_string, "shared secret") - self.assertEqual(ret, self.the_string_hmac_compute) - - def test_github_signature(self): - ret = self.hashutil["hashutil.github_signature"]( - self.the_string, "shared secret", self.the_string_github - ) - self.assertTrue(ret) diff --git a/tests/unit/modules/test_heat.py b/tests/unit/modules/test_heat.py index 9520085bddfb..4aaec60eecd0 100644 --- a/tests/unit/modules/test_heat.py +++ b/tests/unit/modules/test_heat.py @@ -4,7 +4,6 @@ import salt.modules.heat as heat import salt.modules.win_file as win_file import salt.utils.platform -import 
salt.utils.win_dacl as dacl from tests.support.mixins import LoaderModuleMockMixin from tests.support.mock import MagicMock, patch from tests.support.runtests import RUNTIME_VARS @@ -78,10 +77,7 @@ def setup_loader_modules(self): "config.backup_mode": MagicMock(return_value=False), }, }, - win_file: { - "__utils__": {"dacl.check_perms": salt.utils.win_dacl.check_perms} - }, - dacl: {"__opts__": {"test": False}}, + win_file: {"__opts__": {"test": False}}, } def setUp(self): diff --git a/tests/unit/modules/test_junos.py b/tests/unit/modules/test_junos.py deleted file mode 100644 index 8f23cb95f93e..000000000000 --- a/tests/unit/modules/test_junos.py +++ /dev/null @@ -1,2766 +0,0 @@ -""" - :codeauthor: Rajvi Dhimar -""" -import os - -import pytest -import yaml - -import salt.modules.junos as junos -from tests.support.mixins import LoaderModuleMockMixin, XMLEqualityMixin -from tests.support.mock import ANY, MagicMock, PropertyMock, call, mock_open, patch -from tests.support.unit import TestCase - -try: - from lxml import etree -except ImportError: - import xml.etree.ElementTree as etree - -try: - import jnpr.junos.op as tables_dir - import jxmlease # pylint: disable=unused-import - from jnpr.junos.device import Device - from jnpr.junos.exception import ConnectClosedError, LockError, UnlockError - from jnpr.junos.utils.config import Config - from jnpr.junos.utils.sw import SW - - HAS_JUNOS = True -except ImportError: - HAS_JUNOS = False - - -@pytest.mark.skipif( - not HAS_JUNOS, reason="The junos-eznc and jxmlease modules are required" -) -class Test_Junos_Module(TestCase, LoaderModuleMockMixin, XMLEqualityMixin): - def setup_loader_modules(self): - return { - junos: { - "__proxy__": { - "junos.conn": self.make_connect, - "junos.get_serialized_facts": self.get_facts, - "junos.reboot_active": MagicMock(return_value=True), - "junos.reboot_clear": MagicMock(return_value=True), - }, - "__salt__": { - "cp.get_template": self.mock_cp, - "cp.get_file": self.mock_cp, - 
"file.file_exists": MagicMock(return_value=True), - "slsutil.renderer": MagicMock( - return_value="set system host-name dummy" - ), - "event.fire_master": MagicMock(return_value=None), - }, - "_restart_connection": MagicMock(return_value=None), - }, - } - - def mock_cp(self, *args, **kwargs): - pass - - def make_connect(self): - with patch("ncclient.manager.connect") as mock_connect: - self.dev = Device( - host="1.1.1.1", - user="test", - password="test123", - fact_style="old", - gather_facts=False, - ) - self.dev.open() - self.dev.timeout = 30 - self.dev.bind(cu=Config) - self.dev.bind(sw=SW) - self.addCleanup(delattr, self, "dev") - return self.dev - - def raise_exception(self, *args, **kwargs): - raise Exception("Test exception") - - def get_facts(self): - facts = { - "2RE": True, - "HOME": "/var/home/regress", - "RE0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", - }, - "RE1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", - }, - "RE_hw_mi": False, - "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], - "domain": "englab.juniper.net", - "fqdn": "R1_re0.englab.juniper.net", - "hostname": "R1_re0", - "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, - "ifd_style": "CLASSIC", - "junos_info": { - "re0": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - "re1": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - }, - "master": "RE0", - "model": "MX240", - "model_info": {"re0": "MX240", "re1": "MX240"}, - "personality": "MX", - "re_info": { - "default": { - "0": { - 
"last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - "1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - }, - "default": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - } - }, - "re_master": {"default": "0"}, - "serialnumber": "VMX4eaf", - "srx_cluster": None, - "switch_style": "BRIDGE_DOMAIN", - "vc_capable": False, - "vc_fabric": None, - "vc_master": None, - "vc_mode": None, - "version": "16.1I20160413_0837_aamish", - "version_RE0": "16.1I20160413_0837_aamish", - "version_RE1": "16.1I20160413_0837_aamish", - "version_info": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "virtual": True, - } - return facts - - def test__timeout_decorator(self): - with patch( - "jnpr.junos.Device.timeout", new_callable=PropertyMock - ) as mock_timeout: - mock_timeout.return_value = 30 - - def function(x): - return x - - decorator = junos._timeout_decorator(function) - decorator("Test Mock", dev_timeout=10) - calls = [call(), call(10), call(30)] - mock_timeout.assert_has_calls(calls) - - def test__timeout_cleankwargs_decorator(self): - with patch( - "jnpr.junos.Device.timeout", new_callable=PropertyMock - ) as mock_timeout: - mock_timeout.return_value = 30 - - def function(x): - return x - - decorator = junos._timeout_decorator_cleankwargs(function) - decorator("Test Mock", dev_timeout=10, __pub_args="abc") - calls = [call(), call(10), call(30)] - mock_timeout.assert_has_calls(calls) - - def test_facts_refresh(self): - with patch("salt.modules.saltutil.sync_grains") as mock_sync_grains: - ret = dict() - ret["facts"] = { - "2RE": True, - "HOME": "/var/home/regress", - "RE0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - "up_time": 
"11 days, 23 hours, 16 minutes, 54 seconds", - }, - "RE1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", - }, - "RE_hw_mi": False, - "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], - "domain": "englab.juniper.net", - "fqdn": "R1_re0.englab.juniper.net", - "hostname": "R1_re0", - "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, - "ifd_style": "CLASSIC", - "junos_info": { - "re0": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - "re1": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - }, - "master": "RE0", - "model": "MX240", - "model_info": {"re0": "MX240", "re1": "MX240"}, - "personality": "MX", - "re_info": { - "default": { - "0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - "1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - }, - "default": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - } - }, - "re_master": {"default": "0"}, - "serialnumber": "VMX4eaf", - "srx_cluster": None, - "switch_style": "BRIDGE_DOMAIN", - "vc_capable": False, - "vc_fabric": None, - "vc_master": None, - "vc_mode": None, - "version": "16.1I20160413_0837_aamish", - "version_RE0": "16.1I20160413_0837_aamish", - "version_RE1": "16.1I20160413_0837_aamish", - "version_info": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "virtual": True, - } - ret["out"] = True - self.assertEqual(junos.facts_refresh(), ret) - - def 
test_facts_refresh_exception(self): - with patch("jnpr.junos.device.Device.facts_refresh") as mock_facts_refresh: - mock_facts_refresh.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.facts_refresh(), ret) - - def test_facts(self): - ret = dict() - ret["facts"] = { - "2RE": True, - "HOME": "/var/home/regress", - "RE0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 54 seconds", - }, - "RE1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - "up_time": "11 days, 23 hours, 16 minutes, 41 seconds", - }, - "RE_hw_mi": False, - "current_re": ["re0", "master", "node", "fwdd", "member", "pfem"], - "domain": "englab.juniper.net", - "fqdn": "R1_re0.englab.juniper.net", - "hostname": "R1_re0", - "hostname_info": {"re0": "R1_re0", "re1": "R1_re01"}, - "ifd_style": "CLASSIC", - "junos_info": { - "re0": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - "re1": { - "object": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "text": "16.1I20160413_0837_aamish", - }, - }, - "master": "RE0", - "model": "MX240", - "model_info": {"re0": "MX240", "re1": "MX240"}, - "personality": "MX", - "re_info": { - "default": { - "0": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - "1": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "backup", - "model": "RE-VMX", - "status": "OK", - }, - "default": { - "last_reboot_reason": "0x200:normal shutdown", - "mastership_state": "master", - "model": "RE-VMX", - "status": "OK", - }, - } - }, - "re_master": 
{"default": "0"}, - "serialnumber": "VMX4eaf", - "srx_cluster": None, - "switch_style": "BRIDGE_DOMAIN", - "vc_capable": False, - "vc_fabric": None, - "vc_master": None, - "vc_mode": None, - "version": "16.1I20160413_0837_aamish", - "version_RE0": "16.1I20160413_0837_aamish", - "version_RE1": "16.1I20160413_0837_aamish", - "version_info": { - "build": None, - "major": (16, 1), - "minor": "20160413_0837_aamish", - "type": "I", - }, - "virtual": True, - } - ret["out"] = True - self.assertEqual(junos.facts(), ret) - - def test_facts_exception(self): - with patch.dict( - junos.__proxy__, {"junos.get_serialized_facts": self.raise_exception} - ): - ret = dict() - ret["message"] = 'Could not display facts due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.facts(), ret) - - def test_set_hostname_without_args(self): - ret = dict() - ret["message"] = "Please provide the hostname." - ret["out"] = False - self.assertEqual(junos.set_hostname(), ret) - - def test_set_hostname_load_called_with_valid_name(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load: - junos.set_hostname("test-name") - mock_load.assert_called_with("set system host-name test-name", format="set") - - def test_set_hostname_raise_exception_for_load(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load: - mock_load.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Could not load configuration due to error "Test exception"' - ret["out"] = False - self.assertEqual(junos.set_hostname("Test-name"), ret) - - def test_set_hostname_raise_exception_for_commit_check(self): - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: - mock_commit_check.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not commit check due to error "Test exception"' - ret["out"] = False - self.assertEqual(junos.set_hostname("test-name"), ret) - - def test_set_hostname_one_arg_parsed_correctly(self): - with 
patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "Committed via salt", - "__pub_user": "root", - "__pub_arg": ["test-name", {"comment": "Committed via salt"}], - "__pub_fun": "junos.set_hostname", - "__pub_jid": "20170220210915624885", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - junos.set_hostname("test-name", **args) - mock_commit.assert_called_with(comment="Committed via salt") - - def test_set_hostname_more_than_one_args_parsed_correctly(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "Committed via salt", - "__pub_user": "root", - "__pub_arg": [ - "test-name", - {"comment": "Committed via salt", "confirm": 5}, - ], - "__pub_fun": "junos.set_hostname", - "__pub_jid": "20170220210915624885", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - junos.set_hostname("test-name", **args) - mock_commit.assert_called_with(comment="Committed via salt", confirm=5) - - def test_set_hostname_successful_return_message(self): - with patch("jnpr.junos.utils.config.Config.load") as mock_load, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "Committed via salt", - "__pub_user": "root", - "__pub_arg": ["test-name", {"comment": "Committed via salt"}], - "__pub_fun": "junos.set_hostname", - "__pub_jid": "20170220210915624885", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - 
} - ret = dict() - ret["message"] = "Successfully changed hostname." - ret["out"] = True - self.assertEqual(junos.set_hostname("test-name", **args), ret) - - def test_set_hostname_raise_exception_for_commit(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit: - mock_commit.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Successfully loaded host-name but commit failed with "Test exception"' - ret["out"] = False - self.assertEqual(junos.set_hostname("test-name"), ret) - - def test_set_hostname_fail_commit_check(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch("salt.modules.junos.rollback") as mock_rollback: - mock_commit_check.return_value = False - ret = dict() - ret["out"] = False - ret[ - "message" - ] = "Successfully loaded host-name but pre-commit check failed." - self.assertEqual(junos.set_hostname("test"), ret) - - def test_commit_without_args(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit.return_value = True - mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Commit Successful." 
- ret["out"] = True - self.assertEqual(junos.commit(), ret) - - def test_commit_raise_commit_check_exception(self): - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: - mock_commit_check.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not perform commit check due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.commit(), ret) - - def test_commit_raise_commit_exception(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - mock_commit.side_effect = self.raise_exception - ret = dict() - ret["out"] = False - ret[ - "message" - ] = 'Commit check succeeded but actual commit failed with "Test exception"' - self.assertEqual(junos.commit(), ret) - - def test_commit_with_single_argument(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "__pub_user": "root", - "__pub_arg": [{"sync": True}], - "sync": True, - "__pub_fun": "junos.commit", - "__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.commit(**args) - mock_commit.assert_called_with(detail=False, sync=True) - - def test_commit_with_multiple_arguments(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit_check.return_value = True - args = { - "comment": "comitted via salt", - "__pub_user": "root", - "__pub_arg": [ - {"comment": "comitted via salt", "confirm": 3, "detail": True} - ], - "confirm": 3, - "detail": True, - "__pub_fun": "junos.commit", - "__pub_jid": "20170221182856987820", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - 
"__pub_ret": "", - } - junos.commit(**args) - mock_commit.assert_called_with( - comment="comitted via salt", detail=True, confirm=3 - ) - - def test_commit_pyez_commit_returning_false(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit: - mock_commit.return_value = False - mock_commit_check.return_value = True - ret = dict() - ret["message"] = "Commit failed." - ret["out"] = False - self.assertEqual(junos.commit(), ret) - - def test_commit_pyez_commit_check_returns_false(self): - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_commit_check: - mock_commit_check.return_value = False - ret = dict() - ret["out"] = False - ret["message"] = "Pre-commit check failed." - self.assertEqual(junos.commit(), ret) - - def test_rollback_exception(self): - with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: - mock_rollback.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Rollback failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_rollback_without_args_success(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - mock_rollback.return_value = True - ret = dict() - ret["message"] = "Rollback successful" - ret["out"] = True - self.assertEqual(junos.rollback(), ret) - - def test_rollback_without_args_fail(self): - with patch("jnpr.junos.utils.config.Config.rollback") as mock_rollback: - mock_rollback.return_value = False - ret = dict() - ret["message"] = "Rollback failed" - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_rollback_with_id(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as 
mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - junos.rollback(id=5) - mock_rollback.assert_called_with(5) - - def test_rollback_with_id_and_single_arg(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "__pub_user": "root", - "__pub_arg": [2, {"confirm": 2}], - "confirm": 2, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221184518526067", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(id=2, **args) - mock_rollback.assert_called_with(2) - mock_commit.assert_called_with(confirm=2) - - def test_rollback_with_id_and_multiple_args(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "comment": "Comitted via salt", - "__pub_user": "root", - "__pub_arg": [ - 2, - {"comment": "Comitted via salt", "dev_timeout": 40, "confirm": 1}, - ], - "confirm": 1, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221192708251721", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(id=2, **args) - mock_rollback.assert_called_with(2) - mock_commit.assert_called_with( - comment="Comitted via salt", confirm=1, dev_timeout=40 - ) - - def test_rollback_with_only_single_arg(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - 
"jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "__pub_user": "root", - "__pub_arg": [{"sync": True}], - "sync": True, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221193615696475", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(**args) - mock_rollback.assert_called_once_with(0) - mock_commit.assert_called_once_with(sync=True) - - def test_rollback_with_only_multiple_args_no_id(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - args = { - "comment": "Comitted via salt", - "__pub_user": "root", - "__pub_arg": [ - {"comment": "Comitted via salt", "confirm": 3, "sync": True} - ], - "confirm": 3, - "sync": True, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221193945996362", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rollback(**args) - mock_rollback.assert_called_with(0) - mock_commit.assert_called_once_with( - sync=True, confirm=3, comment="Comitted via salt" - ) - - def test_rollback_with_diffs_file_option_when_diff_is_None(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff: - mock_commit_check.return_value = True - mock_diff.return_value = "diff" - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], - "confirm": 2, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221205153884009", - "__pub_tgt": "mac_min", - 
"__pub_tgt_type": "glob", - "__pub_ret": "", - "diffs_file": "/home/regress/diff", - } - junos.rollback(**args) - mock_fopen.assert_called_with("/home/regress/diff", "w") - - def test_rollback_with_diffs_file_option(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff: - mock_commit_check.return_value = True - mock_diff.return_value = None - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "/home/regress/diff", "confirm": 2}], - "confirm": 2, - "__pub_fun": "junos.rollback", - "__pub_jid": "20170221205153884009", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - "diffs_file": "/home/regress/diff", - } - junos.rollback(**args) - assert not mock_fopen.called - - def test_rollback_commit_check_exception(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not commit check due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_rollback_commit_exception(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.commit" - ) as mock_commit, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = True - mock_commit.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Rollback successful but commit failed with error "Test exception"' - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def 
test_rollback_commit_check_fails(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.rollback" - ) as mock_rollback: - mock_commit_check.return_value = False - ret = dict() - ret["message"] = "Rollback successful but pre-commit check failed." - ret["out"] = False - self.assertEqual(junos.rollback(), ret) - - def test_diff_without_args(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: - junos.diff() - mock_diff.assert_called_with(rb_id=0) - - def test_diff_with_arg(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: - junos.diff(id=2) - mock_diff.assert_called_with(rb_id=2) - - def test_diff_exception(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff: - mock_diff.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not get diff with error "Test exception"' - ret["out"] = False - self.assertEqual(junos.diff(), ret) - - def test_ping_without_args(self): - ret = dict() - ret["message"] = "Please specify the destination ip to ping." 
- ret["out"] = False - self.assertEqual(junos.ping(), ret) - - def test_ping(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - junos.ping("1.1.1.1") - args = mock_execute.call_args - rpc = "51.1.1.1" - self.assertEqualXML(args[0][0], rpc) - - def test_ping_ttl(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - args = { - "__pub_user": "sudo_drajvi", - "__pub_arg": ["1.1.1.1", {"ttl": 3}], - "__pub_fun": "junos.ping", - "__pub_jid": "20170306165237683279", - "__pub_tgt": "mac_min", - "ttl": 3, - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.ping("1.1.1.1", **args) - exec_args = mock_execute.call_args - rpc = "51.1.1.13" - self.assertEqualXML(exec_args[0][0], rpc) - - def test_ping_exception(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.ping("1.1.1.1"), ret) - - def test_cli_without_args(self): - ret = dict() - ret["message"] = "Please provide the CLI command to be executed." 
- ret["out"] = False - self.assertEqual(junos.cli(), ret) - - def test_cli_with_format_as_empty_string(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - junos.cli("show version", format="") - mock_cli.assert_called_with("show version", "text", warning=False) - - def test_cli(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.return_vale = "CLI result" - ret = dict() - ret["message"] = "CLI result" - ret["out"] = True - junos.cli("show version") - mock_cli.assert_called_with("show version", "text", warning=False) - - def test_cli_format_xml(self): - with patch("salt.modules.junos.jxmlease.parse") as mock_jxml, patch( - "salt.modules.junos.etree.tostring" - ) as mock_to_string, patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.return_value = "test" - mock_jxml.return_value = "test" - args = { - "__pub_user": "root", - "__pub_arg": [{"format": "xml"}], - "format": "xml", - "__pub_fun": "junos.cli", - "__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "test" - ret["out"] = True - self.assertEqual(junos.cli("show version", **args), ret) - mock_cli.assert_called_with("show version", "xml", warning=False) - mock_to_string.assert_called_once_with("test") - assert mock_jxml.called - - def test_cli_exception_in_cli(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.cli("show version"), ret) - - def test_cli_output_save(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( - "salt.utils.files.fopen" - ) as mock_fopen: - mock_cli.return_value = "Test return" - args = { - "__pub_user": "root", - "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], - "format": "text", - "dest": "/path/to/file", - "__pub_fun": "junos.cli", - 
"__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Test return" - ret["out"] = True - self.assertEqual(junos.cli("show version", **args), ret) - mock_fopen.assert_called_with("/path/to/file", "w") - mock_cli.assert_called_with("show version", "text", warning=False) - - def test_cli_output_save_ioexception(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli, patch( - "salt.utils.files.fopen" - ) as mock_fopen: - mock_cli.return_value = "Test return" - mock_fopen.side_effect = IOError() - args = { - "__pub_user": "root", - "__pub_arg": [{"format": "text", "dest": "/path/to/file"}], - "format": "text", - "dest": "/path/to/file", - "__pub_fun": "junos.cli", - "__pub_jid": "20170221182531323467", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = 'Unable to open "/path/to/file" to write' - ret["out"] = False - self.assertEqual(junos.cli("show version", **args), ret) - - def test_shutdown_without_args(self): - ret = dict() - ret["message"] = "Provide either one of the arguments: shutdown or reboot." - ret["out"] = False - self.assertEqual(junos.shutdown(), ret) - - def test_shutdown_with_reboot_args(self): - with patch("salt.modules.junos.SW.reboot") as mock_reboot: - ret = dict() - ret["message"] = "Successfully powered off/rebooted." - ret["out"] = True - args = { - "__pub_user": "root", - "__pub_arg": [{"reboot": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - self.assertEqual(junos.shutdown(**args), ret) - assert mock_reboot.called - - def test_shutdown_with_poweroff_args(self): - with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: - ret = dict() - ret["message"] = "Successfully powered off/rebooted." 
- ret["out"] = True - args = { - "__pub_user": "root", - "__pub_arg": [{"shutdown": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - self.assertEqual(junos.shutdown(**args), ret) - assert mock_poweroff.called - - def test_shutdown_with_shutdown_as_false(self): - ret = dict() - ret["message"] = "Nothing to be done." - ret["out"] = False - args = { - "__pub_user": "root", - "__pub_arg": [{"shutdown": False}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - self.assertEqual(junos.shutdown(**args), ret) - - def test_shutdown_with_in_min_arg(self): - with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: - args = { - "__pub_user": "root", - "in_min": 10, - "__pub_arg": [{"in_min": 10, "shutdown": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222231445709212", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.shutdown(**args) - mock_poweroff.assert_called_with(in_min=10) - - def test_shutdown_with_at_arg(self): - with patch("salt.modules.junos.SW.reboot") as mock_reboot: - args = { - "__pub_user": "root", - "__pub_arg": [{"at": "12:00 pm", "reboot": True}], - "reboot": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "201702276857", - "at": "12:00 pm", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.shutdown(**args) - mock_reboot.assert_called_with(at="12:00 pm") - - def test_shutdown_fail_with_exception(self): - with patch("salt.modules.junos.SW.poweroff") as mock_poweroff: - mock_poweroff.side_effect = self.raise_exception - args = { - "__pub_user": "root", - "__pub_arg": [{"shutdown": True}], - "shutdown": True, - "__pub_fun": "junos.shutdown", - "__pub_jid": "20170222213858582619", - "__pub_tgt": 
"mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = 'Could not poweroff/reboot because "Test exception"' - ret["out"] = False - self.assertEqual(junos.shutdown(**args), ret) - - def test_install_config_without_args(self): - ret = dict() - ret[ - "message" - ] = "Please provide the salt path where the configuration is present" - ret["out"] = False - self.assertEqual(junos.install_config(), ret) - - def test_install_config_cp_fails(self): - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - ret = dict() - ret = dict() - ret["message"] = "Invalid file path." - ret["out"] = False - self.assertEqual(junos.install_config("path"), ret) - - def test_install_config_file_cp_fails(self): - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - ret = dict() - ret = dict() - ret["message"] = "Invalid file path." - ret["out"] = False - self.assertEqual(junos.install_config("path"), ret) - - def test_install_config(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - 
mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.set"), ret - ) - mock_load.assert_called_with(path="test/path/config", format="set") - - def test_install_config_xml_file(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.xml"), ret - ) - mock_load.assert_called_with(path="test/path/config", format="xml") - - def test_install_config_text_file(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual(junos.install_config("salt://actual/path/config"), ret) - mock_load.assert_called_with(path="test/path/config", format="text") - - def test_install_config_cache_not_exists(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value=None), - "file.rmdir": MagicMock(return_value="True"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "tempfile.mkdtemp" - ) as mock_mkdtemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - mock_mkdtemp.return_value = "/tmp/argr5351afd" - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config( - "salt://actual/path/config", template_vars=True - ), - ret, - ) - mock_mkstemp.assert_called_with() - - def test_install_config_replace(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"replace": True}], - "replace": True, - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.set", **args), ret - ) - mock_load.assert_called_with( - path="test/path/config", format="set", merge=False - ) - - def test_install_config_overwrite(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"overwrite": True}], - "overwrite": True, - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config.xml", **args), ret - ) - mock_load.assert_called_with( - path="test/path/config", format="xml", overwrite=True - ) - - def test_install_config_overwrite_false(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"overwrite": False}], - "overwrite": False, - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual( - junos.install_config("salt://actual/path/config", **args), ret - ) - mock_load.assert_called_with( - path="test/path/config", format="text", merge=True - ) - - def test_install_config_load_causes_exception(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_load.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not load configuration due to : "Test exception"' - ret["format"] = "set" - ret["out"] = False - self.assertEqual(junos.install_config(path="actual/path/config.set"), ret) - - def test_install_config_no_diff(self): - with patch("jnpr.junos.utils.config.Config.diff") as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = None - ret = dict() - ret["message"] = "Configuration already applied!" 
- ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config"), ret) - - def test_install_config_write_diff(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "copy/config/here"}], - "diffs_file": "copy/config/here", - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Successfully loaded and committed!" 
- ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config", **args), ret) - mock_fopen.assert_called_with("copy/config/here", "w") - - def test_install_config_write_diff_exception(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as mock_fopen, patch( - "salt.utils.stringutils.to_str" - ) as mock_strgutils, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - mock_strgutils.side_effect = self.raise_exception - - args = { - "__pub_user": "root", - "__pub_arg": [{"diffs_file": "copy/config/here"}], - "diffs_file": "copy/config/here", - "__pub_fun": "junos.install_config", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - - ret = dict() - ret["message"] = "Could not write into diffs_file due to: 'Test exception'" - ret["out"] = False - self.assertEqual(junos.install_config("actual/path/config", **args), ret) - - def test_install_config_commit_params(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, 
patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - args = { - "comment": "comitted via salt", - "__pub_user": "root", - "__pub_arg": [{"comment": "comitted via salt", "confirm": 3}], - "confirm": 3, - "__pub_fun": "junos.commit", - "__pub_jid": "20170221182856987820", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Successfully loaded and committed!" - ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config", **args), ret) - mock_commit.assert_called_with(comment="comitted via salt", confirm=3) - - def test_install_config_commit_check_fails(self): - with patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = False - - ret = dict() - ret["message"] = ( - "Loaded configuration but commit check failed, hence rolling back" - " configuration." 
- ) - ret["out"] = False - self.assertEqual(junos.install_config("actual/path/config.xml"), ret) - - def test_install_config_commit_exception(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - mock_commit.side_effect = self.raise_exception - ret = dict() - ret[ - "message" - ] = 'Commit check successful but commit failed with "Test exception"' - ret["out"] = False - self.assertEqual(junos.install_config("actual/path/config"), ret) - - def test_install_config_test_mode(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - ret = dict() - ret["message"] = ( - "Commit check 
passed, but skipping commit for dry-run and rolling back" - " configuration." - ) - ret["out"] = True - self.assertEqual(junos.install_config("actual/path/config", test=True), ret) - mock_commit.assert_not_called() - - def test_install_config_write_diff_dynamic_mode(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 - mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - ret = dict() - ret[ - "message" - ] = "Write diff is not supported with dynamic/ephemeral configuration mode" - ret["out"] = False - self.assertEqual( - junos.install_config( - "actual/path/config", mode="dynamic", diffs_file="/path/to/dif" - ), - ret, - ) - mock_commit.assert_not_called() - - def test_install_config_unknown_mode(self): - with patch("jnpr.junos.utils.config.Config.commit") as mock_commit, patch( - "jnpr.junos.utils.config.Config.commit_check" - ) as mock_commit_check, patch( - "jnpr.junos.utils.config.Config.diff" - ) as mock_diff, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.getsize" - ) as mock_getsize: - mock_isfile.return_value = True - mock_getsize.return_value = 10 
- mock_mkstemp.return_value = "test/path/config" - mock_diff.return_value = "diff" - mock_commit_check.return_value = True - ret = dict() - ret["message"] = "install_config failed due to: unsupported action: abcdef" - ret["out"] = False - self.assertEqual( - junos.install_config("actual/path/config", mode="abcdef"), ret - ) - mock_commit.assert_not_called() - - def test_zeroize(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - result = junos.zeroize() - ret = dict() - ret["out"] = True - ret["message"] = "Completed zeroize and rebooted" - mock_cli.assert_called_once_with("request system zeroize") - self.assertEqual(result, ret) - - def test_zeroize_throw_exception(self): - with patch("jnpr.junos.device.Device.cli") as mock_cli: - mock_cli.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not zeroize due to : "Test exception"' - ret["out"] = False - self.assertEqual(junos.zeroize(), ret) - - def test_install_os_without_args(self): - ret = dict() - ret[ - "message" - ] = "Please provide the salt path where the junos image is present." - ret["out"] = False - self.assertEqual(junos.install_os(), ret) - - def test_install_os_cp_fails(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="xxxx"), - "file.rmdir": MagicMock(return_value="True"), - }, - ): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = False - mock_install.return_value = ( - False, - "Invalid path. 
Please provide a valid image path", - ) - ret = dict() - ret["message"] = ( - "Installation failed. Reason: Invalid path. Please provide a valid" - " image path" - ) - ret["out"] = False - self.assertEqual(junos.install_os("salt://image/path/"), ret) - - def test_install_os_image_cp_fails(self): - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - ret = dict() - ret["message"] = "Invalid path. Please provide a valid image path" - ret["out"] = False - self.assertEqual(junos.install_os("/image/path/"), ret) - - def test_install_os(self): - with patch.dict( - junos.__salt__, - { - "cp.is_cached": MagicMock(return_value="test/path/config"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." 
- self.assertEqual(junos.install_os("path"), ret) - - def test_install_os_failure(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = False, "because we are testing failure" - ret = dict() - ret["out"] = False - ret[ - "message" - ] = "Installation failed. Reason: because we are testing failure" - self.assertEqual(junos.install_os("path"), ret) - - def test_install_os_with_reboot_arg(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "jnpr.junos.utils.sw.SW.reboot" - ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - args = { - "__pub_user": "root", - "__pub_arg": [{"reboot": True}], - "reboot": True, - "__pub_fun": "junos.install_os", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret["message"] = "Successfully installed and rebooted!" 
- ret["out"] = True - self.assertEqual(junos.install_os("path", **args), ret) - - def test_install_os_pyez_install_throws_exception(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Installation failed due to: "Test exception"' - ret["out"] = False - self.assertEqual(junos.install_os("path"), ret) - - def test_install_os_with_reboot_raises_exception(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "jnpr.junos.utils.sw.SW.reboot" - ) as mock_reboot, patch("salt.utils.files.safe_rm") as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - mock_reboot.side_effect = self.raise_exception - args = { - "__pub_user": "root", - "__pub_arg": [{"reboot": True}], - "reboot": True, - "__pub_fun": "junos.install_os", - "__pub_jid": "20170222213858582619", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - ret = dict() - ret[ - "message" - ] = 'Installation successful but reboot failed due to : "Test exception"' - ret["out"] = False - self.assertEqual(junos.install_os("path", **args), ret) - - def test_install_os_no_copy(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - 
"salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." - self.assertEqual(junos.install_os("path", no_copy=True), ret) - mock_install.assert_called_with( - "path", no_copy=True, progress=True, timeout=1800 - ) - mock_mkstemp.assert_not_called() - mock_safe_rm.assert_not_called() - - def test_install_os_issu(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." - self.assertEqual(junos.install_os("path", issu=True), ret) - mock_install.assert_called_with(ANY, issu=True, progress=True, timeout=1800) - - def test_install_os_add_params(self): - with patch("jnpr.junos.utils.sw.SW.install") as mock_install, patch( - "salt.utils.files.safe_rm" - ) as mock_safe_rm, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstemp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "os.path.getsize" - ) as mock_getsize: - mock_getsize.return_value = 10 - mock_isfile.return_value = True - mock_install.return_value = True, "installed" - ret = dict() - ret["out"] = True - ret["message"] = "Installed the os." 
- remote_path = "/path/to/file" - self.assertEqual( - junos.install_os( - "path", remote_path=remote_path, nssu=True, validate=True - ), - ret, - ) - mock_install.assert_called_with( - ANY, - nssu=True, - remote_path=remote_path, - progress=True, - validate=True, - timeout=1800, - ) - - def test_file_copy_without_args(self): - self.assertRaises(TypeError, junos.file_copy) - - @patch("paramiko.SSHClient") - @patch("scp.SCPClient.put") - @patch("scp.SCPClient.__init__") - def test_file_copy_invalid_src(self, mock_scpclient, mock_put, mock_ssh): - mock_scpclient.return_value = None - invalid_path = "invalid/file/path" - mock_put.side_effect = Exception(invalid_path) - with patch("os.path.isfile") as mock_isfile: - mock_isfile.return_value = False - ret = dict() - ret["message"] = 'Could not copy file : "invalid/file/path"' - ret["out"] = False - self.assertEqual(junos.file_copy(invalid_path, "file"), ret) - - def test_file_copy_without_dest(self): - self.assertRaises(TypeError, junos.file_copy, src="/home/user/config.set") - - def test_file_copy(self): - with patch("salt.modules.junos.SCP") as mock_scp, patch( - "os.path.isfile" - ) as mock_isfile: - mock_isfile.return_value = True - ret = dict() - ret["message"] = "Successfully copied file from test/src/file to file" - ret["out"] = True - self.assertEqual(junos.file_copy(dest="file", src="test/src/file"), ret) - - def test_file_copy_exception(self): - with patch("salt.modules.junos.SCP") as mock_scp, patch( - "os.path.isfile" - ) as mock_isfile: - mock_isfile.return_value = True - mock_scp.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'Could not copy file : "Test exception"' - ret["out"] = False - self.assertEqual(junos.file_copy(dest="file", src="test/src/file"), ret) - - # These test cases test the __virtual__ function, used internally by salt - # to check if the given module is loadable. This function is not used by - # an external user. 
- - def test_virtual_proxy_unavailable(self): - with patch.dict(junos.__opts__, {}): - res = ( - False, - "The junos or dependent module could not be loaded: " - "junos-eznc or jxmlease or yamlordereddictloader or " - "proxy could not be loaded.", - ) - self.assertEqual(junos.__virtual__(), res) - - def test_virtual_all_true(self): - with patch.dict(junos.__opts__, {"proxy": "test"}): - self.assertEqual(junos.__virtual__(), "junos") - - def test_rpc_without_args(self): - ret = dict() - ret["message"] = "Please provide the rpc to execute." - ret["out"] = False - self.assertEqual(junos.rpc(), ret) - - def test_rpc_get_config_exception(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'RPC execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rpc("get_config"), ret) - - def test_rpc_get_config_filter(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.return_value = etree.XML("") - args = { - "__pub_user": "root", - "__pub_arg": [ - "get-config", - {"filter": ""}, - ], - "__pub_fun": "junos.rpc", - "__pub_jid": "20170314162715866528", - "__pub_tgt": "mac_min", - "__pub_tgt_type": "glob", - "filter": "", - "__pub_ret": "", - } - junos.rpc("get-config", **args) - exec_args = mock_execute.call_args - expected_rpc = ( - "' - ) - self.assertEqualXML(exec_args[0][0], expected_rpc) - - def test_rpc_get_interface_information(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - junos.rpc("get-interface-information", format="json") - args = mock_execute.call_args - expected_rpc = '' - self.assertEqualXML(args[0][0], expected_rpc) - - def test_rpc_get_interface_information_with_kwargs(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - args = { - "__pub_user": "root", - "__pub_arg": [ - "get-interface-information", - "", - "text", - {"terse": True, 
"interface_name": "lo0", "format": "text"}, - ], - "format": "text", - "terse": True, - "__pub_fun": "junos.rpc", - "__pub_jid": "20170314160943363563", - "__pub_tgt": "mac_min", - "interface_name": "lo0", - "__pub_tgt_type": "glob", - "__pub_ret": "", - } - junos.rpc("get-interface-information", **args) - args = mock_execute.call_args - expected_rpc = ( - '' - "lo0" - ) - self.assertEqualXML(etree.tostring(args[0][0]), expected_rpc) - - def test_rpc_get_chassis_inventory_filter_as_arg(self): - with patch("salt.modules.junos.jxmlease.parse") as mock_jxmlease, patch( - "salt.modules.junos.etree.tostring" - ) as mock_tostring, patch( - "salt.modules.junos.logging.Logger.warning" - ) as mock_warning, patch( - "jnpr.junos.device.Device.execute" - ) as mock_execute: - junos.rpc( - "get-chassis-inventory", - filter="", - ) - mock_warning.assert_called_with( - 'Filter ignored as it is only used with "get-config" rpc' - ) - - def test_rpc_get_interface_information_exception(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.side_effect = self.raise_exception - ret = dict() - ret["message"] = 'RPC execution failed due to "Test exception"' - ret["out"] = False - self.assertEqual(junos.rpc("get_interface_information"), ret) - - def test_rpc_write_file_format_text(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_execute.return_value = etree.XML( - "text rpc reply" - ) - with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: - junos.rpc("get-chassis-inventory", dest="/path/to/file", format="text") - writes = m_open.write_calls() - assert writes == ["text rpc reply"], writes - - def test_rpc_write_file_format_json(self): - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "salt.utils.json.dumps" - ) as mock_dumps: - mock_dumps.return_value = "json rpc reply" - with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: - junos.rpc("get-chassis-inventory", 
dest="/path/to/file", format="json") - writes = m_open.write_calls() - assert writes == ["json rpc reply"], writes - - def test_rpc_write_file(self): - with patch("salt.modules.junos.jxmlease.parse") as mock_parse, patch( - "salt.modules.junos.etree.tostring" - ) as mock_tostring, patch("jnpr.junos.device.Device.execute") as mock_execute: - mock_tostring.return_value = "xml rpc reply" - with patch("salt.utils.files.fopen", mock_open(), create=True) as m_open: - junos.rpc("get-chassis-inventory", dest="/path/to/file") - writes = m_open.write_calls() - assert writes == ["xml rpc reply"], writes - - def test_lock_success(self): - ret_exp = {"out": True, "message": "Successfully locked the configuration."} - ret = junos.lock() - self.assertEqual(ret, ret_exp) - - def test_lock_error(self): - ret_exp = {"out": False, "message": 'Could not gain lock due to : "LockError"'} - with patch("jnpr.junos.utils.config.Config.lock") as mock_lock: - mock_lock.side_effect = LockError(None) - ret = junos.lock() - self.assertEqual(ret, ret_exp) - - def test_unlock_success(self): - ret_exp = {"out": True, "message": "Successfully unlocked the configuration."} - ret = junos.unlock() - self.assertEqual(ret, ret_exp) - - def test_unlock_error(self): - ret_exp = { - "out": False, - "message": 'Could not unlock configuration due to : "UnlockError"', - } - with patch("jnpr.junos.utils.config.Config.unlock") as mock_unlock: - mock_unlock.side_effect = UnlockError(None) - ret = junos.unlock() - self.assertEqual(ret, ret_exp) - - def test_load_none_path(self): - ret_exp = { - "out": False, - "message": ( - "Please provide the salt path where the configuration is present" - ), - } - ret = junos.load() - self.assertEqual(ret, ret_exp) - - def test_load_wrong_tmp_file(self): - ret_exp = { - "out": False, - "message": ( - 'Could not load configuration due to : "[Errno 2] No such file or' - " directory: '/pat/to/tmp/file'\"" - ), - "format": "text", - } - with patch.dict( - junos.__salt__, - { - 
"cp.is_cached": MagicMock(return_value="/pat/to/tmp/file"), - "cp.hash_file": MagicMock( - return_value={"hash_type": "sha256", "hsum": "a386e49c17"} - ), - "file.get_hash": MagicMock(return_value="a386e49c17"), - }, - ): - with patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch("os.path.getsize") as mock_getsize, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp: - mock_mkstmp.return_value = "/pat/to/tmp/file" - mock_getsize.return_value = 1000 - ret = junos.load("salt://path/to/file") - self.assertEqual(ret, ret_exp) - - def test_load_invalid_path(self): - with patch("salt.utils.files.mkstemp") as mock_mkstmp: - mock_mkstmp.return_value = "/path/to/file" - self.assertRaises(FileNotFoundError, junos.load, path="/path/to/file") - - def test_load_no_extension(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file") - mock_load.assert_called_with(format="text", path="/path/to/file") - self.assertEqual(ret, ret_exp) - - def test_load_xml_extension(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("os.path.isfile") as mock_isfile, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file.xml" - mock_isfile.return_value = True - ret = junos.load("/path/to/file.xml") - 
mock_load.assert_called_with(format="xml", path="/path/to/file.xml") - self.assertEqual(ret, ret_exp) - - def test_load_xml_extension_with_kwargs(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "os.path.isfile" - ) as mock_isfile, patch( - "salt.utils.files.fopen" - ) as fopen, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp: - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file.xml", template_vars=dict(hostname="test")) - mock_load.assert_called_with( - format="xml", path="/path/to/file", template_vars={"hostname": "test"} - ) - self.assertEqual(ret, ret_exp) - - def test_load_set_extension(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file.set" - mock_isfile.return_value = True - ret = junos.load("/path/to/file.set") - mock_load.assert_called_with(format="set", path="/path/to/file.set") - self.assertEqual(ret, ret_exp) - - def test_load_replace_true(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - 
mock_isfile.return_value = True - ret = junos.load("/path/to/file", replace=True) - mock_load.assert_called_with( - format="text", merge=False, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_replace_false(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file", replace=False) - mock_load.assert_called_with( - format="text", replace=False, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_overwrite_true(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file", overwrite=True) - mock_load.assert_called_with( - format="text", overwrite=True, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_overwrite_false(self): - ret_exp = {"out": True, "message": "Successfully loaded the configuration."} - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "salt.utils.files.mkstemp" - ) as mock_mkstmp, patch( - "os.path.isfile" - ) as mock_isfile: - 
mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - ret = junos.load("/path/to/file", overwrite=False) - mock_load.assert_called_with( - format="text", merge=True, path="/path/to/file" - ) - self.assertEqual(ret, ret_exp) - - def test_load_error(self): - ret_exp = { - "out": False, - "format": "text", - "message": 'Could not load configuration due to : "Test Error"', - } - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load, patch("salt.utils.files.mkstemp") as mock_mkstmp, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch( - "os.path.isfile" - ) as mock_isfile: - mock_getsize.return_value = 1000 - mock_mkstmp.return_value = "/path/to/file" - mock_isfile.return_value = True - mock_load.side_effect = Exception("Test Error") - ret = junos.load("/path/to/file") - self.assertEqual(ret, ret_exp) - - def test_load_template(self): - ret_exp = { - "out": True, - "message": "Successfully loaded the configuration.", - } - with patch("os.path.getsize") as mock_getsize, patch( - "jnpr.junos.utils.config.Config.load" - ) as mock_load: - ret = junos.load("tests/unit/modules/templates/basic2.j2", test=True) - self.assertEqual(ret, ret_exp) - - def test_commit_check_success(self): - ret_exp = {"out": True, "message": "Commit check succeeded."} - ret = junos.commit_check() - self.assertEqual(ret, ret_exp) - - def test_commit_check_error(self): - ret_exp = {"out": False, "message": "Commit check failed with "} - with patch("jnpr.junos.utils.config.Config.commit_check") as mock_check: - mock_check.side_effect = Exception - ret = junos.commit_check() - self.assertEqual(ret, ret_exp) - - def test_get_table_wrong_path(self): - table = "ModuleTable" - file = "sample.yml" - path = "/path/to/file" - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": "Given table file {} cannot be 
located".format(file), - } - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( - "salt.utils.files.fopen" - ) as mock_fopen, patch( - "jnpr.junos.factory.FactoryLoader.load" - ) as mock_load: - ret = junos.get_table(table, file, path) - self.assertEqual(ret, ret_exp) - mock_load.assert_not_called() - - def test_get_table_no_path_no_file(self): - table = "ModuleTable" - file = "inventory.yml" - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": "Given table file {} cannot be located".format(file), - } - with patch.dict( - junos.__salt__, {"file.file_exists": MagicMock(return_value=False)} - ): - with patch("jnpr.junos.factory.FactoryLoader.load") as mock_load, patch( - "glob.glob" - ) as mock_fopen: - mock_fopen.return_value = [] - ret = junos.get_table(table, file) - self.assertEqual(ret, ret_exp) - mock_load.assert_not_called() - - def test_get_table_yaml_load_error(self): - table = "ModuleTable" - file = "inventory.yml" - path = "/path/to/file" - message = "File not located test" - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": "Uncaught exception during YAML Load - please report: {}".format( - message - ), - } - with patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as mock_file, patch("glob.glob") as mock_fopen, patch.object( - yaml, "load" - ) as mock_yamlload: - mock_fopen.return_value = ["/path/to/file"] - mock_yamlload.side_effect = OSError(message) - ret = junos.get_table(table, file, path) - self.assertEqual(ret, ret_exp) - - def test_get_table_api_error(self): - table = "sample" - file = "inventory.yml" - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { 
- "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - }, - }, - } - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "sample", - "message": ( - "Uncaught exception during get API call - please report: '{}'".format( - str(table) - ) - ), - } - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "yaml.load" - ) as mock_yamlload, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open: - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file) - self.assertEqual(ret["out"], ret_exp["out"]) - self.assertEqual(ret["tablename"], ret_exp["tablename"]) - self.assertEqual(ret["message"], ret_exp["message"]) - - def test_get_table_connect_closed_error(self): - table = "ModuleTable" - file = "inventory.yml" - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { - "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - }, - }, - } - ret_exp = { - "out": False, - "hostname": "1.1.1.1", - "tablename": "ModuleTable", - "message": ( - "Got ConnectClosedError exception. 
Connection lost with Device(1.1.1.1)" - ), - } - with patch("jnpr.junos.factory.optable.OpTable.get") as mock_load, patch( - "yaml.load" - ) as mock_yamlload, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open: - dev = Device(host="1.1.1.1", user="rick") - mock_load.side_effect = ConnectClosedError(dev) - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file) - self.assertEqual(ret["out"], ret_exp["out"]) - self.assertEqual(ret["tablename"], ret_exp["tablename"]) - self.assertEqual(ret["message"], ret_exp["message"]) - - def test_get_table_inventory(self): - table = "ModuleTable" - file = "inventory.yml" - pyez_tables_path = os.path.dirname(os.path.abspath(tables_dir.__file__)) - path = pyez_tables_path - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { - "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - }, - }, - } - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch("yaml.load") as mock_yamlload, patch( - "salt.utils.json.dumps" - ) as mock_dumps: - mock_dumps.return_value = "json rpc reply" - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file, path) - self.assertEqual(ret["out"], True) - - def test_get_table_no_path_inventory(self): - table = "ModuleTable" - file = "inventory.yml" - table_yamlload = { - "ModuleTable": { - "item": ( - ".//chassis-sub-module|.//chassis-module|.//chassis-sub-sub-module" - ), - "key": "name", - "rpc": "get-chassis-inventory", - "view": "ModuleTableView", - }, - "ModuleTableView": { - "fields": { - "jname": "name", - "pn": "part-number", - "sn": "serial-number", - "type": "description", - "ver": "version", - 
}, - }, - } - with patch("jnpr.junos.device.Device.execute") as mock_execute, patch( - "salt.utils.files.fopen", mock_open(), create=True - ) as m_open, patch("yaml.load") as mock_yamlload, patch( - "salt.utils.json.dumps" - ) as mock_dumps: - mock_dumps.return_value = "json rpc reply" - mock_yamlload.return_value = table_yamlload - ret = junos.get_table(table, file) - self.assertEqual(ret["out"], True) diff --git a/tests/unit/modules/test_keystone.py b/tests/unit/modules/test_keystone.py deleted file mode 100644 index cdf6eeb30c9c..000000000000 --- a/tests/unit/modules/test_keystone.py +++ /dev/null @@ -1,1090 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import salt.modules.config as config -import salt.modules.keystone as keystone -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, call, patch -from tests.support.unit import TestCase - - -class MockEC2: - """ - Mock of EC2 class - """ - - def __init__(self): - self.access = "" - self.secret = "" - self.tenant_id = "" - self.user_id = "" - self.connection_args = "" - self.profile = "" - - @staticmethod - def create(userid, tenantid): - """ - Mock of create method - """ - cr_ec2 = MockEC2() - cr_ec2.tenant_id = tenantid - cr_ec2.user_id = userid - return cr_ec2 - - def delete(self, userid, accesskey): - """ - Mock of delete method - """ - self.access = accesskey - self.user_id = userid - return True - - @staticmethod - def get(user_id, access, profile, **connection_args): - """ - Mock of get method - """ - cr_ec2 = MockEC2() - cr_ec2.profile = profile - cr_ec2.access = access - cr_ec2.user_id = user_id - cr_ec2.connection_args = connection_args - return cr_ec2 - - @staticmethod - def list(user_id): - """ - Mock of list method - """ - cr_ec2 = MockEC2() - cr_ec2.user_id = user_id - return [cr_ec2] - - -class MockEndpoints: - """ - Mock of Endpoints class - """ - - def __init__(self): - self.id = "007" - self.region = "RegionOne" - self.adminurl = "adminurl" - 
self.internalurl = "internalurl" - self.publicurl = "publicurl" - self.service_id = "117" - - @staticmethod - def list(): - """ - Mock of list method - """ - return [MockEndpoints()] - - @staticmethod - def create(region, service_id, publicurl, adminurl, internalurl): - """ - Mock of create method - """ - return (region, service_id, publicurl, adminurl, internalurl) - - @staticmethod - def delete(id): - """ - Mock of delete method - """ - return id - - -class MockServices: - """ - Mock of Services class - """ - - flag = None - - def __init__(self): - self.id = "117" - self.name = "iptables" - self.description = "description" - self.type = "type" - - @staticmethod - def create(name, service_type, description): - """ - Mock of create method - """ - service = MockServices() - service.id = "005" - service.name = name - service.description = description - service.type = service_type - return service - - def get(self, service_id): - """ - Mock of get method - """ - service = MockServices() - if self.flag == 1: - service.id = "asd" - return [service] - elif self.flag == 2: - service.id = service_id - return service - return [service] - - def list(self): - """ - Mock of list method - """ - service = MockServices() - if self.flag == 1: - service.id = "asd" - return [service] - return [service] - - @staticmethod - def delete(service_id): - """ - Mock of delete method - """ - return service_id - - -class MockRoles: - """ - Mock of Roles class - """ - - flag = None - - def __init__(self): - self.id = "113" - self.name = "nova" - self.user_id = "446" - self.tenant_id = "a1a1" - - @staticmethod - def create(name): - """ - Mock of create method - """ - return name - - def get(self, role_id): - """ - Mock of get method - """ - role = MockRoles() - if self.flag == 1: - role.id = None - return role - role.id = role_id - return role - - @staticmethod - def list(): - """ - Mock of list method - """ - return [MockRoles()] - - @staticmethod - def delete(role): - """ - Mock of delete 
method - """ - return role - - @staticmethod - def add_user_role(user_id, role_id, tenant_id): - """ - Mock of add_user_role method - """ - return (user_id, role_id, tenant_id) - - @staticmethod - def remove_user_role(user_id, role_id, tenant_id): - """ - Mock of remove_user_role method - """ - return (user_id, role_id, tenant_id) - - @staticmethod - def roles_for_user(user, tenant): - """ - Mock of roles_for_user method - """ - role = MockRoles() - role.user_id = user - role.tenant_id = tenant - return [role] - - -class MockTenants: - """ - Mock of Tenants class - """ - - flag = None - - def __init__(self): - self.id = "446" - self.name = "nova" - self.description = "description" - self.enabled = "True" - - @staticmethod - def create(name, description, enabled): - """ - Mock of create method - """ - tenant = MockTenants() - tenant.name = name - tenant.description = description - tenant.enabled = enabled - return tenant - - def get(self, tenant_id): - """ - Mock of get method - """ - tenant = MockTenants() - if self.flag == 1: - tenant.id = None - return tenant - tenant.id = tenant_id - return tenant - - @staticmethod - def list(): - """ - Mock of list method - """ - return [MockTenants()] - - @staticmethod - def delete(tenant_id): - """ - Mock of delete method - """ - return tenant_id - - -class MockServiceCatalog: - """ - Mock of ServiceCatalog class - """ - - def __init__(self): - self.id = "446" - self.expires = "No" - self.user_id = "admin" - self.tenant_id = "ae04" - - def get_token(self): - """ - Mock of get_token method - """ - return { - "id": self.id, - "expires": self.expires, - "user_id": self.user_id, - "tenant_id": self.tenant_id, - } - - -class MockUsers: - """ - Mock of Users class - """ - - flag = None - - def __init__(self): - self.id = "446" - self.name = "nova" - self.email = "salt@saltstack.com" - self.enabled = "True" - self.tenant_id = "a1a1" - self.password = "salt" - - def create(self, name, password, email, tenant_id, enabled): - """ - 
Mock of create method - """ - user = MockUsers() - user.name = name - user.password = password - user.email = email - user.enabled = enabled - self.tenant_id = tenant_id - return user - - def get(self, user_id): - """ - Mock of get method - """ - user = MockUsers() - if self.flag == 1: - user.id = None - return user - user.id = user_id - return user - - @staticmethod - def list(): - """ - Mock of list method - """ - return [MockUsers()] - - @staticmethod - def delete(user_id): - """ - Mock of delete method - """ - return user_id - - @staticmethod - def update(user, name, email, enabled): - """ - Mock of update method - """ - return (user, name, email, enabled) - - @staticmethod - def update_password(user, password): - """ - Mock of update_password method - """ - return (user, password) - - -class Unauthorized(Exception): - """ - The base exception class for all exceptions. - """ - - def __init__(self, message="Test"): - super().__init__(message) - self.msg = message - - -class AuthorizationFailure(Exception): - """ - Additional exception class to Unauthorized. 
- """ - - def __init__(self, message="Test"): - super().__init__(message) - self.msg = message - - -class MockExceptions: - """ - Mock of exceptions class - """ - - def __init__(self): - self.Unauthorized = Unauthorized - self.AuthorizationFailure = AuthorizationFailure - - -class MockKeystoneClient: - """ - Mock of keystoneclient module - """ - - def __init__(self): - self.exceptions = MockExceptions() - - -class MockClient: - """ - Mock of Client class - """ - - flag = None - - def __init__(self, profile=None, **conn_args): - self.ec2 = MockEC2() - self.endpoints = MockEndpoints() - self.services = MockServices() - self.roles = MockRoles() - self.tenants = MockTenants() - self.service_catalog = MockServiceCatalog() - self.users = MockUsers() - - def Client(self, **kwargs): - """ - Mock of Client method - """ - if self.flag == 1: - raise Unauthorized - return True - - -class KeystoneTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.keystone - """ - - def setup_loader_modules(self): - return { - keystone: { - "auth": MockClient, - "client": MockClient(), - "keystoneclient": MockKeystoneClient(), - "__salt__": {"config.get": config.get}, - "__opts__": {}, - }, - config: {"__opts__": {}}, - } - - # 'ec2_credentials_create' function tests: 1 - - def test_ec2_credentials_create(self): - """ - Test if it create EC2-compatible credentials for user per tenant - """ - self.assertDictEqual( - keystone.ec2_credentials_create(), {"Error": "Could not resolve User ID"} - ) - - self.assertDictEqual( - keystone.ec2_credentials_create(user_id="salt"), - {"Error": "Could not resolve Tenant ID"}, - ) - - self.assertDictEqual( - keystone.ec2_credentials_create(user_id="salt", tenant_id="72278"), - {"access": "", "tenant_id": "72278", "secret": "", "user_id": "salt"}, - ) - - # 'ec2_credentials_delete' function tests: 1 - - def test_ec2_credentials_delete(self): - """ - Test if it delete EC2-compatible credentials - """ - self.assertDictEqual( - 
keystone.ec2_credentials_delete(), {"Error": "Could not resolve User ID"} - ) - - self.assertEqual( - keystone.ec2_credentials_delete(user_id="salt", access_key="72278"), - 'ec2 key "72278" deleted under user id "salt"', - ) - - # 'ec2_credentials_get' function tests: 1 - - def test_ec2_credentials_get(self): - """ - Test if it return ec2_credentials for a user - (keystone ec2-credentials-get) - """ - self.assertDictEqual( - keystone.ec2_credentials_get(), {"Error": "Unable to resolve user id"} - ) - - self.assertDictEqual( - keystone.ec2_credentials_get(user_id="salt"), - {"Error": "Access key is required"}, - ) - - self.assertDictEqual( - keystone.ec2_credentials_get( - user_id="salt", access="72278", profile="openstack1" - ), - { - "salt": { - "access": "72278", - "secret": "", - "tenant": "", - "user_id": "salt", - } - }, - ) - - # 'ec2_credentials_list' function tests: 1 - - def test_ec2_credentials_list(self): - """ - Test if it return a list of ec2_credentials - for a specific user (keystone ec2-credentials-list) - """ - self.assertDictEqual( - keystone.ec2_credentials_list(), {"Error": "Unable to resolve user id"} - ) - - self.assertDictEqual( - keystone.ec2_credentials_list(user_id="salt", profile="openstack1"), - {"salt": {"access": "", "secret": "", "tenant_id": "", "user_id": "salt"}}, - ) - - # 'endpoint_get' function tests: 1 - - def test_endpoint_get(self): - """ - Test if it return a specific endpoint (keystone endpoint-get) - """ - self.assertDictEqual( - keystone.endpoint_get("nova", "RegionOne", profile="openstack"), - {"Error": "Could not find the specified service"}, - ) - - ret = {"Error": "Could not find endpoint for the specified service"} - MockServices.flag = 1 - self.assertDictEqual( - keystone.endpoint_get("iptables", "RegionOne", profile="openstack"), ret - ) - - MockServices.flag = 0 - self.assertDictEqual( - keystone.endpoint_get("iptables", "RegionOne", profile="openstack"), - { - "adminurl": "adminurl", - "id": "007", - 
"internalurl": "internalurl", - "publicurl": "publicurl", - "region": "RegionOne", - "service_id": "117", - }, - ) - - # 'endpoint_list' function tests: 1 - - def test_endpoint_list(self): - """ - Test if it return a list of available endpoints - (keystone endpoints-list) - """ - self.assertDictEqual( - keystone.endpoint_list(profile="openstack1"), - { - "007": { - "adminurl": "adminurl", - "id": "007", - "internalurl": "internalurl", - "publicurl": "publicurl", - "region": "RegionOne", - "service_id": "117", - } - }, - ) - - # 'endpoint_create' function tests: 1 - - def test_endpoint_create(self): - """ - Test if it create an endpoint for an Openstack service - """ - self.assertDictEqual( - keystone.endpoint_create("nova"), - {"Error": "Could not find the specified service"}, - ) - - MockServices.flag = 2 - self.assertDictEqual( - keystone.endpoint_create( - "iptables", - "http://public/url", - "http://internal/url", - "http://adminurl/url", - "RegionOne", - ), - { - "adminurl": "adminurl", - "id": "007", - "internalurl": "internalurl", - "publicurl": "publicurl", - "region": "RegionOne", - "service_id": "117", - }, - ) - - # 'endpoint_delete' function tests: 1 - - def test_endpoint_delete(self): - """ - Test if it delete an endpoint for an Openstack service - """ - ret = {"Error": "Could not find any endpoints for the service"} - self.assertDictEqual(keystone.endpoint_delete("nova", "RegionOne"), ret) - - with patch.object( - keystone, "endpoint_get", MagicMock(side_effect=[{"id": "117"}, None]) - ): - self.assertTrue(keystone.endpoint_delete("iptables", "RegionOne")) - - # 'role_create' function tests: 1 - - def test_role_create(self): - """ - Test if it create named role - """ - self.assertDictEqual( - keystone.role_create("nova"), {"Error": 'Role "nova" already exists'} - ) - - self.assertDictEqual( - keystone.role_create("iptables"), {"Error": "Unable to resolve role id"} - ) - - # 'role_delete' function tests: 1 - - def test_role_delete(self): - """ - Test 
if it delete a role (keystone role-delete) - """ - self.assertDictEqual( - keystone.role_delete(), {"Error": "Unable to resolve role id"} - ) - - self.assertEqual(keystone.role_delete("iptables"), "Role ID iptables deleted") - - # 'role_get' function tests: 1 - - def test_role_get(self): - """ - Test if it return a specific roles (keystone role-get) - """ - self.assertDictEqual( - keystone.role_get(), {"Error": "Unable to resolve role id"} - ) - - self.assertDictEqual( - keystone.role_get(name="nova"), {"nova": {"id": "113", "name": "nova"}} - ) - - # 'role_list' function tests: 1 - - def test_role_list(self): - """ - Test if it return a list of available roles (keystone role-list) - """ - self.assertDictEqual( - keystone.role_list(), - { - "nova": { - "id": "113", - "name": "nova", - "tenant_id": "a1a1", - "user_id": "446", - } - }, - ) - - # 'service_create' function tests: 1 - - def test_service_create(self): - """ - Test if it add service to Keystone service catalog - """ - MockServices.flag = 2 - self.assertDictEqual( - keystone.service_create("nova", "compute", "OpenStack Service"), - { - "iptables": { - "description": "description", - "id": "005", - "name": "iptables", - "type": "type", - } - }, - ) - - # 'service_delete' function tests: 1 - - def test_service_delete(self): - """ - Test if it delete a service from Keystone service catalog - """ - self.assertEqual( - keystone.service_delete("iptables"), - 'Keystone service ID "iptables" deleted', - ) - - # 'service_get' function tests: 1 - - def test_service_get(self): - """ - Test if it return a list of available services (keystone services-list) - """ - MockServices.flag = 0 - self.assertDictEqual( - keystone.service_get(), {"Error": "Unable to resolve service id"} - ) - - MockServices.flag = 2 - self.assertDictEqual( - keystone.service_get(service_id="c965"), - { - "iptables": { - "description": "description", - "id": "c965", - "name": "iptables", - "type": "type", - } - }, - ) - - # 'service_list' 
function tests: 1 - - def test_service_list(self): - """ - Test if it return a list of available services (keystone services-list) - """ - MockServices.flag = 0 - self.assertDictEqual( - keystone.service_list(profile="openstack1"), - { - "iptables": { - "description": "description", - "id": "117", - "name": "iptables", - "type": "type", - } - }, - ) - - # 'tenant_create' function tests: 1 - - def test_tenant_create(self): - """ - Test if it create a keystone tenant - """ - self.assertDictEqual( - keystone.tenant_create("nova"), - { - "nova": { - "description": "description", - "id": "446", - "name": "nova", - "enabled": "True", - } - }, - ) - - # 'tenant_delete' function tests: 1 - - def test_tenant_delete(self): - """ - Test if it delete a tenant (keystone tenant-delete) - """ - self.assertDictEqual( - keystone.tenant_delete(), {"Error": "Unable to resolve tenant id"} - ) - - self.assertEqual(keystone.tenant_delete("nova"), "Tenant ID nova deleted") - - # 'tenant_get' function tests: 1 - - def test_tenant_get(self): - """ - Test if it return a specific tenants (keystone tenant-get) - """ - self.assertDictEqual( - keystone.tenant_get(), {"Error": "Unable to resolve tenant id"} - ) - - self.assertDictEqual( - keystone.tenant_get(tenant_id="446"), - { - "nova": { - "description": "description", - "id": "446", - "name": "nova", - "enabled": "True", - } - }, - ) - - # 'tenant_list' function tests: 1 - - def test_tenant_list(self): - """ - Test if it return a list of available tenants (keystone tenants-list) - """ - self.assertDictEqual( - keystone.tenant_list(), - { - "nova": { - "description": "description", - "id": "446", - "name": "nova", - "enabled": "True", - } - }, - ) - - # 'tenant_update' function tests: 1 - - def test_tenant_update(self): - """ - Test if it update a tenant's information (keystone tenant-update) - """ - self.assertDictEqual( - keystone.tenant_update(), {"Error": "Unable to resolve tenant id"} - ) - - # 'token_get' function tests: 1 - - def 
test_token_get(self): - """ - Test if it return the configured tokens (keystone token-get) - """ - self.assertDictEqual( - keystone.token_get(), - {"expires": "No", "id": "446", "tenant_id": "ae04", "user_id": "admin"}, - ) - - # 'user_list' function tests: 1 - - def test_user_list(self): - """ - Test if it return a list of available users (keystone user-list) - """ - self.assertDictEqual( - keystone.user_list(), - { - "nova": { - "name": "nova", - "tenant_id": "a1a1", - "enabled": "True", - "id": "446", - "password": "salt", - "email": "salt@saltstack.com", - } - }, - ) - - # 'user_get' function tests: 1 - - def test_user_get(self): - """ - Test if it return a specific users (keystone user-get) - """ - self.assertDictEqual( - keystone.user_get(), {"Error": "Unable to resolve user id"} - ) - - self.assertDictEqual( - keystone.user_get(user_id="446"), - { - "nova": { - "name": "nova", - "tenant_id": "a1a1", - "enabled": "True", - "id": "446", - "password": "salt", - "email": "salt@saltstack.com", - } - }, - ) - - # 'user_create' function tests: 1 - - def test_user_create(self): - """ - Test if it create a user (keystone user-create) - """ - self.assertDictEqual( - keystone.user_create( - name="nova", - password="salt", - email="salt@saltstack.com", - tenant_id="a1a1", - ), - { - "nova": { - "name": "nova", - "tenant_id": "a1a1", - "enabled": "True", - "id": "446", - "password": "salt", - "email": "salt@saltstack.com", - } - }, - ) - - # 'user_delete' function tests: 1 - - def test_user_delete(self): - """ - Test if it delete a user (keystone user-delete) - """ - self.assertDictEqual( - keystone.user_delete(), {"Error": "Unable to resolve user id"} - ) - - self.assertEqual(keystone.user_delete("nova"), "User ID nova deleted") - - # 'user_update' function tests: 1 - - def test_user_update(self): - """ - Test if it update a user's information (keystone user-update) - """ - self.assertDictEqual( - keystone.user_update(), {"Error": "Unable to resolve user id"} - ) - - 
self.assertEqual(keystone.user_update("nova"), "Info updated for user ID nova") - - # 'user_verify_password' function tests: 1 - - def test_user_verify_password(self): - """ - Test if it verify a user's password - """ - mock = MagicMock(return_value="http://127.0.0.1:35357/v2.0") - with patch.dict(keystone.__salt__, {"config.option": mock}): - self.assertDictEqual( - keystone.user_verify_password(), - {"Error": "Unable to resolve user name"}, - ) - - self.assertTrue(keystone.user_verify_password(user_id="446", name="nova")) - - MockClient.flag = 1 - self.assertFalse(keystone.user_verify_password(user_id="446", name="nova")) - - # 'user_password_update' function tests: 1 - - def test_user_password_update(self): - """ - Test if it update a user's password (keystone user-password-update) - """ - self.assertDictEqual( - keystone.user_password_update(), {"Error": "Unable to resolve user id"} - ) - - self.assertEqual( - keystone.user_password_update("nova"), "Password updated for user ID nova" - ) - - # 'user_role_add' function tests: 1 - - def test_user_role_add(self): - """ - Test if it add role for user in tenant (keystone user-role-add) - """ - self.assertEqual( - keystone.user_role_add(user="nova", tenant="nova", role="nova"), - '"nova" role added for user "nova" for "nova" tenant/project', - ) - - MockRoles.flag = 1 - self.assertDictEqual( - keystone.user_role_add(user="nova", tenant="nova", role="nova"), - {"Error": "Unable to resolve role id"}, - ) - - MockTenants.flag = 1 - self.assertDictEqual( - keystone.user_role_add(user="nova", tenant="nova"), - {"Error": "Unable to resolve tenant/project id"}, - ) - - MockUsers.flag = 1 - self.assertDictEqual( - keystone.user_role_add(user="nova"), {"Error": "Unable to resolve user id"} - ) - - # 'user_role_remove' function tests: 1 - - def test_user_role_remove(self): - """ - Test if it add role for user in tenant (keystone user-role-add) - """ - MockUsers.flag = 1 - self.assertDictEqual( - 
keystone.user_role_remove(user="nova"), - {"Error": "Unable to resolve user id"}, - ) - - MockUsers.flag = 0 - MockTenants.flag = 1 - self.assertDictEqual( - keystone.user_role_remove(user="nova", tenant="nova"), - {"Error": "Unable to resolve tenant/project id"}, - ) - - MockTenants.flag = 0 - MockRoles.flag = 1 - self.assertDictEqual( - keystone.user_role_remove(user="nova", tenant="nova", role="nova"), - {"Error": "Unable to resolve role id"}, - ) - - ret = '"nova" role removed for user "nova" under "nova" tenant' - MockRoles.flag = 0 - self.assertEqual( - keystone.user_role_remove(user="nova", tenant="nova", role="nova"), ret - ) - - # 'user_role_list' function tests: 1 - - def test_user_role_list(self): - """ - Test if it return a list of available user_roles - (keystone user-roles-list) - """ - self.assertDictEqual( - keystone.user_role_list(user="nova"), - {"Error": "Unable to resolve user or tenant/project id"}, - ) - - self.assertDictEqual( - keystone.user_role_list(user_name="nova", tenant_name="nova"), - { - "nova": { - "id": "113", - "name": "nova", - "tenant_id": "446", - "user_id": "446", - } - }, - ) - - def test_api_version_verify_ssl(self): - """ - test api_version when using verify_ssl - """ - test_verify = [True, False, None] - conn_args = { - "keystone.user": "admin", - "connection_password": "password", - "connection_tenant": "admin", - "connection_tenant_id": "id", - "connection_auth_url": "https://127.0.0.1/v2.0/", - "connection_verify_ssl": True, - } - - http_ret = {"dict": {"version": {"id": "id_test"}}} - for verify in test_verify: - mock_http = MagicMock(return_value=http_ret) - patch_http = patch("salt.utils.http.query", mock_http) - conn_args["connection_verify_ssl"] = verify - if verify is None: - conn_args.pop("connection_verify_ssl") - verify = True - - with patch_http: - ret = keystone.api_version(**conn_args) - - self.assertEqual( - mock_http.call_args_list, - [ - call( - "https://127.0.0.1/v2.0/", - decode=True, - 
decode_type="json", - verify_ssl=verify, - ) - ], - ) diff --git a/tests/unit/modules/test_linux_shadow.py b/tests/unit/modules/test_linux_shadow.py deleted file mode 100644 index ed5b962178ac..000000000000 --- a/tests/unit/modules/test_linux_shadow.py +++ /dev/null @@ -1,312 +0,0 @@ -""" - :codeauthor: Erik Johnson -""" -import textwrap - -import pytest - -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import DEFAULT, MagicMock, mock_open, patch -from tests.support.unit import TestCase - -try: - import spwd -except ImportError: - pass - -try: - import salt.modules.linux_shadow as shadow - - HAS_SHADOW = True -except ImportError: - HAS_SHADOW = False - -_PASSWORD = "lamepassword" - -# Not testing blowfish as it is not available on most Linux distros -_HASHES = dict( - md5=dict(pw_salt="TgIp9OTu", pw_hash="$1$TgIp9OTu$.d0FFP6jVi5ANoQmk6GpM1"), - sha256=dict( - pw_salt="3vINbSrC", - pw_hash="$5$3vINbSrC$hH8A04jAY3bG123yU4FQ0wvP678QDTvWBhHHFbz6j0D", - ), - sha512=dict( - pw_salt="PiGA3V2o", - pw_hash="$6$PiGA3V2o$/PrntRYufz49bRV/V5Eb1V6DdHaS65LB0fu73Tp/xxmDFr6HWJKptY2TvHRDViXZugWpnAcOnrbORpOgZUGTn.", - ), -) - - -@pytest.mark.skip_unless_on_linux -@pytest.mark.skipif(HAS_SHADOW is False, reason="shadow module is not available") -class LinuxShadowTest(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - return {shadow: {}} - - def test_gen_password(self): - """ - Test shadow.gen_password - """ - self.assertTrue(HAS_SHADOW) - for algorithm, hash_info in _HASHES.items(): - self.assertEqual( - shadow.gen_password( - _PASSWORD, crypt_salt=hash_info["pw_salt"], algorithm=algorithm - ), - hash_info["pw_hash"], - ) - - def test_set_password(self): - """ - Test the corner case in which shadow.set_password is called for a user - that has an entry in /etc/passwd but not /etc/shadow. 
- """ - original_file = textwrap.dedent( - """\ - foo:orighash:17955:::::: - bar:somehash:17955:::::: - """ - ) - original_lines = original_file.splitlines(True) - - data = { - "/etc/shadow": original_file, - "*": Exception("Attempted to open something other than /etc/shadow"), - } - isfile_mock = MagicMock( - side_effect=lambda x: True if x == "/etc/shadow" else DEFAULT - ) - password = "newhash" - shadow_info_mock = MagicMock(return_value={"passwd": password}) - - # - # CASE 1: Normal password change - # - user = "bar" - user_exists_mock = MagicMock( - side_effect=lambda x, **y: 0 if x == ["id", user] else DEFAULT - ) - with patch( - "salt.utils.files.fopen", mock_open(read_data=data) - ) as shadow_mock, patch("os.path.isfile", isfile_mock), patch.object( - shadow, "info", shadow_info_mock - ), patch.dict( - shadow.__salt__, {"cmd.retcode": user_exists_mock} - ), patch.dict( - shadow.__grains__, {"os": "CentOS"} - ): - result = shadow.set_password(user, password, use_usermod=False) - - assert result - filehandles = shadow_mock.filehandles["/etc/shadow"] - # We should only have opened twice, once to read the contents and once - # to write. - assert len(filehandles) == 2 - # We're rewriting the entire file - assert filehandles[1].mode == "w+" - # We should be calling writelines instead of write, to rewrite the - # entire file. 
- assert len(filehandles[1].writelines_calls) == 1 - # Make sure we wrote the correct info - lines = filehandles[1].writelines_calls[0] - # Should only have the same two users in the file - assert len(lines) == 2 - # The first line should be unchanged - assert lines[0] == original_lines[0] - # The second line should have the new password hash - assert lines[1].split(":")[:2] == [user, password] - - # - # CASE 2: Corner case: no /etc/shadow entry for user - # - user = "baz" - user_exists_mock = MagicMock( - side_effect=lambda x, **y: 0 if x == ["id", user] else DEFAULT - ) - with patch( - "salt.utils.files.fopen", mock_open(read_data=data) - ) as shadow_mock, patch("os.path.isfile", isfile_mock), patch.object( - shadow, "info", shadow_info_mock - ), patch.dict( - shadow.__salt__, {"cmd.retcode": user_exists_mock} - ), patch.dict( - shadow.__grains__, {"os": "CentOS"} - ): - result = shadow.set_password(user, password, use_usermod=False) - - assert result - filehandles = shadow_mock.filehandles["/etc/shadow"] - # We should only have opened twice, once to read the contents and once - # to write. 
- assert len(filehandles) == 2 - # We're just appending to the file, not rewriting - assert filehandles[1].mode == "a+" - # We should only have written to the file once - assert len(filehandles[1].write_calls) == 1 - # Make sure we wrote the correct info - assert filehandles[1].write_calls[0].split(":")[:2] == [user, password] - - def test_info(self): - """ - Test if info shows the correct user information - """ - - # First test is with a succesful call - expected_result = [ - ("expire", -1), - ("inact", -1), - ("lstchg", 31337), - ("max", 99999), - ("min", 0), - ("name", "foo"), - ("passwd", _HASHES["sha512"]["pw_hash"]), - ("warn", 7), - ] - getspnam_return = spwd.struct_spwd( - ["foo", _HASHES["sha512"]["pw_hash"], 31337, 0, 99999, 7, -1, -1, -1] - ) - with patch("spwd.getspnam", return_value=getspnam_return): - result = shadow.info("foo") - self.assertEqual( - expected_result, sorted(result.items(), key=lambda x: x[0]) - ) - - # The next two is for a non-existent user - expected_result = [ - ("expire", ""), - ("inact", ""), - ("lstchg", ""), - ("max", ""), - ("min", ""), - ("name", ""), - ("passwd", ""), - ("warn", ""), - ] - # We get KeyError exception for non-existent users in glibc based systems - getspnam_return = KeyError - with patch("spwd.getspnam", side_effect=getspnam_return): - result = shadow.info("foo") - self.assertEqual( - expected_result, sorted(result.items(), key=lambda x: x[0]) - ) - # And FileNotFoundError in musl based systems - getspnam_return = FileNotFoundError - with patch("spwd.getspnam", side_effect=getspnam_return): - result = shadow.info("foo") - self.assertEqual( - expected_result, sorted(result.items(), key=lambda x: x[0]) - ) - - @pytest.mark.skip_if_not_root - def test_set_password_malformed_shadow_entry(self): - """ - Test that Salt will repair a malformed shadow entry (that is, one that - doesn't have the correct number of fields). 
- """ - original_file = textwrap.dedent( - """\ - valid:s00persekr1thash:17955:::::: - tooshort:orighash:17955::::: - toolong:orighash:17955::::::: - """ - ) - original_lines = original_file.splitlines(True) - - data = { - "/etc/shadow": original_file, - "*": Exception("Attempted to open something other than /etc/shadow"), - } - isfile_mock = MagicMock( - side_effect=lambda x: True if x == "/etc/shadow" else DEFAULT - ) - password = "newhash" - shadow_info_mock = MagicMock(return_value={"passwd": password}) - - # - # CASE 1: Fix an entry with too few fields - # - user = "tooshort" - user_exists_mock = MagicMock( - side_effect=lambda x, **y: 0 if x == ["id", user] else DEFAULT - ) - with patch( - "salt.utils.files.fopen", mock_open(read_data=data) - ) as shadow_mock, patch("os.path.isfile", isfile_mock), patch.object( - shadow, "info", shadow_info_mock - ), patch.dict( - shadow.__salt__, {"cmd.retcode": user_exists_mock} - ), patch.dict( - shadow.__grains__, {"os": "CentOS"} - ): - result = shadow.set_password(user, password, use_usermod=False) - - assert result - filehandles = shadow_mock.filehandles["/etc/shadow"] - # We should only have opened twice, once to read the contents and once - # to write. - assert len(filehandles) == 2 - # We're rewriting the entire file - assert filehandles[1].mode == "w+" - # We should be calling writelines instead of write, to rewrite the - # entire file. - assert len(filehandles[1].writelines_calls) == 1 - # Make sure we wrote the correct info - lines = filehandles[1].writelines_calls[0] - # Should only have the same three users in the file - assert len(lines) == 3 - # The first and third line should be unchanged - assert lines[0] == original_lines[0] - assert lines[2] == original_lines[2] - # The second line should have the new password hash, and it should have - # gotten "fixed" by adding another colon. 
- fixed = lines[1].split(":") - assert fixed[:2] == [user, password] - assert len(fixed) == 9 - - # - # CASE 2: Fix an entry with too many fields - # - user = "toolong" - user_exists_mock = MagicMock( - side_effect=lambda x, **y: 0 if x == ["id", user] else DEFAULT - ) - with patch( - "salt.utils.files.fopen", mock_open(read_data=data) - ) as shadow_mock, patch("os.path.isfile", isfile_mock), patch.object( - shadow, "info", shadow_info_mock - ), patch.dict( - shadow.__salt__, {"cmd.retcode": user_exists_mock} - ), patch.dict( - shadow.__grains__, {"os": "CentOS"} - ): - result = shadow.set_password(user, password, use_usermod=False) - - assert result - filehandles = shadow_mock.filehandles["/etc/shadow"] - # We should only have opened twice, once to read the contents and once - # to write. - assert len(filehandles) == 2 - # We're rewriting the entire file - assert filehandles[1].mode == "w+" - # We should be calling writelines instead of write, to rewrite the - # entire file. - assert len(filehandles[1].writelines_calls) == 1 - # Make sure we wrote the correct info - lines = filehandles[1].writelines_calls[0] - # Should only have the same three users in the file - assert len(lines) == 3 - # The first and second line should be unchanged - assert lines[0] == original_lines[0] - assert lines[1] == original_lines[1] - # The third line should have the new password hash, and it should have - # gotten "fixed" by reducing it to 9 fields instead of 10. 
- fixed = lines[2].split(":") - assert fixed[:2] == [user, password] - assert len(fixed) == 9 - - @pytest.mark.skip_if_not_root - def test_list_users(self): - """ - Test if it returns a list of all users - """ - self.assertTrue(shadow.list_users()) diff --git a/tests/unit/modules/test_nacl.py b/tests/unit/modules/test_nacl.py deleted file mode 100644 index c283f264e565..000000000000 --- a/tests/unit/modules/test_nacl.py +++ /dev/null @@ -1,91 +0,0 @@ -""" -Tests for the nacl execution module -""" - -import sys - -import pytest - -import salt.utils.stringutils -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.unit import TestCase - -try: - import libnacl.sealed # pylint: disable=unused-import - import libnacl.secret # pylint: disable=unused-import - - import salt.modules.nacl as nacl - - HAS_LIBNACL = True -except (ImportError, OSError, AttributeError): - HAS_LIBNACL = False - - -@pytest.mark.skipif(sys.version_info >= (3, 10), reason="Segfaults with python 3.10") -@pytest.mark.skipif( - not HAS_LIBNACL, reason="skipping test_nacl, reason=libnacl is unavailable" -) -class NaclTest(TestCase, LoaderModuleMockMixin): - """ - Test the nacl runner - """ - - def setup_loader_modules(self): - self.unencrypted_data = salt.utils.stringutils.to_bytes("hello") - self.opts = salt.config.DEFAULT_MINION_OPTS.copy() - utils = salt.loader.utils(self.opts) - funcs = salt.loader.minion_mods(self.opts, utils=utils, whitelist=["nacl"]) - - return { - nacl: {"__opts__": self.opts, "__utils__": utils, "__salt__": funcs}, - } - - def setUp(self): - # Generate the keys - ret = nacl.keygen() - self.assertIn("pk", ret) - self.assertIn("sk", ret) - self.pk = ret["pk"] - self.sk = ret["sk"] - - def test_keygen(self): - """ - Test keygen - """ - self.assertEqual(len(self.pk), 44) - self.assertEqual(len(self.sk), 44) - - def test_enc_dec(self): - """ - Generate keys, encrypt, then decrypt. 
- """ - # Encrypt with pk - encrypted_data = nacl.enc(data=self.unencrypted_data, pk=self.pk) - - # Decrypt with sk - decrypted_data = nacl.dec(data=encrypted_data, sk=self.sk) - self.assertEqual(self.unencrypted_data, decrypted_data) - - def test_sealedbox_enc_dec(self): - """ - Generate keys, encrypt, then decrypt. - """ - # Encrypt with pk - encrypted_data = nacl.sealedbox_encrypt(data=self.unencrypted_data, pk=self.pk) - - # Decrypt with sk - decrypted_data = nacl.sealedbox_decrypt(data=encrypted_data, sk=self.sk) - - self.assertEqual(self.unencrypted_data, decrypted_data) - - def test_secretbox_enc_dec(self): - """ - Generate keys, encrypt, then decrypt. - """ - # Encrypt with sk - encrypted_data = nacl.secretbox_encrypt(data=self.unencrypted_data, sk=self.sk) - - # Decrypt with sk - decrypted_data = nacl.secretbox_decrypt(data=encrypted_data, sk=self.sk) - - self.assertEqual(self.unencrypted_data, decrypted_data) diff --git a/tests/unit/modules/test_nilrt_ip.py b/tests/unit/modules/test_nilrt_ip.py index 1261473edb45..50dc13b20b80 100644 --- a/tests/unit/modules/test_nilrt_ip.py +++ b/tests/unit/modules/test_nilrt_ip.py @@ -28,7 +28,7 @@ def test_change_state_down_state(self): "salt.modules.nilrt_ip._change_dhcp_config", return_value=True ) as change_dhcp_config_mock: assert nilrt_ip._change_state("test_interface", "down") - assert change_dhcp_config_mock.called_with("test_interface", False) + change_dhcp_config_mock.assert_called_with("test_interface", False) def test_change_state_up_state(self): """ @@ -42,7 +42,7 @@ def test_change_state_up_state(self): "salt.modules.nilrt_ip._change_dhcp_config", return_value=True ) as change_dhcp_config_mock: assert nilrt_ip._change_state("test_interface", "up") - assert change_dhcp_config_mock.called_with("test_interface") + change_dhcp_config_mock.assert_called_with("test_interface") def test_set_static_all_with_dns(self): """ diff --git a/tests/unit/modules/test_npm.py b/tests/unit/modules/test_npm.py deleted file 
mode 100644 index 97e7adabe408..000000000000 --- a/tests/unit/modules/test_npm.py +++ /dev/null @@ -1,195 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - - -import textwrap - -import salt.modules.npm as npm -import salt.utils.json -from salt.exceptions import CommandExecutionError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class NpmTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.npm - """ - - def setup_loader_modules(self): - patcher = patch( - "salt.modules.npm._check_valid_version", MagicMock(return_value=True) - ) - patcher.start() - self.addCleanup(patcher.stop) - return {npm: {}} - - # 'install' function tests: 4 - - def test_install(self): - """ - Test if it installs an NPM package. - """ - mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertRaises(CommandExecutionError, npm.install, "coffee-script") - - # This is at least somewhat closer to the actual output format. 
- mock_json_out = textwrap.dedent( - """\ - [ - { - "salt": "SALT" - } - ]""" - ) - - # Successful run, expected output format - mock = MagicMock( - return_value={"retcode": 0, "stderr": "", "stdout": mock_json_out} - ) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertEqual(npm.install("coffee-script"), [{"salt": "SALT"}]) - - mock_json_out_extra = textwrap.dedent( - """\ - Compilation output here - - [bcrypt] Success: "/tmp/node_modules/bcrypt/foo" is installed via remote" - [grpc] Success: "/usr/lib/node_modules/@foo/bar" is installed via remote" - [ - { - "from" : "express@", - "name" : "express", - "dependencies" : { - "escape-html" : { - "from" : "escape-html@~1.0.3", - "dependencies" : {}, - "version" : "1.0.3" - } - }, - "version" : "4.16.3" - } - ]""" - ) - extra_expected = [ - { - "dependencies": { - "escape-html": { - "dependencies": {}, - "from": "escape-html@~1.0.3", - "version": "1.0.3", - } - }, - "from": "express@", - "name": "express", - "version": "4.16.3", - } - ] - - # Successful run, expected output format with additional leading text - mock = MagicMock( - return_value={"retcode": 0, "stderr": "", "stdout": mock_json_out_extra} - ) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertEqual(npm.install("coffee-script"), extra_expected) - - # Successful run, unexpected output format - mock = MagicMock(return_value={"retcode": 0, "stderr": "", "stdout": "SALT"}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - mock_err = MagicMock(side_effect=ValueError()) - # When JSON isn't successfully parsed, return should equal input - with patch.object(salt.utils.json, "loads", mock_err): - self.assertEqual(npm.install("coffee-script"), "SALT") - - # 'uninstall' function tests: 1 - - def test_uninstall(self): - """ - Test if it uninstalls an NPM package. 
- """ - mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertFalse(npm.uninstall("coffee-script")) - - mock = MagicMock(return_value={"retcode": 0, "stderr": ""}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertTrue(npm.uninstall("coffee-script")) - - # 'list_' function tests: 1 - - def test_list(self): - """ - Test if it list installed NPM packages. - """ - mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertRaises(CommandExecutionError, npm.list_, "coffee-script") - - mock = MagicMock( - return_value={ - "retcode": 0, - "stderr": "error", - "stdout": '{"salt": ["SALT"]}', - } - ) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - mock_err = MagicMock(return_value={"dependencies": "SALT"}) - with patch.object(salt.utils.json, "loads", mock_err): - self.assertEqual(npm.list_("coffee-script"), "SALT") - - # 'cache_clean' function tests: 1 - - def test_cache_clean(self): - """ - Test if it cleans the cached NPM packages. - """ - mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertFalse(npm.cache_clean()) - - mock = MagicMock(return_value={"retcode": 0}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertTrue(npm.cache_clean()) - - mock = MagicMock(return_value={"retcode": 0}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertTrue(npm.cache_clean("coffee-script")) - - # 'cache_list' function tests: 1 - - def test_cache_list(self): - """ - Test if it lists the NPM cache. 
- """ - mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertRaises(CommandExecutionError, npm.cache_list) - - mock = MagicMock( - return_value={"retcode": 0, "stderr": "error", "stdout": ["~/.npm"]} - ) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertEqual(npm.cache_list(), ["~/.npm"]) - - mock = MagicMock(return_value={"retcode": 0, "stderr": "error", "stdout": ""}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertEqual(npm.cache_list("coffee-script"), "") - - # 'cache_path' function tests: 1 - - def test_cache_path(self): - """ - Test if it prints the NPM cache path. - """ - mock = MagicMock(return_value={"retcode": 1, "stderr": "error"}) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertEqual(npm.cache_path(), "error") - - mock = MagicMock( - return_value={"retcode": 0, "stderr": "error", "stdout": "/User/salt/.npm"} - ) - with patch.dict(npm.__salt__, {"cmd.run_all": mock}): - self.assertEqual(npm.cache_path(), "/User/salt/.npm") diff --git a/tests/unit/modules/test_openscap.py b/tests/unit/modules/test_openscap.py deleted file mode 100644 index 045c37f7c9bf..000000000000 --- a/tests/unit/modules/test_openscap.py +++ /dev/null @@ -1,213 +0,0 @@ -import subprocess - -import salt.modules.openscap as openscap -from tests.support.mock import MagicMock, Mock, patch -from tests.support.unit import TestCase - - -class OpenscapTestCase(TestCase): - - random_temp_dir = "/tmp/unique-name" - policy_file = "/usr/share/openscap/policy-file-xccdf.xml" - - def setUp(self): - import salt.modules.openscap - - salt.modules.openscap.__salt__ = MagicMock() - patchers = [ - patch("salt.modules.openscap.__salt__", MagicMock()), - patch("salt.modules.openscap.shutil.rmtree", Mock()), - patch( - "salt.modules.openscap.tempfile.mkdtemp", - Mock(return_value=self.random_temp_dir), - ), - ] - for patcher in patchers: - self.apply_patch(patcher) - - 
def apply_patch(self, patcher): - patcher.start() - self.addCleanup(patcher.stop) - - def test_openscap_xccdf_eval_success(self): - with patch( - "salt.modules.openscap.Popen", - MagicMock( - return_value=Mock( - **{"returncode": 0, "communicate.return_value": ("", "")} - ) - ), - ): - response = openscap.xccdf( - "eval --profile Default {}".format(self.policy_file) - ) - - self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1) - expected_cmd = [ - "oscap", - "xccdf", - "eval", - "--oval-results", - "--results", - "results.xml", - "--report", - "report.html", - "--profile", - "Default", - self.policy_file, - ] - openscap.Popen.assert_called_once_with( - expected_cmd, - cwd=openscap.tempfile.mkdtemp.return_value, - stderr=subprocess.PIPE, - stdout=subprocess.PIPE, - ) - openscap.__salt__["cp.push_dir"].assert_called_once_with( - self.random_temp_dir - ) - self.assertEqual(openscap.shutil.rmtree.call_count, 1) - self.assertEqual( - response, - { - "upload_dir": self.random_temp_dir, - "error": "", - "success": True, - "returncode": 0, - }, - ) - - def test_openscap_xccdf_eval_success_with_failing_rules(self): - with patch( - "salt.modules.openscap.Popen", - MagicMock( - return_value=Mock( - **{"returncode": 2, "communicate.return_value": ("", "some error")} - ) - ), - ): - response = openscap.xccdf( - "eval --profile Default {}".format(self.policy_file) - ) - - self.assertEqual(openscap.tempfile.mkdtemp.call_count, 1) - expected_cmd = [ - "oscap", - "xccdf", - "eval", - "--oval-results", - "--results", - "results.xml", - "--report", - "report.html", - "--profile", - "Default", - self.policy_file, - ] - openscap.Popen.assert_called_once_with( - expected_cmd, - cwd=openscap.tempfile.mkdtemp.return_value, - stderr=subprocess.PIPE, - stdout=subprocess.PIPE, - ) - openscap.__salt__["cp.push_dir"].assert_called_once_with( - self.random_temp_dir - ) - self.assertEqual(openscap.shutil.rmtree.call_count, 1) - self.assertEqual( - response, - { - "upload_dir": 
self.random_temp_dir, - "error": "some error", - "success": True, - "returncode": 2, - }, - ) - - def test_openscap_xccdf_eval_fail_no_profile(self): - response = openscap.xccdf("eval --param Default /unknown/param") - error = "the following arguments are required: --profile" - self.assertEqual( - response, - {"error": error, "upload_dir": None, "success": False, "returncode": None}, - ) - - def test_openscap_xccdf_eval_success_ignore_unknown_params(self): - with patch( - "salt.modules.openscap.Popen", - MagicMock( - return_value=Mock( - **{"returncode": 2, "communicate.return_value": ("", "some error")} - ) - ), - ): - response = openscap.xccdf( - "eval --profile Default --param Default /policy/file" - ) - self.assertEqual( - response, - { - "upload_dir": self.random_temp_dir, - "error": "some error", - "success": True, - "returncode": 2, - }, - ) - expected_cmd = [ - "oscap", - "xccdf", - "eval", - "--oval-results", - "--results", - "results.xml", - "--report", - "report.html", - "--profile", - "Default", - "/policy/file", - ] - openscap.Popen.assert_called_once_with( - expected_cmd, - cwd=openscap.tempfile.mkdtemp.return_value, - stderr=subprocess.PIPE, - stdout=subprocess.PIPE, - ) - - def test_openscap_xccdf_eval_evaluation_error(self): - with patch( - "salt.modules.openscap.Popen", - MagicMock( - return_value=Mock( - **{ - "returncode": 1, - "communicate.return_value": ("", "evaluation error"), - } - ) - ), - ): - response = openscap.xccdf( - "eval --profile Default {}".format(self.policy_file) - ) - - self.assertEqual( - response, - { - "upload_dir": None, - "error": "evaluation error", - "success": False, - "returncode": 1, - }, - ) - - def test_openscap_xccdf_eval_fail_not_implemented_action(self): - response = openscap.xccdf("info {}".format(self.policy_file)) - mock_err = "argument action: invalid choice: 'info' (choose from 'eval')" - - self.assertEqual( - response, - { - "upload_dir": None, - "error": mock_err, - "success": False, - "returncode": None, 
- }, - ) diff --git a/tests/unit/modules/test_postgres.py b/tests/unit/modules/test_postgres.py deleted file mode 100644 index 6f77fc039029..000000000000 --- a/tests/unit/modules/test_postgres.py +++ /dev/null @@ -1,2086 +0,0 @@ -import datetime -import logging -import re - -import salt.modules.postgres as postgres -from salt.exceptions import SaltInvocationError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import Mock, call, patch -from tests.support.unit import TestCase - -test_list_db_csv = ( - "Name,Owner,Encoding,Collate,Ctype,Access privileges,Tablespace\n" - "template1,postgres,LATIN1,en_US,en_US" - ',"{=c/postgres,postgres=CTc/postgres}",pg_default\n' - "template0,postgres,LATIN1,en_US,en_US" - ',"{=c/postgres,postgres=CTc/postgres}",pg_default\n' - "postgres,postgres,LATIN1,en_US,en_US,,pg_default\n" - "test_db,postgres,LATIN1,en_US,en_US,,pg_default" -) - -test_list_schema_csv = ( - "name,owner,acl\n" - 'public,postgres,"{postgres=UC/postgres,=UC/postgres}"\n' - 'pg_toast,postgres,""' -) - -test_list_language_csv = "Name\ninternal\nc\nsql\nplpgsql\n" - -test_privileges_list_table_csv = ( - "name\n" - '"{baruwatest=arwdDxt/baruwatest,bayestest=arwd/baruwatest,baruwa=a*r*w*d*D*x*t*/baruwatest}"\n' -) - -test_privileges_list_group_csv = ( - "rolname,admin_option\nbaruwa,f\nbaruwatest2,t\nbaruwatest,f\n" -) - -log = logging.getLogger(__name__) - - -class PostgresTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - patcher = patch("salt.utils.path.which", Mock(return_value="/usr/bin/pgsql")) - patcher.start() - self.addCleanup(patcher.stop) - return { - postgres: { - "__grains__": {"os_family": "Linux"}, - "__salt__": { - "config.option": Mock(), - "cmd.run_all": Mock(), - "file.chown": Mock(), - "file.remove": Mock(), - }, - } - } - - def test_run_psql(self): - postgres._run_psql('echo "hi"') - cmd = postgres.__salt__["cmd.run_all"] - - self.assertEqual("postgres", cmd.call_args[1]["runas"]) - - 
def test_db_alter(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - postgres.db_alter( - "dbname", - user="testuser", - host="testhost", - port="testport", - maintenance_db="maint_db", - password="foo", - tablespace="testspace", - owner="otheruser", - runas="foo", - ) - postgres._run_psql.assert_has_calls( - [ - call( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "maint_db", - "-c", - 'ALTER DATABASE "dbname" OWNER TO "otheruser"', - ], - host="testhost", - user="testuser", - password="foo", - runas="foo", - port="testport", - ), - call( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "maint_db", - "-c", - 'ALTER DATABASE "dbname" SET TABLESPACE "testspace"', - ], - host="testhost", - user="testuser", - password="foo", - runas="foo", - port="testport", - ), - ] - ) - - def test_db_alter_owner_recurse(self): - with patch( - "salt.modules.postgres.owner_to", Mock(return_value={"retcode": None}) - ): - postgres.db_alter( - "dbname", - user="testuser", - host="testhost", - port="testport", - maintenance_db="maint_db", - password="foo", - tablespace="testspace", - owner="otheruser", - owner_recurse=True, - runas="foo", - ) - postgres.owner_to.assert_called_once_with( - "dbname", - "otheruser", - user="testuser", - host="testhost", - port="testport", - password="foo", - runas="foo", - ) - - def test_db_create(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - postgres.db_create( - "dbname", - user="testuser", - host="testhost", - port="testport", - maintenance_db="maint_db", - password="foo", - tablespace="testspace", - owner="otheruser", - runas="foo", - ) - - 
postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "maint_db", - "-c", - 'CREATE DATABASE "dbname" WITH TABLESPACE = "testspace" ' - 'OWNER = "otheruser"', - ], - host="testhost", - user="testuser", - password="foo", - runas="foo", - port="testport", - ) - - def test_db_create_empty_string_param(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - postgres.db_create( - "dbname", - lc_collate="", - encoding="utf8", - user="testuser", - host="testhost", - port=1234, - maintenance_db="maint_db", - password="foo", - ) - - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "1234", - "--dbname", - "maint_db", - "-c", - "CREATE DATABASE \"dbname\" WITH ENCODING = 'utf8' LC_COLLATE = ''", - ], - host="testhost", - password="foo", - port=1234, - runas=None, - user="testuser", - ) - - def test_db_create_with_trivial_sql_injection(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - self.assertRaises( - SaltInvocationError, - postgres.db_create, - "dbname", - lc_collate="foo' ENCODING='utf8", - ) - - def test_db_exists(self): - with patch( - "salt.modules.postgres._run_psql", - Mock(return_value={"retcode": 0, "stdout": test_list_db_csv}), - ): - ret = postgres.db_exists( - "test_db", - user="testuser", - host="testhost", - port="testport", - maintenance_db="maint_db", - password="foo", - runas="foo", - ) - self.assertTrue(ret) - - def test_db_list(self): - with patch( - "salt.modules.postgres._run_psql", - Mock(return_value={"retcode": 0, "stdout": test_list_db_csv}), - ): - ret = postgres.db_list( - user="testuser", - host="testhost", - 
port="testport", - maintenance_db="maint_db", - password="foo", - runas="foo", - ) - self.assertDictEqual( - ret, - { - "test_db": { - "Encoding": "LATIN1", - "Ctype": "en_US", - "Tablespace": "pg_default", - "Collate": "en_US", - "Owner": "postgres", - "Access privileges": "", - }, - "template1": { - "Encoding": "LATIN1", - "Ctype": "en_US", - "Tablespace": "pg_default", - "Collate": "en_US", - "Owner": "postgres", - "Access privileges": "{=c/postgres,postgres=CTc/postgres}", - }, - "template0": { - "Encoding": "LATIN1", - "Ctype": "en_US", - "Tablespace": "pg_default", - "Collate": "en_US", - "Owner": "postgres", - "Access privileges": "{=c/postgres,postgres=CTc/postgres}", - }, - "postgres": { - "Encoding": "LATIN1", - "Ctype": "en_US", - "Tablespace": "pg_default", - "Collate": "en_US", - "Owner": "postgres", - "Access privileges": "", - }, - }, - ) - - def test_db_remove(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - postgres.db_remove( - "test_db", - user="testuser", - host="testhost", - port="testport", - maintenance_db="maint_db", - password="foo", - runas="foo", - ) - - calls = ( - call( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "maint_db", - "-c", - 'REVOKE CONNECT ON DATABASE "test_db" FROM public;', - ], - host="testhost", - password="foo", - port="testport", - runas="foo", - user="testuser", - ), - call( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "maint_db", - "-c", - "SELECT pid, pg_terminate_backend(pid) FROM pg_stat_activity" - " WHERE datname = 'test_db' AND pid <> pg_backend_pid();", - ], - host="testhost", - password="foo", - port="testport", - runas="foo", - user="testuser", - ), - call( - [ - 
"/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "maint_db", - "-c", - 'DROP DATABASE "test_db";', - ], - host="testhost", - password="foo", - port="testport", - runas="foo", - user="testuser", - ), - ) - - postgres._run_psql.assert_has_calls(calls, any_order=True) - - def test_group_create(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch("salt.modules.postgres.user_exists", Mock(return_value=False)): - postgres.group_create( - "testgroup", - user="testuser", - host="testhost", - port="testport", - maintenance_db="maint_db", - password="foo", - createdb=False, - encrypted=False, - superuser=False, - replication=False, - rolepassword="testrolepass", - groups="testgroup", - runas="foo", - ) - # postgres._run_psql.call_args[0][0] will contain the list of CLI args. - # The first 14 elements of this list are initial args used in all (or - # virtually all) commands run through _run_psql(), so the actual SQL - # query will be in the 15th argument. 
- self.assertTrue( - postgres._run_psql.call_args[0][0][14].startswith("CREATE ROLE") - ) - - def test_group_remove(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch("salt.modules.postgres.user_exists", Mock(return_value=True)): - postgres.group_remove( - "testgroup", - user="testuser", - host="testhost", - port="testport", - maintenance_db="maint_db", - password="foo", - runas="foo", - ) - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "maint_db", - "-c", - 'DROP ROLE "testgroup"', - ], - host="testhost", - user="testuser", - password="foo", - runas="foo", - port="testport", - ) - - def test_group_update(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.role_get", - Mock(return_value={"superuser": False}), - ): - postgres.group_update( - "testgroup", - user='"testuser"', - host="testhost", - port="testport", - maintenance_db="maint_db", - password="foo", - createdb=False, - encrypted=False, - replication=False, - rolepassword="test_role_pass", - groups="testgroup", - runas="foo", - ) - # postgres._run_psql.call_args[0][0] will contain the list of CLI args. - # The first 14 elements of this list are initial args used in all (or - # virtually all) commands run through _run_psql(), so the actual SQL - # query will be in the 15th argument. 
- self.assertTrue( - re.match( - 'ALTER.* "testgroup" .* UNENCRYPTED PASSWORD', - postgres._run_psql.call_args[0][0][14], - ) - ) - - def test_user_create(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch("salt.modules.postgres.user_exists", Mock(return_value=False)): - postgres.user_create( - "testuser", - user="testuser", - host="testhost", - port="testport", - maintenance_db="maint_test", - password="test_pass", - login=True, - createdb=False, - createroles=False, - encrypted=False, - superuser=False, - replication=False, - rolepassword="test_role_pass", - valid_until="2042-07-01", - groups="test_groups", - runas="foo", - ) - # postgres._run_psql.call_args[0][0] will contain the list of CLI args. - # The first 14 elements of this list are initial args used in all (or - # virtually all) commands run through _run_psql(), so the actual SQL - # query will be in the 15th argument. - call = postgres._run_psql.call_args[0][0][14] - self.assertTrue(re.match('CREATE ROLE "testuser"', call)) - for i in ( - "INHERIT", - "NOCREATEDB", - "NOCREATEROLE", - "NOSUPERUSER", - "NOREPLICATION", - "LOGIN", - "UNENCRYPTED", - "PASSWORD", - "VALID UNTIL", - ): - self.assertTrue(i in call, "{} not in {}".format(i, call)) - - def test_user_exists(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch("salt.modules.postgres.version", Mock(return_value="9.1")): - with patch( - "salt.modules.postgres.psql_query", - Mock( - return_value=[ - { - "name": "test_user", - "superuser": "t", - "inherits privileges": "t", - "can create roles": "t", - "can create databases": "t", - "can update system catalogs": "t", - "can login": "t", - "replication": None, - "password": "test_password", - "connections": "-1", - "groups": "", - "expiry time": "", - "defaults variables": None, - } - ] - ), - ): - ret = postgres.user_exists( - "test_user", - user="test_user", - host="test_host", - 
port="test_port", - maintenance_db="maint_db", - password="test_password", - runas="foo", - ) - self.assertTrue(ret) - - def test_user_list(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch("salt.modules.postgres.version", Mock(return_value="9.1")): - with patch( - "salt.modules.postgres.psql_query", - Mock( - return_value=[ - { - "name": "test_user", - "superuser": "t", - "inherits privileges": "t", - "can create roles": "t", - "can create databases": "t", - "can update system catalogs": "t", - "can login": "t", - "replication": None, - "connections": "-1", - "groups": "", - "expiry time": "2017-08-16 08:57:46", - "defaults variables": None, - } - ] - ), - ): - ret = postgres.user_list( - "test_user", - host="test_host", - port="test_port", - maintenance_db="maint_db", - password="test_password", - runas="foo", - ) - - self.assertDictEqual( - ret, - { - "test_user": { - "superuser": True, - "defaults variables": None, - "can create databases": True, - "can create roles": True, - "connections": None, - "replication": None, - "expiry time": datetime.datetime( - 2017, 8, 16, 8, 57, 46 - ), - "can login": True, - "can update system catalogs": True, - "groups": [], - "inherits privileges": True, - } - }, - ) - - def test_user_remove(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch("salt.modules.postgres.version", Mock(return_value="9.1")): - with patch( - "salt.modules.postgres.user_exists", Mock(return_value=True) - ): - postgres.user_remove( - "testuser", - user="testuser", - host="testhost", - port="testport", - maintenance_db="maint_db", - password="testpassword", - runas="foo", - ) - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "maint_db", - "-c", - 'DROP 
ROLE "testuser"', - ], - host="testhost", - port="testport", - user="testuser", - password="testpassword", - runas="foo", - ) - - def test_user_update(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.role_get", - Mock(return_value={"superuser": False}), - ): - postgres.user_update( - "test_username", - user="test_user", - host="test_host", - port="test_port", - maintenance_db="test_maint", - password="test_pass", - createdb=False, - createroles=False, - encrypted=False, - inherit=True, - login=True, - replication=False, - rolepassword="test_role_pass", - valid_until="2017-07-01", - groups="test_groups", - runas="foo", - ) - # postgres._run_psql.call_args[0][0] will contain the list of CLI args. - # The first 14 elements of this list are initial args used in all (or - # virtually all) commands run through _run_psql(), so the actual SQL - # query will be in the 15th argument. - self.assertTrue( - re.match( - 'ALTER ROLE "test_username" WITH INHERIT NOCREATEDB ' - "NOCREATEROLE NOREPLICATION LOGIN " - "UNENCRYPTED PASSWORD ['\"]{0,5}test_role_pass['\"]{0,5} " - "VALID UNTIL '2017-07-01';" - ' GRANT "test_groups" TO "test_username"', - postgres._run_psql.call_args[0][0][14], - ) - ) - - def test_user_update2(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.role_get", - Mock(return_value={"superuser": False}), - ): - postgres.user_update( - "test_username", - user="test_user", - host="test_host", - port="test_port", - maintenance_db="test_maint", - password="test_pass", - createdb=False, - createroles=True, - encrypted=False, - inherit=True, - login=True, - replication=False, - groups="test_groups", - runas="foo", - ) - # postgres._run_psql.call_args[0][0] will contain the list of CLI args. 
- # The first 14 elements of this list are initial args used in all (or - # virtually all) commands run through _run_psql(), so the actual SQL - # query will be in the 15th argument. - self.assertTrue( - re.match( - 'ALTER ROLE "test_username" WITH INHERIT NOCREATEDB ' - "CREATEROLE NOREPLICATION LOGIN;" - ' GRANT "test_groups" TO "test_username"', - postgres._run_psql.call_args[0][0][14], - ) - ) - - def test_user_update3(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.role_get", - Mock(return_value={"superuser": False}), - ): - postgres.user_update( - "test_username", - user="test_user", - host="test_host", - port="test_port", - maintenance_db="test_maint", - password="test_pass", - createdb=False, - createroles=True, - encrypted=False, - inherit=True, - login=True, - rolepassword=False, - replication=False, - groups="test_groups", - runas="foo", - ) - # postgres._run_psql.call_args[0][0] will contain the list of CLI args. - # The first 14 elements of this list are initial args used in all (or - # virtually all) commands run through _run_psql(), so the actual SQL - # query will be in the 15th argument. 
- self.assertTrue( - re.match( - 'ALTER ROLE "test_username" WITH INHERIT NOCREATEDB ' - "CREATEROLE NOREPLICATION LOGIN NOPASSWORD;" - ' GRANT "test_groups" TO "test_username"', - postgres._run_psql.call_args[0][0][14], - ) - ) - - def test_user_update_encrypted_passwd(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.role_get", - Mock(return_value={"superuser": False}), - ): - postgres.user_update( - "test_username", - user="test_user", - host="test_host", - port="test_port", - maintenance_db="test_maint", - password="test_pass", - createdb=False, - createroles=True, - encrypted=True, - inherit=True, - login=True, - rolepassword="foobar", - replication=False, - groups="test_groups", - runas="foo", - ) - # postgres._run_psql.call_args[0][0] will contain the list of CLI args. - # The first 14 elements of this list are initial args used in all (or - # virtually all) commands run through _run_psql(), so the actual SQL - # query will be in the 15th argument. - self.assertTrue( - re.match( - 'ALTER ROLE "test_username" WITH INHERIT NOCREATEDB ' - "CREATEROLE NOREPLICATION LOGIN " - "ENCRYPTED PASSWORD " - "['\"]{0,5}md531c27e68d3771c392b52102c01be1da1['\"]{0,5}" - '; GRANT "test_groups" TO "test_username"', - postgres._run_psql.call_args[0][0][14], - ) - ) - - def test_version(self): - with patch( - "salt.modules.postgres._run_psql", - Mock(return_value={"retcode": 0, "stdout": "9.1.9"}), - ): - postgres.version( - user="test_user", - host="test_host", - port="test_port", - maintenance_db="test_maint", - password="test_pass", - runas="foo", - ) - # postgres._run_psql.call_args[0][0] will contain the list of CLI args. - # The first 14 elements of this list are initial args used in all (or - # virtually all) commands run through _run_psql(), so the actual SQL - # query will be in the 15th argument. 
- self.assertTrue( - re.match( - "SELECT setting FROM pg_catalog.pg_settings", - postgres._run_psql.call_args[0][0][14], - ) - ) - - def test_installed_extensions(self): - with patch( - "salt.modules.postgres.psql_query", - Mock(return_value=[{"extname": "foo", "extversion": "1"}]), - ): - exts = postgres.installed_extensions() - self.assertEqual(exts, {"foo": {"extversion": "1", "extname": "foo"}}) - - def test_available_extensions(self): - with patch( - "salt.modules.postgres.psql_query", - Mock(return_value=[{"name": "foo", "default_version": "1"}]), - ): - exts = postgres.available_extensions() - self.assertEqual(exts, {"foo": {"default_version": "1", "name": "foo"}}) - - def test_drop_extension2(self): - with patch( - "salt.modules.postgres.installed_extensions", Mock(side_effect=[{}, {}]) - ): - with patch( - "salt.modules.postgres._psql_prepare_and_run", Mock(return_value=None) - ): - with patch( - "salt.modules.postgres.available_extensions", - Mock(return_value={"foo": {"default_version": "1", "name": "foo"}}), - ): - self.assertEqual(postgres.drop_extension("foo"), True) - - def test_drop_extension3(self): - with patch( - "salt.modules.postgres.installed_extensions", - Mock(side_effect=[{"foo": {"extversion": "1", "extname": "foo"}}, {}]), - ): - with patch( - "salt.modules.postgres._psql_prepare_and_run", Mock(return_value=None) - ): - with patch( - "salt.modules.postgres.available_extensions", - Mock(return_value={"foo": {"default_version": "1", "name": "foo"}}), - ): - self.assertEqual(postgres.drop_extension("foo"), True) - - def test_drop_extension1(self): - with patch( - "salt.modules.postgres.installed_extensions", - Mock( - side_effect=[ - {"foo": {"extversion": "1", "extname": "foo"}}, - {"foo": {"extversion": "1", "extname": "foo"}}, - ] - ), - ): - with patch( - "salt.modules.postgres._psql_prepare_and_run", Mock(return_value=None) - ): - with patch( - "salt.modules.postgres.available_extensions", - Mock(return_value={"foo": {"default_version": 
"1", "name": "foo"}}), - ): - self.assertEqual(postgres.drop_extension("foo"), False) - - def test_create_mtdata(self): - with patch( - "salt.modules.postgres.installed_extensions", - Mock( - return_value={ - "foo": { - "extversion": "0.8", - "extrelocatable": "t", - "schema_name": "foo", - "extname": "foo", - } - }, - ), - ): - with patch( - "salt.modules.postgres.available_extensions", - Mock(return_value={"foo": {"default_version": "1.4", "name": "foo"}}), - ): - ret = postgres.create_metadata("foo", schema="bar", ext_version="1.4") - self.assertTrue(postgres._EXTENSION_INSTALLED in ret) - self.assertTrue(postgres._EXTENSION_TO_UPGRADE in ret) - self.assertTrue(postgres._EXTENSION_TO_MOVE in ret) - ret = postgres.create_metadata("foo", schema="foo", ext_version="0.4") - self.assertTrue(postgres._EXTENSION_INSTALLED in ret) - self.assertFalse(postgres._EXTENSION_TO_UPGRADE in ret) - self.assertFalse(postgres._EXTENSION_TO_MOVE in ret) - ret = postgres.create_metadata("foo") - self.assertTrue(postgres._EXTENSION_INSTALLED in ret) - self.assertFalse(postgres._EXTENSION_TO_UPGRADE in ret) - self.assertFalse(postgres._EXTENSION_TO_MOVE in ret) - ret = postgres.create_metadata("foobar") - self.assertTrue(postgres._EXTENSION_NOT_INSTALLED in ret) - self.assertFalse(postgres._EXTENSION_INSTALLED in ret) - self.assertFalse(postgres._EXTENSION_TO_UPGRADE in ret) - self.assertFalse(postgres._EXTENSION_TO_MOVE in ret) - - def test_create_extension_newerthan(self): - """ - scenario of creating upgrading extensions with possible schema and - version specifications - """ - with patch( - "salt.modules.postgres.create_metadata", - Mock( - side_effect=[ - # create succeeded - [postgres._EXTENSION_NOT_INSTALLED], - [postgres._EXTENSION_INSTALLED], - [postgres._EXTENSION_NOT_INSTALLED], - [postgres._EXTENSION_INSTALLED], - # create failed - [postgres._EXTENSION_NOT_INSTALLED], - [postgres._EXTENSION_NOT_INSTALLED], - # move+upgrade succeeded - [ - postgres._EXTENSION_TO_MOVE, - 
postgres._EXTENSION_TO_UPGRADE, - postgres._EXTENSION_INSTALLED, - ], - [postgres._EXTENSION_INSTALLED], - # move succeeded - [postgres._EXTENSION_TO_MOVE, postgres._EXTENSION_INSTALLED], - [postgres._EXTENSION_INSTALLED], - # upgrade succeeded - [postgres._EXTENSION_TO_UPGRADE, postgres._EXTENSION_INSTALLED], - [postgres._EXTENSION_INSTALLED], - # upgrade failed - [postgres._EXTENSION_TO_UPGRADE, postgres._EXTENSION_INSTALLED], - [postgres._EXTENSION_TO_UPGRADE, postgres._EXTENSION_INSTALLED], - # move failed - [postgres._EXTENSION_TO_MOVE, postgres._EXTENSION_INSTALLED], - [postgres._EXTENSION_TO_MOVE, postgres._EXTENSION_INSTALLED], - ] - ), - ): - with patch( - "salt.modules.postgres._psql_prepare_and_run", Mock(return_value=None) - ): - with patch( - "salt.modules.postgres.available_extensions", - Mock( - return_value={"foo": {"default_version": "1.4", "name": "foo"}} - ), - ): - self.assertTrue(postgres.create_extension("foo")) - self.assertTrue( - re.match( - 'CREATE EXTENSION IF NOT EXISTS "foo" ;', - postgres._psql_prepare_and_run.call_args[0][0][1], - ) - ) - self.assertTrue( - postgres.create_extension( - "foo", schema="a", ext_version="b", from_version="c" - ) - ) - self.assertTrue( - re.match( - 'CREATE EXTENSION IF NOT EXISTS "foo" ' - 'WITH SCHEMA "a" VERSION b FROM c ;', - postgres._psql_prepare_and_run.call_args[0][0][1], - ) - ) - self.assertFalse(postgres.create_extension("foo")) - ret = postgres.create_extension("foo", ext_version="a", schema="b") - self.assertTrue(ret) - self.assertTrue( - re.match( - 'ALTER EXTENSION "foo" SET SCHEMA "b";' - ' ALTER EXTENSION "foo" UPDATE TO a;', - postgres._psql_prepare_and_run.call_args[0][0][1], - ) - ) - ret = postgres.create_extension("foo", ext_version="a", schema="b") - self.assertTrue(ret) - self.assertTrue( - re.match( - 'ALTER EXTENSION "foo" SET SCHEMA "b";', - postgres._psql_prepare_and_run.call_args[0][0][1], - ) - ) - ret = postgres.create_extension("foo", ext_version="a", schema="b") - 
self.assertTrue(ret) - self.assertTrue( - re.match( - 'ALTER EXTENSION "foo" UPDATE TO a;', - postgres._psql_prepare_and_run.call_args[0][0][1], - ) - ) - self.assertFalse( - postgres.create_extension("foo", ext_version="a", schema="b") - ) - self.assertFalse( - postgres.create_extension("foo", ext_version="a", schema="b") - ) - - def test_encrypt_passwords(self): - self.assertEqual(postgres._maybe_encrypt_password("foo", "bar", False), "bar") - self.assertEqual( - postgres._maybe_encrypt_password("foo", "bar", True), - "md596948aad3fcae80c08a35c9b5958cd89", - ) - - def test_schema_list(self): - with patch( - "salt.modules.postgres._run_psql", - Mock(return_value={"retcode": 0, "stdout": test_list_schema_csv}), - ): - ret = postgres.schema_list( - "maint_db", - db_user="testuser", - db_host="testhost", - db_port="testport", - db_password="foo", - ) - self.assertDictEqual( - ret, - { - "public": { - "acl": "{postgres=UC/postgres,=UC/postgres}", - "owner": "postgres", - }, - "pg_toast": {"acl": "", "owner": "postgres"}, - }, - ) - - def test_schema_exists(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.psql_query", - Mock( - return_value=[ - { - "name": "public", - "acl": "{postgres=UC/postgres,=UC/postgres}", - "owner": "postgres", - } - ] - ), - ): - ret = postgres.schema_exists("template1", "public") - self.assertTrue(ret) - - def test_schema_get(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.psql_query", - Mock( - return_value=[ - { - "name": "public", - "acl": "{postgres=UC/postgres,=UC/postgres}", - "owner": "postgres", - } - ] - ), - ): - ret = postgres.schema_get("template1", "public") - self.assertTrue(ret) - - def test_schema_get_again(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.psql_query", - 
Mock( - return_value=[ - { - "name": "public", - "acl": "{postgres=UC/postgres,=UC/postgres}", - "owner": "postgres", - } - ] - ), - ): - ret = postgres.schema_get("template1", "pg_toast") - self.assertFalse(ret) - - def test_schema_create(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch("salt.modules.postgres.schema_exists", Mock(return_value=False)): - postgres.schema_create( - "maint_db", - "testschema", - user="user", - db_host="testhost", - db_port="testport", - db_user="testuser", - db_password="testpassword", - ) - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "maint_db", - "-c", - 'CREATE SCHEMA "testschema"', - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - def test_schema_create2(self): - with patch("salt.modules.postgres.schema_exists", Mock(return_value=True)): - ret = postgres.schema_create( - "test_db", - "test_schema", - user="user", - db_host="test_host", - db_port="test_port", - db_user="test_user", - db_password="test_password", - ) - self.assertFalse(ret) - - def test_schema_remove(self): - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch("salt.modules.postgres.schema_exists", Mock(return_value=True)): - postgres.schema_remove( - "maint_db", - "testschema", - user="user", - db_host="testhost", - db_port="testport", - db_user="testuser", - db_password="testpassword", - ) - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "maint_db", - "-c", - 'DROP SCHEMA "testschema"', - ], - host="testhost", - 
port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - def test_schema_remove2(self): - with patch("salt.modules.postgres.schema_exists", Mock(return_value=False)): - ret = postgres.schema_remove( - "test_db", - "test_schema", - user="user", - db_host="test_host", - db_port="test_port", - db_user="test_user", - db_password="test_password", - ) - self.assertFalse(ret) - - def test_language_list(self): - """ - Test language listing - """ - with patch( - "salt.modules.postgres._run_psql", - Mock(return_value={"retcode": 0, "stdout": test_list_language_csv}), - ): - ret = postgres.language_list( - "testdb", - user="testuser", - host="testhost", - port="testport", - password="foo", - ) - self.assertDictEqual( - ret, - {"c": "c", "internal": "internal", "plpgsql": "plpgsql", "sql": "sql"}, - ) - - def test_language_exists(self): - """ - Test language existence check - """ - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.psql_query", - Mock( - return_value=[ - {"Name": "internal"}, - {"Name": "c"}, - {"Name": "sql"}, - {"Name": "plpgsql"}, - ] - ), - ): - with patch( - "salt.modules.postgres.language_exists", Mock(return_value=True) - ): - ret = postgres.language_exists("sql", "testdb") - self.assertTrue(ret) - - def test_language_create(self): - """ - Test language creation - does not exist in db - """ - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.language_exists", Mock(return_value=False) - ): - postgres.language_create( - "plpythonu", - "testdb", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - 
"--dbname", - "testdb", - "-c", - "CREATE LANGUAGE plpythonu", - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - def test_language_create_exists(self): - """ - Test language creation - already exists in db - """ - with patch("salt.modules.postgres.language_exists", Mock(return_value=True)): - ret = postgres.language_create( - "plpythonu", - "testdb", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - self.assertFalse(ret) - - def test_language_remove(self): - """ - Test language removal - exists in db - """ - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.language_exists", Mock(return_value=True) - ): - postgres.language_remove( - "plpgsql", - "testdb", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "testdb", - "-c", - "DROP LANGUAGE plpgsql", - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - def test_language_remove_non_exist(self): - """ - Test language removal - does not exist in db - """ - with patch("salt.modules.postgres.language_exists", Mock(return_value=False)): - ret = postgres.language_remove( - "plpgsql", - "testdb", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - self.assertFalse(ret) - - def test_privileges_list_table(self): - """ - Test privilege listing on a table - """ - with patch( - "salt.modules.postgres._run_psql", - Mock(return_value={"retcode": 0, "stdout": test_privileges_list_table_csv}), - ): - ret = postgres.privileges_list( - "awl", - 
"table", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - expected = { - "bayestest": { - "INSERT": False, - "UPDATE": False, - "SELECT": False, - "DELETE": False, - }, - "baruwa": { - "INSERT": True, - "TRUNCATE": True, - "UPDATE": True, - "TRIGGER": True, - "REFERENCES": True, - "SELECT": True, - "DELETE": True, - }, - "baruwatest": { - "INSERT": False, - "TRUNCATE": False, - "UPDATE": False, - "TRIGGER": False, - "REFERENCES": False, - "SELECT": False, - "DELETE": False, - }, - } - - self.assertDictEqual(ret, expected) - - query = ( - "COPY (SELECT relacl AS name FROM pg_catalog.pg_class c " - "JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace " - "WHERE nspname = 'public' AND relname = 'awl' AND relkind in ('r', 'v') " - "ORDER BY relname) TO STDOUT WITH CSV HEADER" - ) - - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "db_name", - "-v", - "datestyle=ISO,MDY", - "-c", - query, - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - def test_privileges_list_group(self): - """ - Test privilege listing on a group - """ - with patch( - "salt.modules.postgres._run_psql", - Mock(return_value={"retcode": 0, "stdout": test_privileges_list_group_csv}), - ): - ret = postgres.privileges_list( - "admin", - "group", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - expected = { - "baruwa": False, - "baruwatest": False, - "baruwatest2": True, - } - - self.assertDictEqual(ret, expected) - - query = ( - "COPY (SELECT rolname, admin_option " - "FROM pg_catalog.pg_auth_members m JOIN pg_catalog.pg_roles r " - "ON m.member=r.oid WHERE m.roleid IN (SELECT oid FROM 
" - "pg_catalog.pg_roles WHERE rolname='admin') ORDER BY rolname) " - "TO STDOUT WITH CSV HEADER" - ) - - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "db_name", - "-v", - "datestyle=ISO,MDY", - "-c", - query, - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - def test_has_privileges_on_table(self): - """ - Test privilege checks on table - """ - with patch( - "salt.modules.postgres._run_psql", - Mock(return_value={"retcode": 0, "stdout": test_privileges_list_table_csv}), - ): - ret = postgres.has_privileges( - "baruwa", - "awl", - "table", - "SELECT,INSERT", - grant_option=True, - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - self.assertTrue(ret) - - ret = postgres.has_privileges( - "baruwa", - "awl", - "table", - "ALL", - grant_option=True, - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - self.assertTrue(ret) - - ret = postgres.has_privileges( - "baruwa", - "awl", - "table", - "ALL", - grant_option=False, - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - self.assertTrue(ret) - - ret = postgres.has_privileges( - "bayestest", - "awl", - "table", - "SELECT,INSERT,TRUNCATE", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - self.assertFalse(ret) - - ret = postgres.has_privileges( - "bayestest", - "awl", - "table", - "SELECT,INSERT", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - 
self.assertTrue(ret) - - def test_has_privileges_on_group(self): - """ - Test privilege checks on group - """ - with patch( - "salt.modules.postgres._run_psql", - Mock(return_value={"retcode": 0, "stdout": test_privileges_list_group_csv}), - ): - ret = postgres.has_privileges( - "baruwa", - "admin", - "group", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - self.assertTrue(ret) - - ret = postgres.has_privileges( - "baruwa", - "admin", - "group", - grant_option=True, - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - self.assertFalse(ret) - - ret = postgres.has_privileges( - "tony", - "admin", - "group", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - self.assertFalse(ret) - - def test_privileges_grant_table(self): - """ - Test granting privileges on table - """ - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.has_privileges", Mock(return_value=False) - ): - ret = postgres.privileges_grant( - "baruwa", - "awl", - "table", - "ALL", - grant_option=True, - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - query = 'GRANT ALL ON TABLE public."awl" TO "baruwa" WITH GRANT OPTION' - - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "db_name", - "-c", - query, - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - 
"salt.modules.postgres.has_privileges", Mock(return_value=False) - ): - ret = postgres.privileges_grant( - "baruwa", - "awl", - "table", - "ALL", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - query = 'GRANT ALL ON TABLE public."awl" TO "baruwa"' - - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "db_name", - "-c", - query, - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - # Test grant on all tables - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.has_privileges", Mock(return_value=False) - ): - ret = postgres.privileges_grant( - "baruwa", - "ALL", - "table", - "SELECT", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - query = 'GRANT SELECT ON ALL TABLES IN SCHEMA public TO "baruwa"' - - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "db_name", - "-c", - query, - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - def test_privileges_grant_group(self): - """ - Test granting privileges on group - """ - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.has_privileges", Mock(return_value=False) - ): - ret = postgres.privileges_grant( - "baruwa", - "admins", - "group", - grant_option=True, - maintenance_db="db_name", - runas="user", - 
host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - query = 'GRANT admins TO "baruwa" WITH ADMIN OPTION' - - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "db_name", - "-c", - query, - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.has_privileges", Mock(return_value=False) - ): - ret = postgres.privileges_grant( - "baruwa", - "admins", - "group", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - query = 'GRANT admins TO "baruwa"' - - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "db_name", - "-c", - query, - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - def test_privileges_revoke_table(self): - """ - Test revoking privileges on table - """ - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch("salt.modules.postgres.has_privileges", Mock(return_value=True)): - ret = postgres.privileges_revoke( - "baruwa", - "awl", - "table", - "ALL", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - query = "REVOKE ALL ON TABLE public.awl FROM baruwa" - - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - 
"--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "db_name", - "-c", - query, - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - def test_privileges_revoke_group(self): - """ - Test revoking privileges on group - """ - with patch( - "salt.modules.postgres._run_psql", Mock(return_value={"retcode": 0}) - ): - with patch("salt.modules.postgres.has_privileges", Mock(return_value=True)): - ret = postgres.privileges_revoke( - "baruwa", - "admins", - "group", - maintenance_db="db_name", - runas="user", - host="testhost", - port="testport", - user="testuser", - password="testpassword", - ) - - query = "REVOKE admins FROM baruwa" - - postgres._run_psql.assert_called_once_with( - [ - "/usr/bin/pgsql", - "--no-align", - "--no-readline", - "--no-psqlrc", - "--no-password", - "--username", - "testuser", - "--host", - "testhost", - "--port", - "testport", - "--dbname", - "db_name", - "-c", - query, - ], - host="testhost", - port="testport", - password="testpassword", - user="testuser", - runas="user", - ) - - def test_datadir_init(self): - """ - Test Initializing a postgres data directory - """ - with patch( - "salt.modules.postgres._run_initdb", Mock(return_value={"retcode": 0}) - ): - with patch( - "salt.modules.postgres.datadir_exists", Mock(return_value=False) - ): - name = "/var/lib/pgsql/data" - ret = postgres.datadir_init( - name, user="postgres", password="test", runas="postgres" - ) - postgres._run_initdb.assert_called_once_with( - name, - auth="password", - encoding="UTF8", - locale=None, - password="test", - runas="postgres", - checksums=False, - waldir=None, - user="postgres", - ) - self.assertTrue(ret) - - def test_datadir_exists(self): - """ - Test Checks if postgres data directory has been initialized - """ - with patch("os.path.isfile", Mock(return_value=True)): - name = "/var/lib/pgsql/data" - ret = postgres.datadir_exists(name) - self.assertTrue(ret) - - 
def test_pg_is_older_ext_ver(self): - """ - Test Checks if postgres extension version string is older - """ - self.assertTrue(postgres._pg_is_older_ext_ver("8.5", "9.5")) - self.assertTrue(postgres._pg_is_older_ext_ver("8.5", "8.6")) - self.assertTrue(postgres._pg_is_older_ext_ver("8.5.2", "8.5.3")) - self.assertFalse(postgres._pg_is_older_ext_ver("9.5", "8.5")) - self.assertTrue(postgres._pg_is_older_ext_ver("9.5", "9.6")) - self.assertTrue(postgres._pg_is_older_ext_ver("9.5.0", "9.5.1")) - self.assertTrue(postgres._pg_is_older_ext_ver("9.5", "9.5.1")) - self.assertFalse(postgres._pg_is_older_ext_ver("9.5.1", "9.5")) - self.assertFalse(postgres._pg_is_older_ext_ver("9.5b", "9.5a")) - self.assertTrue(postgres._pg_is_older_ext_ver("10a", "10b")) - self.assertTrue(postgres._pg_is_older_ext_ver("1.2.3.4", "1.2.3.5")) - self.assertTrue(postgres._pg_is_older_ext_ver("10dev", "10next")) - self.assertFalse(postgres._pg_is_older_ext_ver("10next", "10dev")) diff --git a/tests/unit/modules/test_redismod.py b/tests/unit/modules/test_redismod.py deleted file mode 100644 index a40f7155a331..000000000000 --- a/tests/unit/modules/test_redismod.py +++ /dev/null @@ -1,456 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -from datetime import datetime - -import salt.modules.redismod as redismod -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock -from tests.support.unit import TestCase - - -class Mockredis: - """ - Mock redis class - """ - - class ConnectionError(Exception): - """ - Mock ConnectionError class - """ - - -class MockConnect: - """ - Mock Connect class - """ - - counter = 0 - - def __init__(self): - self.name = None - self.pattern = None - self.value = None - self.key = None - self.seconds = None - self.timestamp = None - self.field = None - self.start = None - self.stop = None - self.master_host = None - self.master_port = None - - @staticmethod - def bgrewriteaof(): - """ - Mock bgrewriteaof method - """ - return "A" - - 
@staticmethod - def bgsave(): - """ - Mock bgsave method - """ - return "A" - - def config_get(self, pattern): - """ - Mock config_get method - """ - self.pattern = pattern - return "A" - - def config_set(self, name, value): - """ - Mock config_set method - """ - self.name = name - self.value = value - return "A" - - @staticmethod - def dbsize(): - """ - Mock dbsize method - """ - return "A" - - @staticmethod - def delete(): - """ - Mock delete method - """ - return "A" - - def exists(self, key): - """ - Mock exists method - """ - self.key = key - return "A" - - def expire(self, key, seconds): - """ - Mock expire method - """ - self.key = key - self.seconds = seconds - return "A" - - def expireat(self, key, timestamp): - """ - Mock expireat method - """ - self.key = key - self.timestamp = timestamp - return "A" - - @staticmethod - def flushall(): - """ - Mock flushall method - """ - return "A" - - @staticmethod - def flushdb(): - """ - Mock flushdb method - """ - return "A" - - def get(self, key): - """ - Mock get method - """ - self.key = key - return "A" - - def hget(self, key, field): - """ - Mock hget method - """ - self.key = key - self.field = field - return "A" - - def hgetall(self, key): - """ - Mock hgetall method - """ - self.key = key - return "A" - - @staticmethod - def info(): - """ - Mock info method - """ - return "A" - - def keys(self, pattern): - """ - Mock keys method - """ - self.pattern = pattern - return "A" - - def type(self, key): - """ - Mock type method - """ - self.key = key - return "A" - - @staticmethod - def lastsave(): - """ - Mock lastsave method - """ - return datetime.now() - - def llen(self, key): - """ - Mock llen method - """ - self.key = key - return "A" - - def lrange(self, key, start, stop): - """ - Mock lrange method - """ - self.key = key - self.start = start - self.stop = stop - return "A" - - @staticmethod - def ping(): - """ - Mock ping method - """ - MockConnect.counter = MockConnect.counter + 1 - if MockConnect.counter 
== 1: - return "A" - elif MockConnect.counter in (2, 3, 5): - raise Mockredis.ConnectionError("foo") - - @staticmethod - def save(): - """ - Mock save method - """ - return "A" - - def set(self, key, value): - """ - Mock set method - """ - self.key = key - self.value = value - return "A" - - @staticmethod - def shutdown(): - """ - Mock shutdown method - """ - return "A" - - def slaveof(self, master_host, master_port): - """ - Mock slaveof method - """ - self.master_host = master_host - self.master_port = master_port - return "A" - - def smembers(self, key): - """ - Mock smembers method - """ - self.key = key - return "A" - - @staticmethod - def time(): - """ - Mock time method - """ - return "A" - - def zcard(self, key): - """ - Mock zcard method - """ - self.key = key - return "A" - - def zrange(self, key, start, stop): - """ - Mock zrange method - """ - self.key = key - self.start = start - self.stop = stop - return "A" - - -class RedismodTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.redismod - """ - - def setup_loader_modules(self): - return { - redismod: { - "redis": Mockredis, - "_connect": MagicMock(return_value=MockConnect()), - } - } - - def test_bgrewriteaof(self): - """ - Test to asynchronously rewrite the append-only file - """ - self.assertEqual(redismod.bgrewriteaof(), "A") - - def test_bgsave(self): - """ - Test to asynchronously save the dataset to disk - """ - self.assertEqual(redismod.bgsave(), "A") - - def test_config_get(self): - """ - Test to get redis server configuration values - """ - self.assertEqual(redismod.config_get("*"), "A") - - def test_config_set(self): - """ - Test to set redis server configuration values - """ - self.assertEqual(redismod.config_set("name", "value"), "A") - - def test_dbsize(self): - """ - Test to return the number of keys in the selected database - """ - self.assertEqual(redismod.dbsize(), "A") - - def test_delete(self): - """ - Test to deletes the keys from redis, returns number of 
keys deleted - """ - self.assertEqual(redismod.delete(), "A") - - def test_exists(self): - """ - Test to return true if the key exists in redis - """ - self.assertEqual(redismod.exists("key"), "A") - - def test_expire(self): - """ - Test to set a keys time to live in seconds - """ - self.assertEqual(redismod.expire("key", "seconds"), "A") - - def test_expireat(self): - """ - Test to set a keys expire at given UNIX time - """ - self.assertEqual(redismod.expireat("key", "timestamp"), "A") - - def test_flushall(self): - """ - Test to remove all keys from all databases - """ - self.assertEqual(redismod.flushall(), "A") - - def test_flushdb(self): - """ - Test to remove all keys from the selected database - """ - self.assertEqual(redismod.flushdb(), "A") - - def test_get_key(self): - """ - Test to get redis key value - """ - self.assertEqual(redismod.get_key("key"), "A") - - def test_hget(self): - """ - Test to get specific field value from a redis hash, returns dict - """ - self.assertEqual(redismod.hget("key", "field"), "A") - - def test_hgetall(self): - """ - Test to get all fields and values from a redis hash, returns dict - """ - self.assertEqual(redismod.hgetall("key"), "A") - - def test_info(self): - """ - Test to get information and statistics about the server - """ - self.assertEqual(redismod.info(), "A") - - def test_keys(self): - """ - Test to get redis keys, supports glob style patterns - """ - self.assertEqual(redismod.keys("pattern"), "A") - - def test_key_type(self): - """ - Test to get redis key type - """ - self.assertEqual(redismod.key_type("key"), "A") - - def test_lastsave(self): - """ - Test to get the UNIX time in seconds of the last successful - save to disk - """ - self.assertTrue(redismod.lastsave()) - - def test_llen(self): - """ - Test to get the length of a list in Redis - """ - self.assertEqual(redismod.llen("key"), "A") - - def test_lrange(self): - """ - Test to get a range of values from a list in Redis - """ - 
self.assertEqual(redismod.lrange("key", "start", "stop"), "A") - - def test_ping(self): - """ - Test to ping the server, returns False on connection errors - """ - self.assertEqual(redismod.ping(), "A") - - self.assertFalse(redismod.ping()) - - def test_save(self): - """ - Test to synchronously save the dataset to disk - """ - self.assertEqual(redismod.save(), "A") - - def test_set_key(self): - """ - Test to set redis key value - """ - self.assertEqual(redismod.set_key("key", "value"), "A") - - def test_shutdown(self): - """ - Test to synchronously save the dataset to disk and then - shut down the server - """ - self.assertFalse(redismod.shutdown()) - - self.assertTrue(redismod.shutdown()) - - self.assertFalse(redismod.shutdown()) - - def test_slaveof(self): - """ - Test to make the server a slave of another instance, or - promote it as master - """ - self.assertEqual(redismod.slaveof("master_host", "master_port"), "A") - - def test_smembers(self): - """ - Test to get members in a Redis set - """ - self.assertListEqual(redismod.smembers("key"), ["A"]) - - def test_time(self): - """ - Test to return the current server UNIX time in seconds - """ - self.assertEqual(redismod.time(), "A") - - def test_zcard(self): - """ - Test to get the length of a sorted set in Redis - """ - self.assertEqual(redismod.zcard("key"), "A") - - def test_zrange(self): - """ - Test to get a range of values from a sorted set in Redis by index - """ - self.assertEqual(redismod.zrange("key", "start", "stop"), "A") diff --git a/tests/unit/modules/test_serverdensity_device.py b/tests/unit/modules/test_serverdensity_device.py deleted file mode 100644 index eeec3f1242ce..000000000000 --- a/tests/unit/modules/test_serverdensity_device.py +++ /dev/null @@ -1,216 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - - -import salt.modules.serverdensity_device as serverdensity_device -import salt.utils.json -from salt.exceptions import CommandExecutionError -from tests.support.mixins import 
LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class MockRequests: - """ - Mock smtplib class - """ - - flag = None - content = """{"message": "Invalid token", "errors": [{"type": "invalid_token", "subject": "token"}]}""" - status_code = None - - def __init__(self): - self.url = None - self.data = None - self.kwargs = None - - def return_request(self, url, data=None, **kwargs): - """ - Mock request method. - """ - self.url = url - self.data = data - self.kwargs = kwargs - requests = MockRequests() - if self.flag == 1: - requests.status_code = 401 - else: - requests.status_code = 200 - return requests - - def post(self, url, data=None, **kwargs): - """ - Mock post method. - """ - return self.return_request(url, data, **kwargs) - - def delete(self, url, **kwargs): - """ - Mock delete method. - """ - return self.return_request(url, **kwargs) - - def get(self, url, **kwargs): - """ - Mock get method. - """ - return self.return_request(url, **kwargs) - - def put(self, url, data=None, **kwargs): - """ - Mock put method. - """ - return self.return_request(url, data, **kwargs) - - -class ServerdensityDeviceTestCase(TestCase, LoaderModuleMockMixin): - """ - TestCase for salt.modules.serverdensity_device - """ - - def setup_loader_modules(self): - return {serverdensity_device: {"requests": MockRequests()}} - - def setUp(self): - self.mock_json_loads = MagicMock(side_effect=ValueError()) - - # 'get_sd_auth' function tests: 1 - - def test_get_sd_auth(self): - """ - Tests if it returns requested Server Density - authentication value from pillar. 
- """ - with patch.dict(serverdensity_device.__pillar__, {"serverdensity": False}): - self.assertRaises( - CommandExecutionError, serverdensity_device.get_sd_auth, "1" - ) - - with patch.dict( - serverdensity_device.__pillar__, {"serverdensity": {"1": "salt"}} - ): - self.assertEqual(serverdensity_device.get_sd_auth("1"), "salt") - - self.assertRaises( - CommandExecutionError, serverdensity_device.get_sd_auth, "2" - ) - - # 'create' function tests: 1 - - def test_create(self): - """ - Tests if it create device in Server Density. - """ - with patch.dict( - serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}} - ): - self.assertTrue(serverdensity_device.create("rich_lama", group="lama_band")) - - with patch.object(salt.utils.json, "loads", self.mock_json_loads): - self.assertRaises( - CommandExecutionError, - serverdensity_device.create, - "rich_lama", - group="lama_band", - ) - - MockRequests.flag = 1 - self.assertIsNone( - serverdensity_device.create("rich_lama", group="lama_band") - ) - - # 'delete' function tests: 1 - - def test_delete(self): - """ - Tests if it delete a device from Server Density. - """ - with patch.dict( - serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}} - ): - MockRequests.flag = 0 - self.assertTrue(serverdensity_device.delete("51f7eaf")) - - with patch.object(salt.utils.json, "loads", self.mock_json_loads): - self.assertRaises( - CommandExecutionError, serverdensity_device.delete, "51f7eaf" - ) - - MockRequests.flag = 1 - self.assertIsNone(serverdensity_device.delete("51f7eaf")) - - # 'ls' function tests: 1 - - def test_ls(self): - """ - Tests if it list devices in Server Density. 
- """ - with patch.dict( - serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}} - ): - MockRequests.flag = 0 - self.assertTrue(serverdensity_device.ls(name="lama")) - - with patch.object(salt.utils.json, "loads", self.mock_json_loads): - self.assertRaises( - CommandExecutionError, serverdensity_device.ls, name="lama" - ) - - MockRequests.flag = 1 - self.assertIsNone(serverdensity_device.ls(name="lama")) - - # 'update' function tests: 1 - - def test_update(self): - """ - Tests if it updates device information in Server Density. - """ - with patch.dict( - serverdensity_device.__pillar__, {"serverdensity": {"api_token": "salt"}} - ): - MockRequests.flag = 0 - self.assertTrue(serverdensity_device.update("51f7eaf", name="lama")) - - with patch.object(salt.utils.json, "loads", self.mock_json_loads): - self.assertRaises( - CommandExecutionError, - serverdensity_device.update, - "51f7eaf", - name="lama", - ) - - MockRequests.flag = 1 - self.assertIsNone(serverdensity_device.update("51f7eaf", name="lama")) - - # 'install_agent' function tests: 1 - - def test_install_agent(self): - """ - Tests if it downloads Server Density installation agent, - and installs sd-agent with agent_key. - """ - mock = MagicMock(return_value=True) - with patch.dict( - serverdensity_device.__pillar__, {"serverdensity": {"account_url": "salt"}} - ): - with patch.dict(serverdensity_device.__salt__, {"cmd.run": mock}): - with patch.dict(serverdensity_device.__opts__, {"cachedir": "/"}): - self.assertTrue(serverdensity_device.install_agent("51f7e")) - - # 'install_agent_v2' function tests: 1 - - def test_install_agent_v2(self): - """ - Tests if it downloads Server Density installation agent, - and installs sd-agent with agent_key. 
- """ - mock = MagicMock(return_value=True) - with patch.dict( - serverdensity_device.__pillar__, {"serverdensity": {"account_name": "salt"}} - ): - with patch.dict(serverdensity_device.__salt__, {"cmd.run": mock}): - with patch.dict(serverdensity_device.__opts__, {"cachedir": "/"}): - self.assertTrue( - serverdensity_device.install_agent("51f7e", agent_version=2) - ) diff --git a/tests/unit/modules/test_servicenow.py b/tests/unit/modules/test_servicenow.py deleted file mode 100644 index 4cb004dcc295..000000000000 --- a/tests/unit/modules/test_servicenow.py +++ /dev/null @@ -1,60 +0,0 @@ -""" - :codeauthor: Anthony Shaw -""" - - -import salt.modules.servicenow as servicenow -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock -from tests.support.unit import TestCase - - -class MockServiceNowClient: - def __init__(self, instance_name, username, password): - pass - - def get(self, query): - return [{"query_size": len(query), "query_value": query}] - - -class ServiceNowModuleTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - module_globals = { - "Client": MockServiceNowClient, - "__salt__": { - "config.option": MagicMock( - return_value={ - "instance_name": "test", - "username": "mr_test", - "password": "test123", - } - ) - }, - } - if servicenow.HAS_LIBS is False: - module_globals["sys.modules"] = {"servicenow_rest": MagicMock()} - module_globals["sys.modules"][ - "servicenow_rest" - ].api.Client = MockServiceNowClient - return {servicenow: module_globals} - - def test_module_creation(self): - client = servicenow._get_client() - self.assertFalse(client is None) - - def test_non_structured_query(self): - result = servicenow.non_structured_query("tests", "role=web") - self.assertFalse(result is None) - self.assertEqual(result[0]["query_size"], 8) - self.assertEqual(result[0]["query_value"], "role=web") - - def test_non_structured_query_kwarg(self): - result = 
servicenow.non_structured_query("tests", role="web") - self.assertFalse(result is None) - self.assertEqual(result[0]["query_size"], 8) - self.assertEqual(result[0]["query_value"], "role=web") - - def test_non_structured_query_kwarg_multi(self): - result = servicenow.non_structured_query("tests", role="web", type="computer") - self.assertFalse(result is None) - self.assertEqual(result[0]["query_size"], 22) diff --git a/tests/unit/modules/test_status.py b/tests/unit/modules/test_status.py deleted file mode 100644 index 406e0953b695..000000000000 --- a/tests/unit/modules/test_status.py +++ /dev/null @@ -1,405 +0,0 @@ -import os - -import salt.modules.status as status -import salt.utils.platform -from salt.exceptions import CommandExecutionError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, mock_open, patch -from tests.support.unit import TestCase - - -class StatusTestCase(TestCase, LoaderModuleMockMixin): - """ - test modules.status functions - """ - - def setup_loader_modules(self): - return {status: {}} - - def _set_up_test_uptime(self): - """ - Define common mock data for status.uptime tests - """ - - class MockData: - """ - Store mock data - """ - - m = MockData() - m.now = 1477004312 - m.ut = 1540154.00 - m.idle = 3047777.32 - m.ret = { - "users": 3, - "seconds": 1540154, - "since_t": 1475464158, - "days": 17, - "since_iso": "2016-10-03T03:09:18", - "time": "19:49", - } - - return m - - def _set_up_test_uptime_sunos(self): - """ - Define common mock data for cmd.run_all for status.uptime on SunOS - """ - - class MockData: - """ - Store mock data - """ - - m = MockData() - m.ret = { - "retcode": 0, - "stdout": "unix:0:system_misc:boot_time 1475464158", - } - - return m - - def test_uptime_linux(self): - """ - Test modules.status.uptime function for Linux - """ - m = self._set_up_test_uptime() - - with patch.multiple( - salt.utils.platform, - is_linux=MagicMock(return_value=True), - 
is_sunos=MagicMock(return_value=False), - is_darwin=MagicMock(return_value=False), - is_freebsd=MagicMock(return_value=False), - is_openbsd=MagicMock(return_value=False), - is_netbsd=MagicMock(return_value=False), - ), patch("salt.utils.path.which", MagicMock(return_value=True)), patch.dict( - status.__salt__, - {"cmd.run": MagicMock(return_value=os.linesep.join(["1", "2", "3"]))}, - ), patch( - "time.time", MagicMock(return_value=m.now) - ), patch( - "os.path.exists", MagicMock(return_value=True) - ): - proc_uptime = salt.utils.stringutils.to_str("{} {}".format(m.ut, m.idle)) - - with patch("salt.utils.files.fopen", mock_open(read_data=proc_uptime)): - ret = status.uptime() - self.assertDictEqual(ret, m.ret) - with patch("os.path.exists", MagicMock(return_value=False)): - with self.assertRaises(CommandExecutionError): - status.uptime() - - def test_uptime_sunos(self): - """ - Test modules.status.uptime function for SunOS - """ - m = self._set_up_test_uptime() - m2 = self._set_up_test_uptime_sunos() - with patch.multiple( - salt.utils.platform, - is_linux=MagicMock(return_value=False), - is_sunos=MagicMock(return_value=True), - is_darwin=MagicMock(return_value=False), - is_freebsd=MagicMock(return_value=False), - is_openbsd=MagicMock(return_value=False), - is_netbsd=MagicMock(return_value=False), - ), patch("salt.utils.path.which", MagicMock(return_value=True)), patch.dict( - status.__salt__, - { - "cmd.run": MagicMock(return_value=os.linesep.join(["1", "2", "3"])), - "cmd.run_all": MagicMock(return_value=m2.ret), - }, - ), patch( - "time.time", MagicMock(return_value=m.now) - ): - ret = status.uptime() - self.assertDictEqual(ret, m.ret) - - def test_uptime_macos(self): - """ - Test modules.status.uptime function for macOS - """ - m = self._set_up_test_uptime() - - kern_boottime = ( - "{{ sec = {0}, usec = {1:0<6} }} Mon Oct 03 03:09:18.23 2016".format( - *str(m.now - m.ut).split(".") - ) - ) - with patch.multiple( - salt.utils.platform, - 
is_linux=MagicMock(return_value=False), - is_sunos=MagicMock(return_value=False), - is_darwin=MagicMock(return_value=True), - is_freebsd=MagicMock(return_value=False), - is_openbsd=MagicMock(return_value=False), - is_netbsd=MagicMock(return_value=False), - ), patch("salt.utils.path.which", MagicMock(return_value=True)), patch.dict( - status.__salt__, - { - "cmd.run": MagicMock(return_value=os.linesep.join(["1", "2", "3"])), - "sysctl.get": MagicMock(return_value=kern_boottime), - }, - ), patch( - "time.time", MagicMock(return_value=m.now) - ): - - ret = status.uptime() - self.assertDictEqual(ret, m.ret) - - with patch.dict( - status.__salt__, {"sysctl.get": MagicMock(return_value="")} - ): - with self.assertRaises(CommandExecutionError): - status.uptime() - - def test_uptime_return_success_not_supported(self): - """ - Test modules.status.uptime function for other platforms - """ - with patch.multiple( - salt.utils.platform, - is_linux=MagicMock(return_value=False), - is_sunos=MagicMock(return_value=False), - is_darwin=MagicMock(return_value=False), - is_freebsd=MagicMock(return_value=False), - is_openbsd=MagicMock(return_value=False), - is_netbsd=MagicMock(return_value=False), - ): - exc_mock = MagicMock(side_effect=CommandExecutionError) - with self.assertRaises(CommandExecutionError): - with patch.dict(status.__salt__, {"cmd.run": exc_mock}): - status.uptime() - - def _set_up_test_cpustats_openbsd(self): - """ - Define mock data for status.cpustats on OpenBSD - """ - - class MockData: - """ - Store mock data - """ - - m = MockData() - m.ret = { - "0": { - "User": "0.0%", - "Nice": "0.0%", - "System": "4.5%", - "Interrupt": "0.5%", - "Idle": "95.0%", - } - } - - return m - - def test_cpustats_openbsd(self): - """ - Test modules.status.cpustats function for OpenBSD - """ - m = self._set_up_test_cpustats_openbsd() - - systat = ( - "\n\n 1 users Load 0.20 0.07 0.05 salt.localdomain" - " 09:42:42\nCPU User Nice System " - " Interrupt Idle\n0 0.0% 0.0% " - " 4.5% 0.5% 
95.0%\n" - ) - - with patch.multiple( - salt.utils.platform, - is_linux=MagicMock(return_value=False), - is_sunos=MagicMock(return_value=False), - is_darwin=MagicMock(return_value=False), - is_freebsd=MagicMock(return_value=False), - is_openbsd=MagicMock(return_value=True), - is_netbsd=MagicMock(return_value=False), - ), patch("salt.utils.path.which", MagicMock(return_value=True)), patch.dict( - status.__grains__, {"kernel": "OpenBSD"} - ), patch.dict( - status.__salt__, {"cmd.run": MagicMock(return_value=systat)} - ): - ret = status.cpustats() - self.assertDictEqual(ret, m.ret) - - def _set_up_test_cpuinfo_bsd(self): - class MockData: - """ - Store mock data - """ - - m = MockData() - m.ret = { - "hw.model": "Intel(R) Core(TM) i5-7287U CPU @ 3.30GHz", - "hw.ncpu": "4", - } - - return m - - def test_cpuinfo_freebsd(self): - m = self._set_up_test_cpuinfo_bsd() - sysctl = "hw.model:Intel(R) Core(TM) i5-7287U CPU @ 3.30GHz\nhw.ncpu:4" - - with patch.dict(status.__grains__, {"kernel": "FreeBSD"}): - with patch.dict( - status.__salt__, {"cmd.run": MagicMock(return_value=sysctl)} - ): - ret = status.cpuinfo() - self.assertDictEqual(ret, m.ret) - - def test_cpuinfo_openbsd(self): - m = self._set_up_test_cpuinfo_bsd() - sysctl = "hw.model=Intel(R) Core(TM) i5-7287U CPU @ 3.30GHz\nhw.ncpu=4" - - for bsd in ["NetBSD", "OpenBSD"]: - with patch.dict(status.__grains__, {"kernel": bsd}): - with patch.dict( - status.__salt__, {"cmd.run": MagicMock(return_value=sysctl)} - ): - ret = status.cpuinfo() - self.assertDictEqual(ret, m.ret) - - def _set_up_test_meminfo_openbsd(self): - class MockData: - """ - Store mock data - """ - - m = MockData() - m.ret = { - "active virtual pages": "355M", - "free list size": "305M", - "page faults": "845", - "pages reclaimed": "1", - "pages paged in": "2", - "pages paged out": "3", - "pages freed": "4", - "pages scanned": "5", - } - - return m - - def test_meminfo_openbsd(self): - m = self._set_up_test_meminfo_openbsd() - vmstat = ( - " procs 
memory page disks traps " - " cpu\n r s avm fre flt re pi po fr sr cd0 sd0 int sys " - " cs us sy id\n 2 103 355M 305M 845 1 2 3 4 5 0 1 21 " - " 682 86 1 1 98" - ) - - with patch.dict(status.__grains__, {"kernel": "OpenBSD"}): - with patch.dict( - status.__salt__, {"cmd.run": MagicMock(return_value=vmstat)} - ): - ret = status.meminfo() - self.assertDictEqual(ret, m.ret) - - def _set_up_test_w_linux(self): - """ - Define mock data for status.w on Linux - """ - - class MockData: - """ - Store mock data - """ - - m = MockData() - m.ret = [ - { - "idle": "0s", - "jcpu": "0.24s", - "login": "13:42", - "pcpu": "0.16s", - "tty": "pts/1", - "user": "root", - "what": "nmap -sV 10.2.2.2", - } - ] - - return m - - def _set_up_test_w_bsd(self): - """ - Define mock data for status.w on Linux - """ - - class MockData: - """ - Store mock data - """ - - m = MockData() - m.ret = [ - { - "idle": "0", - "from": "10.2.2.1", - "login": "1:42PM", - "tty": "p1", - "user": "root", - "what": "nmap -sV 10.2.2.2", - } - ] - - return m - - def test_w_linux(self): - m = self._set_up_test_w_linux() - w_output = "root pts/1 13:42 0s 0.24s 0.16s nmap -sV 10.2.2.2" - - with patch.dict(status.__grains__, {"kernel": "Linux"}): - with patch.dict( - status.__salt__, {"cmd.run": MagicMock(return_value=w_output)} - ): - ret = status.w() - self.assertListEqual(ret, m.ret) - - def test_w_bsd(self): - m = self._set_up_test_w_bsd() - w_output = "root p1 10.2.2.1 1:42PM 0 nmap -sV 10.2.2.2" - - for bsd in ["Darwin", "FreeBSD", "OpenBSD"]: - with patch.dict(status.__grains__, {"kernel": bsd}): - with patch.dict( - status.__salt__, {"cmd.run": MagicMock(return_value=w_output)} - ): - ret = status.w() - self.assertListEqual(ret, m.ret) - - def _set_up_test_status_pid_linux(self): - class MockData: - """ - Store mock data - """ - - m = MockData() - m.ret = "2701\n7539\n7540\n7542\n7623" - return m - - def test_status_pid_linux(self): - m = self._set_up_test_status_pid_linux() - ps = ( - "UID PID PPID C STIME 
TTY TIME CMD\nroot 360 2 0 Jun08 ?" - " 00:00:00 [jbd2/dm-0-8]\nroot 947 2 0 Jun08 ? 00:00:00 " - " [jbd2/dm-1-8]\nroot 949 2 0 Jun08 ? 00:00:09 " - " [jbd2/dm-3-8]\nroot 951 2 0 Jun08 ? 00:00:00 " - " [jbd2/dm-4-8]\nroot 2701 1 0 Jun08 ? 00:00:28 /usr/sbin/httpd" - " -k start\napache 7539 2701 0 04:40 ? 00:00:04 /usr/sbin/httpd -k" - " start\napache 7540 2701 0 04:40 ? 00:00:02 /usr/sbin/httpd -k" - " start\napache 7542 2701 0 04:40 ? 00:01:46 /usr/sbin/httpd -k" - " start\napache 7623 2701 0 04:40 ? 00:02:41 /usr/sbin/httpd -k" - " start\nroot 1564 1 0 Jun11 ? 00:07:19 /usr/bin/python3" - " /usr/bin/salt-minion -d\nroot 6674 1564 0 19:53 ? 00:00:00 " - " /usr/bin/python3 /usr/bin/salt-call status.pid httpd -l debug" - ) - - with patch.dict(status.__grains__, {"ps": "ps -efHww"}): - with patch.dict( - status.__salt__, {"cmd.run_stdout": MagicMock(return_value=ps)} - ): - with patch.object(os, "getpid", return_value="6674"): - ret = status.pid("httpd") - self.assertEqual(ret, m.ret) diff --git a/tests/unit/modules/test_swift.py b/tests/unit/modules/test_swift.py deleted file mode 100644 index fec2fba55606..000000000000 --- a/tests/unit/modules/test_swift.py +++ /dev/null @@ -1,66 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - - -import salt.modules.swift as swift -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class SwiftTestCase(TestCase): - """ - Test cases for salt.modules.swift - """ - - # 'delete' function tests: 1 - - def test_delete(self): - """ - Test for delete a container, or delete an object from a container. - """ - with patch.object(swift, "_auth", MagicMock()): - self.assertTrue(swift.delete("mycontainer")) - - self.assertTrue(swift.delete("mycontainer", path="myfile.png")) - - # 'get' function tests: 1 - - def test_get(self): - """ - Test for list the contents of a container, - or return an object from a container. 
- """ - with patch.object(swift, "_auth", MagicMock()): - self.assertTrue(swift.get()) - - self.assertTrue(swift.get("mycontainer")) - - self.assertTrue( - swift.get("mycontainer", path="myfile.png", return_bin=True) - ) - - self.assertTrue( - swift.get( - "mycontainer", path="myfile.png", local_file="/tmp/myfile.png" - ) - ) - - self.assertFalse(swift.get("mycontainer", path="myfile.png")) - - # 'put' function tests: 1 - - def test_put(self): - """ - Test for create a new container, or upload an object to a container. - """ - with patch.object(swift, "_auth", MagicMock()): - self.assertTrue(swift.put("mycontainer")) - - self.assertTrue( - swift.put( - "mycontainer", path="myfile.png", local_file="/tmp/myfile.png" - ) - ) - - self.assertFalse(swift.put("mycontainer", path="myfile.png")) diff --git a/tests/unit/modules/test_syslog_ng.py b/tests/unit/modules/test_syslog_ng.py deleted file mode 100644 index 0a45fc401c2f..000000000000 --- a/tests/unit/modules/test_syslog_ng.py +++ /dev/null @@ -1,347 +0,0 @@ -import os -from textwrap import dedent - -import pytest - -import salt.modules.syslog_ng as syslog_ng -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -_VERSION = "3.6.0alpha0" -_MODULES = ( - "syslogformat,json-plugin,basicfuncs,afstomp,afsocket,cryptofuncs," - "afmongodb,dbparser,system-source,affile,pseudofile,afamqp," - "afsocket-notls,csvparser,linux-kmsg-format,afuser,confgen,afprog" -) - -VERSION_OUTPUT = """syslog-ng {0} -Installer-Version: {0} -Revision: -Compile-Date: Apr 4 2014 20:26:18 -Error opening plugin module; module='afsocket-tls', error='/home/tibi/install/syslog-ng/lib/syslog-ng/libafsocket-tls.so: undefined symbol: tls_context_setup_session' -Available-Modules: {1} -Enable-Debug: on -Enable-GProf: off -Enable-Memtrace: off -Enable-IPv6: on -Enable-Spoof-Source: off -Enable-TCP-Wrapper: off -Enable-Linux-Caps: off""".format( - _VERSION, _MODULES 
-) - -STATS_OUTPUT = """SourceName;SourceId;SourceInstance;State;Type;Number -center;;received;a;processed;0 -destination;#anon-destination0;;a;processed;0 -destination;#anon-destination1;;a;processed;0 -source;s_gsoc2014;;a;processed;0 -center;;queued;a;processed;0 -global;payload_reallocs;;a;processed;0 -global;sdata_updates;;a;processed;0 -global;msg_clones;;a;processed;0""" - -_SYSLOG_NG_NOT_INSTALLED_RETURN_VALUE = { - "retcode": -1, - "stderr": "Unable to execute the command 'syslog-ng'. It is not in the PATH.", -} -_SYSLOG_NG_CTL_NOT_INSTALLED_RETURN_VALUE = { - "retcode": -1, - "stderr": "Unable to execute the command 'syslog-ng-ctl'. It is not in the PATH.", -} - - -class SyslogNGTestCase(TestCase, LoaderModuleMockMixin): - - # pylint: disable=blacklisted-function - orig_env = {"PATH": "/foo:/bar"} - bin_dir = "/baz" - mocked_env = {"PATH": "/foo:/bar:/baz"} - # pylint: enable=blacklisted-function - - def setup_loader_modules(self): - return {syslog_ng: {}} - - def test_statement_without_options(self): - s = syslog_ng.Statement("source", "s_local", options=[]) - b = s.build() - self.assertEqual( - dedent( - """\ - source s_local { - }; - """ - ), - b, - ) - - def test_non_empty_statement(self): - o1 = syslog_ng.Option("file") - o2 = syslog_ng.Option("tcp") - s = syslog_ng.Statement("source", "s_local", options=[o1, o2]) - b = s.build() - self.assertEqual( - dedent( - """\ - source s_local { - file( - ); - tcp( - ); - }; - """ - ), - b, - ) - - def test_option_with_parameters(self): - o1 = syslog_ng.Option("file") - p1 = syslog_ng.SimpleParameter('"/var/log/messages"') - p2 = syslog_ng.SimpleParameter() - p3 = syslog_ng.TypedParameter() - p3.type = "tls" - p2.value = '"/var/log/syslog"' - o1.add_parameter(p1) - o1.add_parameter(p2) - o1.add_parameter(p3) - b = o1.build() - self.assertEqual( - dedent( - """\ - file( - "/var/log/messages", - "/var/log/syslog", - tls( - ) - ); - """ - ), - b, - ) - - def test_parameter_with_values(self): - p = 
syslog_ng.TypedParameter() - p.type = "tls" - v1 = syslog_ng.TypedParameterValue() - v1.type = "key_file" - - v2 = syslog_ng.TypedParameterValue() - v2.type = "cert_file" - - p.add_value(v1) - p.add_value(v2) - - b = p.build() - self.assertEqual( - dedent( - """\ - tls( - key_file( - ), - cert_file( - ) - )""" - ), - b, - ) - - def test_value_with_arguments(self): - t = syslog_ng.TypedParameterValue() - t.type = "key_file" - - a1 = syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"') - a2 = syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"') - - t.add_argument(a1) - t.add_argument(a2) - - b = t.build() - self.assertEqual( - dedent( - """\ - key_file( - "/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key" - "/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key" - )""" - ), - b, - ) - - def test_end_to_end_statement_generation(self): - s = syslog_ng.Statement("source", "s_tls") - - o = syslog_ng.Option("tcp") - - ip = syslog_ng.TypedParameter("ip") - ip.add_value(syslog_ng.SimpleParameterValue("'192.168.42.2'")) - o.add_parameter(ip) - - port = syslog_ng.TypedParameter("port") - port.add_value(syslog_ng.SimpleParameterValue(514)) - o.add_parameter(port) - - tls = syslog_ng.TypedParameter("tls") - key_file = syslog_ng.TypedParameterValue("key_file") - key_file.add_argument( - syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key"') - ) - cert_file = syslog_ng.TypedParameterValue("cert_file") - cert_file.add_argument( - syslog_ng.Argument('"/opt/syslog-ng/etc/syslog-ng/cert.d/syslog-ng.cert"') - ) - peer_verify = syslog_ng.TypedParameterValue("peer_verify") - peer_verify.add_argument(syslog_ng.Argument("optional-untrusted")) - tls.add_value(key_file) - tls.add_value(cert_file) - tls.add_value(peer_verify) - o.add_parameter(tls) - - s.add_child(o) - b = s.build() - self.assertEqual( - dedent( - """\ - source s_tls { - tcp( - ip( - '192.168.42.2' - ), - port( - 514 - ), - tls( - key_file( - 
"/opt/syslog-ng/etc/syslog-ng/key.d/syslog-ng.key" - ), - cert_file( - "/opt/syslog-ng/etc/syslog-ng/cert.d/syslog-ng.cert" - ), - peer_verify( - optional-untrusted - ) - ) - ); - }; - """ - ), - b, - ) - - @pytest.mark.skip_on_windows(reason="Module not available on Windows") - def test_version(self): - cmd_ret = {"retcode": 0, "stdout": VERSION_OUTPUT} - expected_output = {"retcode": 0, "stdout": _VERSION} - cmd_args = ["syslog-ng", "-V"] - - cmd_mock = MagicMock(return_value=cmd_ret) - with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( - os.environ, self.orig_env - ): - result = syslog_ng.version() - self.assertEqual(result, expected_output) - cmd_mock.assert_called_once_with(cmd_args, env=None, python_shell=False) - - cmd_mock = MagicMock(return_value=cmd_ret) - with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( - os.environ, self.orig_env - ): - result = syslog_ng.version(syslog_ng_sbin_dir=self.bin_dir) - self.assertEqual(result, expected_output) - cmd_mock.assert_called_once_with( - cmd_args, env=self.mocked_env, python_shell=False - ) - - @pytest.mark.skip_on_windows(reason="Module not available on Windows") - def test_stats(self): - cmd_ret = {"retcode": 0, "stdout": STATS_OUTPUT} - cmd_args = ["syslog-ng-ctl", "stats"] - - cmd_mock = MagicMock(return_value=cmd_ret) - with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( - os.environ, self.orig_env - ): - result = syslog_ng.stats() - self.assertEqual(result, cmd_ret) - cmd_mock.assert_called_once_with(cmd_args, env=None, python_shell=False) - - cmd_mock = MagicMock(return_value=cmd_ret) - with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( - os.environ, self.orig_env - ): - result = syslog_ng.stats(syslog_ng_sbin_dir=self.bin_dir) - self.assertEqual(result, cmd_ret) - cmd_mock.assert_called_once_with( - cmd_args, env=self.mocked_env, python_shell=False - ) - - @pytest.mark.skip_on_windows(reason="Module not 
available on Windows") - def test_modules(self): - cmd_ret = {"retcode": 0, "stdout": VERSION_OUTPUT} - expected_output = {"retcode": 0, "stdout": _MODULES} - cmd_args = ["syslog-ng", "-V"] - - cmd_mock = MagicMock(return_value=cmd_ret) - with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( - os.environ, self.orig_env - ): - result = syslog_ng.modules() - self.assertEqual(result, expected_output) - cmd_mock.assert_called_once_with(cmd_args, env=None, python_shell=False) - - cmd_mock = MagicMock(return_value=cmd_ret) - with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( - os.environ, self.orig_env - ): - result = syslog_ng.modules(syslog_ng_sbin_dir=self.bin_dir) - self.assertEqual(result, expected_output) - cmd_mock.assert_called_once_with( - cmd_args, env=self.mocked_env, python_shell=False - ) - - @pytest.mark.skip_on_windows(reason="Module not available on Windows") - def test_config_test(self): - cmd_ret = {"retcode": 0, "stderr": "", "stdout": "Foo"} - cmd_args = ["syslog-ng", "--syntax-only"] - - cmd_mock = MagicMock(return_value=cmd_ret) - with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( - os.environ, self.orig_env - ): - result = syslog_ng.config_test() - self.assertEqual(result, cmd_ret) - cmd_mock.assert_called_once_with(cmd_args, env=None, python_shell=False) - - cmd_mock = MagicMock(return_value=cmd_ret) - with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( - os.environ, self.orig_env - ): - result = syslog_ng.config_test(syslog_ng_sbin_dir=self.bin_dir) - self.assertEqual(result, cmd_ret) - cmd_mock.assert_called_once_with( - cmd_args, env=self.mocked_env, python_shell=False - ) - - @pytest.mark.skip_on_windows(reason="Module not available on Windows") - def test_config_test_cfgfile(self): - cfgfile = "/path/to/syslog-ng.conf" - cmd_ret = {"retcode": 1, "stderr": "Syntax error...", "stdout": ""} - cmd_args = ["syslog-ng", "--syntax-only", 
"--cfgfile={}".format(cfgfile)] - - cmd_mock = MagicMock(return_value=cmd_ret) - with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( - os.environ, self.orig_env - ): - self.assertEqual(syslog_ng.config_test(cfgfile=cfgfile), cmd_ret) - cmd_mock.assert_called_once_with(cmd_args, env=None, python_shell=False) - - cmd_mock = MagicMock(return_value=cmd_ret) - with patch.dict(syslog_ng.__salt__, {"cmd.run_all": cmd_mock}), patch.dict( - os.environ, self.orig_env - ): - self.assertEqual( - syslog_ng.config_test(syslog_ng_sbin_dir=self.bin_dir, cfgfile=cfgfile), - cmd_ret, - ) - cmd_mock.assert_called_once_with( - cmd_args, env=self.mocked_env, python_shell=False - ) diff --git a/tests/unit/modules/test_telegram.py b/tests/unit/modules/test_telegram.py deleted file mode 100644 index b1babed02747..000000000000 --- a/tests/unit/modules/test_telegram.py +++ /dev/null @@ -1,86 +0,0 @@ -""" -Tests for the Telegram execution module. - -:codeauthor: :email:`Roald Nefs (info@roaldnefs.com)` -""" - - -import logging - -import salt.modules.telegram as telegram -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, Mock -from tests.support.unit import TestCase - -log = logging.getLogger(__name__) - - -class RequestMock(Mock): - """ - Request Mock - """ - - def get(self, *args, **kwargs): - return RequestResponseMock() - - def put(self, *args, **kwargs): - self.args = args - self.kwargs = kwargs - return RequestPutResponseMock() - - def delete(self, *args, **kwargs): - self.args = args - self.kwargs = kwargs - return RequestResponseMock() - - -class RequestResponseMock(Mock): - """ - Request Response Mock - """ - - def json(self): - return [ - {"url": "http://example.org", "_id": 1234}, - ] - - -class RequestPutResponseMock(Mock): - """ - Request Put Response Mock - """ - - ok = True - - def json(self): - return {"_id": 4321} - - -class TelegramModuleTest(TestCase, LoaderModuleMockMixin): - """ - Test cases for 
salt.modules.telegram. - """ - - def setup_loader_modules(self): - module_globals = { - "__salt__": { - "config.get": MagicMock( - return_value={ - "telegram": { - "chat_id": "123456789", - "token": "000000000:xxxxxxxxxxxxxxxxxxxxxxxxxxxxxx", - } - } - ), - "requests.put": Mock(), - }, - "requests": RequestMock(), - } - return {telegram: module_globals} - - def test_post_message(self): - """ - Test the post_message function. - """ - message = "Hello World!" - self.assertTrue(telegram.post_message(message)) diff --git a/tests/unit/modules/test_uptime.py b/tests/unit/modules/test_uptime.py deleted file mode 100644 index 0201f46d08e2..000000000000 --- a/tests/unit/modules/test_uptime.py +++ /dev/null @@ -1,76 +0,0 @@ -import salt.modules.uptime as uptime -from salt.exceptions import CommandExecutionError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import Mock -from tests.support.unit import TestCase - - -class RequestMock(Mock): - """ - Request Mock - """ - - def get(self, *args, **kwargs): - return RequestResponseMock() - - def put(self, *args, **kwargs): - self.args = args - self.kwargs = kwargs - return RequestPutResponseMock() - - def delete(self, *args, **kwargs): - self.args = args - self.kwargs = kwargs - return RequestResponseMock() - - -class RequestResponseMock(Mock): - def json(self): - return [ - {"url": "http://example.org", "_id": 1234}, - ] - - -class RequestPutResponseMock(Mock): - - ok = True - - def json(self): - return {"_id": 4321} - - -REQUEST_MOCK = RequestMock() - - -class UptimeTestCase(TestCase, LoaderModuleMockMixin): - """ - UptimeTestCase - """ - - def setup_loader_modules(self): - return { - uptime: { - "__salt__": { - "pillar.get": Mock(return_value="http://localhost:5000"), - "requests.put": Mock(), - }, - "requests": REQUEST_MOCK, - } - } - - def test_checks_list(self): - ret = uptime.checks_list() - self.assertListEqual(ret, ["http://example.org"]) - - def test_checks_exists(self): - 
self.assertTrue(uptime.check_exists("http://example.org") is True) - - def test_checks_create(self): - self.assertRaises(CommandExecutionError, uptime.create, "http://example.org") - self.assertEqual(4321, uptime.create("http://example.com")) - self.assertEqual(("http://localhost:5000/api/checks",), REQUEST_MOCK.args) - - def test_checks_delete(self): - self.assertRaises(CommandExecutionError, uptime.delete, "http://example.com") - self.assertTrue(uptime.delete("http://example.org") is True) - self.assertEqual(("http://localhost:5000/api/checks/1234",), REQUEST_MOCK.args) diff --git a/tests/unit/modules/test_uwsgi.py b/tests/unit/modules/test_uwsgi.py deleted file mode 100644 index 2f5a735f10df..000000000000 --- a/tests/unit/modules/test_uwsgi.py +++ /dev/null @@ -1,23 +0,0 @@ -import salt.modules.uwsgi as uwsgi -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, Mock, patch -from tests.support.unit import TestCase - - -class UwsgiTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - patcher = patch("salt.utils.path.which", Mock(return_value="/usr/bin/uwsgi")) - patcher.start() - self.addCleanup(patcher.stop) - return {uwsgi: {}} - - def test_uwsgi_stats(self): - socket = "127.0.0.1:5050" - mock = MagicMock(return_value='{"a": 1, "b": 2}') - with patch.dict(uwsgi.__salt__, {"cmd.run": mock}): - result = uwsgi.stats(socket) - mock.assert_called_once_with( - ["uwsgi", "--connect-and-read", "{}".format(socket)], - python_shell=False, - ) - self.assertEqual(result, {"a": 1, "b": 2}) diff --git a/tests/unit/modules/test_vagrant.py b/tests/unit/modules/test_vagrant.py deleted file mode 100644 index ee7411de1b56..000000000000 --- a/tests/unit/modules/test_vagrant.py +++ /dev/null @@ -1,174 +0,0 @@ -import os - -import salt.exceptions -import salt.modules.vagrant as vagrant -import salt.utils.platform -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, 
patch -from tests.support.unit import TestCase - -TEMP_DATABASE_FILE = "/tmp/salt-tests-tmpdir/test_vagrant.sqlite" - - -class VagrantTestCase(TestCase, LoaderModuleMockMixin): - """ - Unit TestCase for the salt.modules.vagrant module. - """ - - LOCAL_OPTS = { - "extension_modules": "", - "vagrant_sdb_data": { - "driver": "sqlite3", - "database": TEMP_DATABASE_FILE, - "table": "sdb", - "create_table": True, - }, - } - - def setup_loader_modules(self): - vagrant_globals = { - "__opts__": self.LOCAL_OPTS, - } - return {vagrant: vagrant_globals} - - def test_vagrant_get_vm_info_not_found(self): - mock_sdb = MagicMock(return_value=None) - with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb}): - with self.assertRaises(salt.exceptions.SaltInvocationError): - vagrant.get_vm_info("thisNameDoesNotExist") - - def test_vagrant_init_positional(self): - path_nowhere = os.path.join(os.sep, "tmp", "nowhere") - if salt.utils.platform.is_windows(): - path_nowhere = "c:{}".format(path_nowhere) - mock_sdb = MagicMock(return_value=None) - with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}): - resp = vagrant.init( - "test1", - path_nowhere, - "onetest", - "nobody", - False, - "french", - {"different": "very"}, - ) - self.assertTrue(resp.startswith("Name test1 defined")) - expected = dict( - name="test1", - cwd=path_nowhere, - machine="onetest", - runas="nobody", - vagrant_provider="french", - different="very", - ) - mock_sdb.assert_called_with( - "sdb://vagrant_sdb_data/onetest?{}".format(path_nowhere), - "test1", - self.LOCAL_OPTS, - ) - mock_sdb.assert_any_call( - "sdb://vagrant_sdb_data/test1", expected, self.LOCAL_OPTS - ) - - def test_vagrant_get_vm_info(self): - testdict = {"testone": "one", "machine": "two"} - mock_sdb = MagicMock(return_value=testdict) - with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb}): - resp = vagrant.get_vm_info("test1") - self.assertEqual(resp, testdict) - - def test_vagrant_init_dict(self): - testdict = dict( - 
cwd="/tmp/anywhere", - machine="twotest", - runas="somebody", - vagrant_provider="english", - ) - expected = testdict.copy() - expected["name"] = "test2" - mock_sdb = MagicMock(return_value=None) - with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}): - vagrant.init("test2", vm=testdict) - mock_sdb.assert_any_call( - "sdb://vagrant_sdb_data/test2", expected, self.LOCAL_OPTS - ) - - def test_vagrant_init_arg_override(self): - testdict = dict( - cwd="/tmp/there", - machine="treetest", - runas="anybody", - vagrant_provider="spansh", - ) - mock_sdb = MagicMock(return_value=None) - with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}): - vagrant.init( - "test3", - cwd="/tmp", - machine="threetest", - runas="him", - vagrant_provider="polish", - vm=testdict, - ) - expected = dict( - name="test3", - cwd="/tmp", - machine="threetest", - runas="him", - vagrant_provider="polish", - ) - mock_sdb.assert_any_call( - "sdb://vagrant_sdb_data/test3", expected, self.LOCAL_OPTS - ) - - def test_vagrant_get_ssh_config_fails(self): - mock_sdb = MagicMock(return_value=None) - with patch.dict(vagrant.__utils__, {"sdb.sdb_set": mock_sdb}): - mock_sdb = MagicMock(return_value={}) - with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb}): - vagrant.init("test3", cwd="/tmp") - with self.assertRaises(salt.exceptions.SaltInvocationError): - vagrant.get_ssh_config("test3") # has not been started - - def test_vagrant_destroy(self): - path_mydir = os.path.join(os.sep, "my", "dir") - if salt.utils.platform.is_windows(): - path_mydir = "c:{}".format(path_mydir) - mock_cmd = MagicMock(return_value={"retcode": 0}) - with patch.dict(vagrant.__salt__, {"cmd.run_all": mock_cmd}): - mock_sdb = MagicMock(return_value=None) - with patch.dict(vagrant.__utils__, {"sdb.sdb_delete": mock_sdb}): - mock_sdb_get = MagicMock( - return_value={"machine": "macfour", "cwd": path_mydir} - ) - with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb_get}): - 
self.assertTrue(vagrant.destroy("test4")) - mock_sdb.assert_any_call( - "sdb://vagrant_sdb_data/macfour?{}".format(path_mydir), - self.LOCAL_OPTS, - ) - mock_sdb.assert_any_call( - "sdb://vagrant_sdb_data/test4", self.LOCAL_OPTS - ) - cmd = "vagrant destroy -f macfour" - mock_cmd.assert_called_with( - cmd, runas=None, cwd=path_mydir, output_loglevel="info" - ) - - def test_vagrant_start(self): - mock_cmd = MagicMock(return_value={"retcode": 0}) - with patch.dict(vagrant.__salt__, {"cmd.run_all": mock_cmd}): - mock_sdb_get = MagicMock( - return_value={ - "machine": "five", - "cwd": "/the/dir", - "runas": "me", - "vagrant_provider": "him", - } - ) - with patch.dict(vagrant.__utils__, {"sdb.sdb_get": mock_sdb_get}): - self.assertTrue(vagrant.start("test5")) - cmd = "vagrant up five --provider=him" - mock_cmd.assert_called_with( - cmd, runas="me", cwd="/the/dir", output_loglevel="info" - ) diff --git a/tests/unit/modules/test_vault.py b/tests/unit/modules/test_vault.py deleted file mode 100644 index 59aebae7faf2..000000000000 --- a/tests/unit/modules/test_vault.py +++ /dev/null @@ -1,188 +0,0 @@ -""" -Test case for the vault execution module -""" - - -import salt.modules.vault as vault -from salt.exceptions import CommandExecutionError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class TestVaultModule(LoaderModuleMockMixin, TestCase): - """ - Test case for the vault execution module - """ - - def setup_loader_modules(self): - return { - vault: { - "__opts__": { - "vault": { - "url": "http://127.0.0.1", - "auth": {"token": "test", "method": "token"}, - } - }, - "__grains__": {"id": "test-minion"}, - } - } - - def test_read_secret_v1(self): - """ - Test salt.modules.vault.read_secret function - """ - version = {"v2": False, "data": None, "metadata": None, "type": None} - mock_version = MagicMock(return_value=version) - mock_vault = MagicMock() - 
mock_vault.return_value.status_code = 200 - mock_vault.return_value.json.return_value = {"data": {"key": "test"}} - with patch.dict( - vault.__utils__, {"vault.make_request": mock_vault} - ), patch.dict(vault.__utils__, {"vault.is_v2": mock_version}): - vault_return = vault.read_secret("/secret/my/secret") - - self.assertDictEqual(vault_return, {"key": "test"}) - - def test_read_secret_v1_key(self): - """ - Test salt.modules.vault.read_secret function specifying key - """ - version = {"v2": False, "data": None, "metadata": None, "type": None} - mock_version = MagicMock(return_value=version) - mock_vault = MagicMock() - mock_vault.return_value.status_code = 200 - mock_vault.return_value.json.return_value = {"data": {"key": "somevalue"}} - with patch.dict( - vault.__utils__, {"vault.make_request": mock_vault} - ), patch.dict(vault.__utils__, {"vault.is_v2": mock_version}): - vault_return = vault.read_secret("/secret/my/secret", "key") - - self.assertEqual(vault_return, "somevalue") - - def test_read_secret_v2(self): - """ - Test salt.modules.vault.read_secret function for v2 of kv secret backend - """ - # given path secrets/mysecret generate v2 output - version = { - "v2": True, - "data": "secrets/data/mysecret", - "metadata": "secrets/metadata/mysecret", - "type": "kv", - } - mock_version = MagicMock(return_value=version) - mock_vault = MagicMock() - mock_vault.return_value.status_code = 200 - v2_return = { - "data": { - "data": {"akey": "avalue"}, - "metadata": { - "created_time": "2018-10-23T20:21:55.042755098Z", - "destroyed": False, - "version": 13, - "deletion_time": "", - }, - } - } - - mock_vault.return_value.json.return_value = v2_return - with patch.dict( - vault.__utils__, {"vault.make_request": mock_vault} - ), patch.dict(vault.__utils__, {"vault.is_v2": mock_version}): - # Validate metadata returned - vault_return = vault.read_secret("/secret/my/secret", metadata=True) - self.assertDictContainsSubset({"data": {"akey": "avalue"}}, vault_return) - # 
Validate just data returned - vault_return = vault.read_secret("/secret/my/secret") - self.assertDictContainsSubset({"akey": "avalue"}, vault_return) - - def test_read_secret_v2_key(self): - """ - Test salt.modules.vault.read_secret function for v2 of kv secret backend - with specified key - """ - # given path secrets/mysecret generate v2 output - version = { - "v2": True, - "data": "secrets/data/mysecret", - "metadata": "secrets/metadata/mysecret", - "type": "kv", - } - mock_version = MagicMock(return_value=version) - mock_vault = MagicMock() - mock_vault.return_value.status_code = 200 - v2_return = { - "data": { - "data": {"akey": "avalue"}, - "metadata": { - "created_time": "2018-10-23T20:21:55.042755098Z", - "destroyed": False, - "version": 13, - "deletion_time": "", - }, - } - } - - mock_vault.return_value.json.return_value = v2_return - with patch.dict( - vault.__utils__, {"vault.make_request": mock_vault} - ), patch.dict(vault.__utils__, {"vault.is_v2": mock_version}): - vault_return = vault.read_secret("/secret/my/secret", "akey") - - self.assertEqual(vault_return, "avalue") - - -class VaultDefaultTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for the default argument in the vault module - - NOTE: This test class is crafted such that the vault.make_request call will - always fail. If you want to add other unit tests, you should put them in a - separate class. 
- """ - - def setup_loader_modules(self): - return { - vault: { - "__grains__": {"id": "foo"}, - "__utils__": { - "vault.make_request": MagicMock(side_effect=Exception("FAILED")), - "vault.is_v2": MagicMock( - return_value={ - "v2": True, - "data": "secrets/data/mysecret", - "metadata": "secrets/metadata/mysecret", - "type": "kv", - } - ), - }, - }, - } - - def setUp(self): - self.path = "foo/bar/" - - def test_read_secret_with_default(self): - assert vault.read_secret(self.path, default="baz") == "baz" - - def test_read_secret_no_default(self): - try: - vault.read_secret(self.path) - except CommandExecutionError: - # This is expected - pass - else: - raise Exception("Should have raised a CommandExecutionError") - - def test_list_secrets_with_default(self): - assert vault.list_secrets(self.path, default=["baz"]) == ["baz"] - - def test_list_secrets_no_default(self): - try: - vault.list_secrets(self.path) - except CommandExecutionError: - # This is expected - pass - else: - raise Exception("Should have raised a CommandExecutionError") diff --git a/tests/unit/modules/test_virt.py b/tests/unit/modules/test_virt.py index 7e72d07b8e77..2fee41f8bd97 100644 --- a/tests/unit/modules/test_virt.py +++ b/tests/unit/modules/test_virt.py @@ -2,32 +2,27 @@ virt execution module unit tests """ -# pylint: disable=3rd-party-module-not-gated - - import datetime import os import shutil import tempfile import xml.etree.ElementTree as ET +import pytest + import salt.config import salt.modules.config as config import salt.modules.virt as virt import salt.syspaths import salt.utils.yaml from salt.exceptions import CommandExecutionError, SaltInvocationError - -# pylint: disable=import-error from tests.support.helpers import dedent from tests.support.mixins import LoaderModuleMockMixin from tests.support.mock import MagicMock, patch from tests.support.unit import TestCase -# pylint: disable=invalid-name,protected-access,attribute-defined-outside-init,too-many-public-methods,unused-argument 
- -class LibvirtMock(MagicMock): # pylint: disable=too-many-ancestors +class LibvirtMock(MagicMock): """ Libvirt library mock """ @@ -1882,6 +1877,7 @@ def test_diff_disks(self): ], ) + @pytest.mark.skip_on_fips_enabled_platform def test_init(self): """ Test init() function diff --git a/tests/unit/modules/test_win_iis.py b/tests/unit/modules/test_win_iis.py deleted file mode 100644 index 07ac7090c51c..000000000000 --- a/tests/unit/modules/test_win_iis.py +++ /dev/null @@ -1,889 +0,0 @@ -""" - :synopsis: Unit Tests for Windows IIS Module 'module.win_iis' - :platform: Windows - :maturity: develop - versionadded:: 2016.11.0 -""" -import salt.modules.win_iis as win_iis -import salt.utils.json -from salt.exceptions import SaltInvocationError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, call, patch -from tests.support.unit import TestCase - -APP_LIST = { - "testApp": { - "apppool": "MyTestPool", - "path": "/testApp", - "preload": False, - "protocols": ["http"], - "sourcepath": r"C:\inetpub\apps\testApp", - } -} - -APPPOOL_LIST = {"MyTestPool": {"applications": ["MyTestSite"], "state": "Started"}} - -BINDING_LIST = { - "*:80:": { - "certificatehash": None, - "certificatestorename": None, - "hostheader": None, - "ipaddress": "*", - "port": 80, - "protocol": "http", - "sslflags": 0, - }, - "*:443:mytestsite.local": { - "certificatehash": "9988776655443322111000AAABBBCCCDDDEEEFFF", - "certificatestorename": "My", - "hostheader": "mytestsite.local", - "ipaddress": "*", - "port": 443, - "protocol": "https", - "sslflags": 0, - }, -} - -SITE_LIST = { - "MyTestSite": { - "apppool": "MyTestPool", - "bindings": BINDING_LIST, - "id": 1, - "sourcepath": r"C:\inetpub\wwwroot", - "state": "Started", - } -} - -VDIR_LIST = {"TestVdir": {"sourcepath": r"C:\inetpub\vdirs\TestVdir"}} -NESTED_VDIR_LIST = { - "Test/Nested/Vdir": {"sourcepath": r"C:\inetpub\vdirs\NestedTestVdir"} -} - - -LIST_APPS_SRVMGR = { - "retcode": 0, - "stdout": 
salt.utils.json.dumps( - [ - { - "applicationPool": "MyTestPool", - "name": "testApp", - "path": "/testApp", - "PhysicalPath": r"C:\inetpub\apps\testApp", - "preloadEnabled": False, - "protocols": "http", - } - ] - ), -} - -LIST_APPPOOLS_SRVMGR = { - "retcode": 0, - "stdout": salt.utils.json.dumps( - [ - { - "name": "MyTestPool", - "state": "Started", - "Applications": {"value": ["MyTestSite"], "Count": 1}, - } - ] - ), -} - -LIST_VDIRS_SRVMGR = { - "retcode": 0, - "stdout": salt.utils.json.dumps( - [{"name": "TestVdir", "physicalPath": r"C:\inetpub\vdirs\TestVdir"}] - ), -} - -LIST_MORE_VDIRS_SRVMGR = { - "retcode": 0, - "stdout": salt.utils.json.dumps( - [ - {"name": "TestVdir", "physicalPath": r"C:\inetpub\vdirs\TestVdir"}, - { - "name": "Test/Nested/Vdir", - "physicalPath": r"C:\inetpub\vdirs\NestedTestVdir", - }, - ] - ), -} - -CONTAINER_SETTING = { - "retcode": 0, - "stdout": salt.utils.json.dumps([{"managedPipelineMode": "Integrated"}]), -} - -CERT_BINDING_INFO = "*:443:mytestsite.local" - - -class WinIisTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.win_iis - """ - - def setup_loader_modules(self): - return {win_iis: {}} - - def test_create_apppool(self): - """ - Test - Create an IIS application pool. - """ - with patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_apppools", MagicMock(return_value=dict()) - ), patch.dict( - win_iis.__salt__ - ): - self.assertTrue(win_iis.create_apppool("MyTestPool")) - - def test_list_apppools(self): - """ - Test - List all configured IIS application pools. - """ - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value=LIST_APPPOOLS_SRVMGR) - ): - self.assertEqual(win_iis.list_apppools(), APPPOOL_LIST) - - def test_remove_apppool(self): - """ - Test - Remove an IIS application pool. 
- """ - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_apppools", - MagicMock( - return_value={ - "MyTestPool": {"applications": list(), "state": "Started"} - } - ), - ): - self.assertTrue(win_iis.remove_apppool("MyTestPool")) - - def test_restart_apppool(self): - """ - Test - Restart an IIS application pool. - """ - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ): - self.assertTrue(win_iis.restart_apppool("MyTestPool")) - - def test_create_site(self): - """ - Test - Create a basic website in IIS. - """ - kwargs = { - "name": "MyTestSite", - "sourcepath": r"C:\inetpub\wwwroot", - "apppool": "MyTestPool", - "hostheader": "mytestsite.local", - "ipaddress": "*", - "port": 80, - "protocol": "http", - } - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_sites", MagicMock(return_value=dict()) - ), patch( - "salt.modules.win_iis.list_apppools", MagicMock(return_value=dict()) - ): - self.assertTrue(win_iis.create_site(**kwargs)) - - def test_create_site_failed(self): - """ - Test - Create a basic website in IIS using invalid data. - """ - kwargs = { - "name": "MyTestSite", - "sourcepath": r"C:\inetpub\wwwroot", - "apppool": "MyTestPool", - "hostheader": "mytestsite.local", - "ipaddress": "*", - "port": 80, - "protocol": "invalid-protocol-name", - } - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_sites", MagicMock(return_value=dict()) - ), patch( - "salt.modules.win_iis.list_apppools", MagicMock(return_value=dict()) - ): - self.assertRaises(SaltInvocationError, win_iis.create_site, **kwargs) - - def test_remove_site(self): - """ - Test - Delete a website from IIS. 
- """ - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch("salt.modules.win_iis.list_sites", MagicMock(return_value=SITE_LIST)): - self.assertTrue(win_iis.remove_site("MyTestSite")) - - def test_create_app(self): - """ - Test - Create an IIS application. - """ - kwargs = { - "name": "testApp", - "site": "MyTestSite", - "sourcepath": r"C:\inetpub\apps\testApp", - "apppool": "MyTestPool", - } - with patch.dict(win_iis.__salt__), patch( - "os.path.isdir", MagicMock(return_value=True) - ), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_apps", MagicMock(return_value=APP_LIST) - ): - self.assertTrue(win_iis.create_app(**kwargs)) - - def test_list_apps(self): - """ - Test - Get all configured IIS applications for the specified site. - """ - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value=LIST_APPS_SRVMGR) - ): - self.assertEqual(win_iis.list_apps("MyTestSite"), APP_LIST) - - def test_remove_app(self): - """ - Test - Remove an IIS application. - """ - kwargs = {"name": "otherApp", "site": "MyTestSite"} - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch("salt.modules.win_iis.list_apps", MagicMock(return_value=APP_LIST)): - self.assertTrue(win_iis.remove_app(**kwargs)) - - def test_create_binding(self): - """ - Test - Create an IIS binding. 
- """ - kwargs = { - "site": "MyTestSite", - "hostheader": "", - "ipaddress": "*", - "port": 80, - "protocol": "http", - "sslflags": 0, - } - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_bindings", MagicMock(return_value=BINDING_LIST) - ): - self.assertTrue(win_iis.create_binding(**kwargs)) - - def test_create_binding_failed(self): - """ - Test - Create an IIS binding using invalid data. - """ - kwargs = { - "site": "MyTestSite", - "hostheader": "", - "ipaddress": "*", - "port": 80, - "protocol": "invalid-protocol-name", - "sslflags": 999, - } - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_bindings", MagicMock(return_value=BINDING_LIST) - ): - self.assertRaises(SaltInvocationError, win_iis.create_binding, **kwargs) - - def test_list_bindings(self): - """ - Test - Get all configured IIS bindings for the specified site. - """ - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis.list_sites", MagicMock(return_value=SITE_LIST) - ): - self.assertEqual(win_iis.list_bindings("MyTestSite"), BINDING_LIST) - - def test_remove_binding(self): - """ - Test - Remove an IIS binding. - """ - kwargs = { - "site": "MyTestSite", - "hostheader": "myothertestsite.local", - "ipaddress": "*", - "port": 443, - } - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_bindings", MagicMock(return_value=BINDING_LIST) - ): - self.assertTrue(win_iis.remove_binding(**kwargs)) - - def test_create_vdir(self): - """ - Test - Create an IIS virtual directory. 
- """ - kwargs = { - "name": "TestVdir", - "site": "MyTestSite", - "sourcepath": r"C:\inetpub\vdirs\TestVdir", - } - with patch.dict(win_iis.__salt__), patch( - "os.path.isdir", MagicMock(return_value=True) - ), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_vdirs", MagicMock(return_value=VDIR_LIST) - ): - self.assertTrue(win_iis.create_vdir(**kwargs)) - - def test_list_vdirs(self): - """ - Test - Get configured IIS virtual directories. - """ - vdirs = {"TestVdir": {"sourcepath": r"C:\inetpub\vdirs\TestVdir"}} - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value=LIST_VDIRS_SRVMGR) - ): - self.assertEqual(win_iis.list_vdirs("MyTestSite"), vdirs) - - def test_remove_vdir(self): - """ - Test - Remove an IIS virtual directory. - """ - kwargs = {"name": "TestOtherVdir", "site": "MyTestSite"} - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch("salt.modules.win_iis.list_vdirs", MagicMock(return_value=VDIR_LIST)): - self.assertTrue(win_iis.remove_vdir(**kwargs)) - - def test_create_nested_vdir(self): - """ - Test - Create a nested IIS virtual directory. - """ - kwargs = { - "name": "Test/Nested/Vdir", - "site": "MyTestSite", - "sourcepath": r"C:\inetpub\vdirs\NestedTestVdir", - } - with patch.dict(win_iis.__salt__), patch( - "os.path.isdir", MagicMock(return_value=True) - ), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_vdirs", MagicMock(return_value=NESTED_VDIR_LIST) - ): - self.assertTrue(win_iis.create_vdir(**kwargs)) - - def test_list_nested_vdirs(self): - """ - Test - Get configured IIS virtual directories. 
- """ - vdirs = { - "TestVdir": {"sourcepath": r"C:\inetpub\vdirs\TestVdir"}, - "Test/Nested/Vdir": {"sourcepath": r"C:\inetpub\vdirs\NestedTestVdir"}, - } - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", - MagicMock(return_value=LIST_MORE_VDIRS_SRVMGR), - ): - self.assertEqual(win_iis.list_vdirs("MyTestSite"), vdirs) - - def test_create_cert_binding(self): - """ - Test - Assign a certificate to an IIS binding. - """ - kwargs = { - "name": "9988776655443322111000AAABBBCCCDDDEEEFFF", - "site": "MyTestSite", - "hostheader": "mytestsite.local", - "ipaddress": "*", - "port": 443, - } - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._list_certs", - MagicMock(return_value={"9988776655443322111000AAABBBCCCDDDEEEFFF": None}), - ), patch( - "salt.modules.win_iis._srvmgr", - MagicMock(return_value={"retcode": 0, "stdout": 10}), - ), patch( - "salt.utils.json.loads", - MagicMock(return_value=[{"MajorVersion": 10, "MinorVersion": 0}]), - ), patch( - "salt.modules.win_iis.list_bindings", MagicMock(return_value=BINDING_LIST) - ), patch( - "salt.modules.win_iis.list_cert_bindings", - MagicMock( - return_value={CERT_BINDING_INFO: BINDING_LIST[CERT_BINDING_INFO]} - ), - ): - self.assertTrue(win_iis.create_cert_binding(**kwargs)) - - def test_list_cert_bindings(self): - """ - Test - List certificate bindings for an IIS site. - """ - key = "*:443:mytestsite.local" - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis.list_sites", MagicMock(return_value=SITE_LIST) - ): - self.assertEqual( - win_iis.list_cert_bindings("MyTestSite"), {key: BINDING_LIST[key]} - ) - - def test_remove_cert_binding(self): - """ - Test - Remove a certificate from an IIS binding. 
- """ - kwargs = { - "name": "FFFEEEDDDCCCBBBAAA0001112233445566778899", - "site": "MyOtherTestSite", - "hostheader": "myothertestsite.local", - "ipaddress": "*", - "port": 443, - } - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.list_cert_bindings", - MagicMock( - return_value={CERT_BINDING_INFO: BINDING_LIST[CERT_BINDING_INFO]} - ), - ): - self.assertTrue(win_iis.remove_cert_binding(**kwargs)) - - def test_get_container_setting(self): - """ - Test - Get the value of the setting for the IIS container. - """ - kwargs = { - "name": "MyTestSite", - "container": "AppPools", - "settings": ["managedPipelineMode"], - } - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value=CONTAINER_SETTING) - ): - self.assertEqual( - win_iis.get_container_setting(**kwargs), - {"managedPipelineMode": "Integrated"}, - ) - - def test_set_container_setting(self): - """ - Test - Set the value of the setting for an IIS container. 
- """ - kwargs = { - "name": "MyTestSite", - "container": "AppPools", - "settings": {"managedPipelineMode": "Integrated"}, - } - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._srvmgr", MagicMock(return_value={"retcode": 0}) - ), patch( - "salt.modules.win_iis.get_container_setting", - MagicMock(return_value={"managedPipelineMode": "Integrated"}), - ): - self.assertTrue(win_iis.set_container_setting(**kwargs)) - - def test__collection_match_to_index(self): - bad_match = {"key_0": "value"} - first_match = {"key_1": "value"} - second_match = {"key_2": "value"} - collection = [first_match, second_match] - settings = [{"name": "enabled", "value": collection}] - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis.get_webconfiguration_settings", - MagicMock(return_value=settings), - ): - ret = win_iis._collection_match_to_index( - "pspath", "colfilter", "name", bad_match - ) - self.assertEqual(ret, -1) - ret = win_iis._collection_match_to_index( - "pspath", "colfilter", "name", first_match - ) - self.assertEqual(ret, 0) - ret = win_iis._collection_match_to_index( - "pspath", "colfilter", "name", second_match - ) - self.assertEqual(ret, 1) - - def test__prepare_settings(self): - simple_setting = {"name": "value", "filter": "value"} - collection_setting = {"name": "Collection[{yaml:\n\tdata}]", "filter": "value"} - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._collection_match_to_index", MagicMock(return_value=0) - ): - ret = win_iis._prepare_settings( - "pspath", - [ - simple_setting, - collection_setting, - {"invalid": "setting"}, - {"name": "filter-less_setting"}, - ], - ) - self.assertEqual(ret, [simple_setting, collection_setting]) - - @patch("salt.modules.win_iis.log") - def test_get_webconfiguration_settings_empty(self, mock_log): - ret = win_iis.get_webconfiguration_settings("name", settings=[]) - mock_log.warning.assert_called_once_with("No settings provided") - self.assertEqual(ret, {}) - - def 
test_get_webconfiguration_settings(self): - # Setup - name = "IIS" - collection_setting = {"name": "Collection[{yaml:\n\tdata}]", "filter": "value"} - filter_setting = { - "name": "enabled", - "filter": ( - "system.webServer / security / authentication / anonymousAuthentication" - ), - } - settings = [collection_setting, filter_setting] - - ps_cmd = [ - "$Settings = New-Object System.Collections.ArrayList;", - ] - for setting in settings: - ps_cmd.extend( - [ - "$Property = Get-WebConfigurationProperty -PSPath '{}'".format( - name - ), - "-Name '{name}' -Filter '{filter}' -ErrorAction Stop;".format( - filter=setting["filter"], name=setting["name"] - ), - "if (([String]::IsNullOrEmpty($Property) -eq $False) -and", - "($Property.GetType()).Name -eq 'ConfigurationAttribute') {", - "$Property = $Property | Select-Object", - "-ExpandProperty Value };", - "$Settings.add(@{{filter='{filter}';name='{name}';value=[String]" - " $Property}})| Out-Null;".format( - filter=setting["filter"], name=setting["name"] - ), - "$Property = $Null;", - ] - ) - ps_cmd.append("$Settings") - - # Execute - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._prepare_settings", MagicMock(return_value=settings) - ), patch( - "salt.modules.win_iis._srvmgr", - MagicMock(return_value={"retcode": 0, "stdout": "{}"}), - ): - ret = win_iis.get_webconfiguration_settings(name, settings=settings) - - # Verify - win_iis._srvmgr.assert_called_with(cmd=ps_cmd, return_json=True) - self.assertEqual(ret, {}) - - @patch("salt.modules.win_iis.log") - def test_set_webconfiguration_settings_empty(self, mock_log): - ret = win_iis.set_webconfiguration_settings("name", settings=[]) - mock_log.warning.assert_called_once_with("No settings provided") - self.assertEqual(ret, False) - - @patch("salt.modules.win_iis.log") - def test_set_webconfiguration_settings_no_changes(self, mock_log): - # Setup - name = "IIS" - setting = { - "name": "Collection[{yaml:\n\tdata}]", - "filter": ( - "system.webServer / 
security / authentication / anonymousAuthentication" - ), - "value": [], - } - settings = [setting] - - # Execute - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._prepare_settings", MagicMock(return_value=settings) - ), patch( - "salt.modules.win_iis._srvmgr", - MagicMock(return_value={"retcode": 0, "stdout": "{}"}), - ), patch( - "salt.modules.win_iis.get_webconfiguration_settings", - MagicMock(return_value=settings), - ): - ret = win_iis.set_webconfiguration_settings(name, settings=settings) - - # Verify - mock_log.debug.assert_called_with( - "Settings already contain the provided values." - ) - self.assertEqual(ret, True) - - @patch("salt.modules.win_iis.log") - def test_set_webconfiguration_settings_failed(self, mock_log): - # Setup - name = "IIS" - setting = { - "name": "Collection[{yaml:\n\tdata}]", - "filter": ( - "system.webServer / security / authentication / anonymousAuthentication" - ), - "value": [], - } - settings = [setting] - - # Execute - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._prepare_settings", MagicMock(return_value=settings) - ), patch( - "salt.modules.win_iis._srvmgr", - MagicMock(return_value={"retcode": 0, "stdout": "{}"}), - ), patch( - "salt.modules.win_iis.get_webconfiguration_settings", - MagicMock(side_effect=[[], [{"value": "unexpected_change!"}]]), - ): - - ret = win_iis.set_webconfiguration_settings(name, settings=settings) - - # Verify - self.assertEqual(ret, False) - mock_log.error.assert_called_with("Failed to change settings: %s", settings) - - @patch("salt.modules.win_iis.log") - def test_set_webconfiguration_settings(self, mock_log): - # Setup - name = "IIS" - setting = { - "name": "Collection[{yaml:\n\tdata}]", - "filter": ( - "system.webServer / security / authentication / anonymousAuthentication" - ), - "value": [], - } - settings = [setting] - - # Execute - with patch.dict(win_iis.__salt__), patch( - "salt.modules.win_iis._prepare_settings", MagicMock(return_value=settings) - ), 
patch( - "salt.modules.win_iis._srvmgr", - MagicMock(return_value={"retcode": 0, "stdout": "{}"}), - ), patch( - "salt.modules.win_iis.get_webconfiguration_settings", - MagicMock(side_effect=[[], settings]), - ): - ret = win_iis.set_webconfiguration_settings(name, settings=settings) - - # Verify - self.assertEqual(ret, True) - mock_log.debug.assert_called_with( - "Settings configured successfully: %s", settings - ) - - def test_get_webconfiguration_settings_no_settings(self): - self.assertEqual(win_iis.get_webconfiguration_settings("salt", {}), {}) - - def test_get_webconfiguration_settings_pass(self): - settings = [ - { - "name": "enabled", - "filter": ( - "system.webServer/security/authentication/anonymousAuthentication" - ), - } - ] - - ps_cmd_validate = [ - "Get-WebConfigurationProperty", - "-PSPath", - "'salt'", - "-Filter", - "'system.webServer/security/authentication/anonymousAuthentication'", - "-Name", - "'enabled'", - "-ErrorAction", - "Stop", - "|", - "Out-Null;", - ] - - ps_cmd = [ - "$Settings = New-Object System.Collections.ArrayList;", - "$Property = Get-WebConfigurationProperty -PSPath 'salt'", - "-Name 'enabled' -Filter" - " 'system.webServer/security/authentication/anonymousAuthentication'" - " -ErrorAction Stop;", - "if (([String]::IsNullOrEmpty($Property) -eq $False) -and", - "($Property.GetType()).Name -eq 'ConfigurationAttribute') {", - "$Property = $Property | Select-Object", - "-ExpandProperty Value };", - "$Settings.add(@{filter='system.webServer/security/authentication/anonymousAuthentication';name='enabled';value=[String]" - " $Property})| Out-Null;", - "$Property = $Null;", - "$Settings", - ] - - func_ret = {"name": "enabled", "value": True} - with patch.object( - win_iis, "_srvmgr", return_value={"retcode": 0, "stdout": "json data"} - ) as _srvmgr: - with patch.object( - win_iis.salt.utils.json, "loads", return_value=func_ret - ) as loads: - ret = win_iis.get_webconfiguration_settings("salt", settings) - - 
self.assertEqual(_srvmgr.call_count, 2) - self.assertEqual( - _srvmgr.mock_calls[0], call(cmd=ps_cmd_validate, return_json=True) - ) - self.assertEqual( - _srvmgr.mock_calls[1], call(cmd=ps_cmd, return_json=True) - ) - - loads.assert_called_once_with("json data", strict=False) - self.assertEqual(func_ret, ret) - - def test_set_webconfiguration_settings_no_settings(self): - self.assertEqual(win_iis.set_webconfiguration_settings("salt", {}), False) - - def test_set_webconfiguration_settings_pass(self): - settings = [ - { - "name": "enabled", - "filter": ( - "system.webServer/security/authentication/anonymousAuthentication" - ), - "value": False, - } - ] - - current_settings = [ - { - "name": "enabled", - "filter": ( - "system.webServer/security/authentication/anonymousAuthentication" - ), - "value": True, - } - ] - - new_settings = [ - { - "name": "enabled", - "filter": ( - "system.webServer/security/authentication/anonymousAuthentication" - ), - "value": False, - } - ] - - ps_cmd = [ - "Set-WebConfigurationProperty", - "-PSPath", - "'salt'", - "-Filter", - "'system.webServer/security/authentication/anonymousAuthentication'", - "-Name", - "'enabled'", - "-Value", - "'False';", - ] - - with patch.object( - win_iis, - "get_webconfiguration_settings", - side_effect=[current_settings, new_settings], - ) as get_webconfiguration_settings: - with patch.object( - win_iis, "_srvmgr", return_value={"retcode": 0} - ) as _srvmgr: - ret = win_iis.set_webconfiguration_settings("salt", settings) - - self.assertEqual(get_webconfiguration_settings.call_count, 2) - self.assertEqual( - get_webconfiguration_settings.mock_calls[0], - call(name="salt", settings=settings), - ) - self.assertEqual( - get_webconfiguration_settings.mock_calls[1], - call(name="salt", settings=settings), - ) - - _srvmgr.assert_called_once_with(ps_cmd) - - self.assertTrue(ret) - - def test_set_webconfiguration_settings_fail(self): - settings = [ - { - "name": "enabled", - "filter": ( - 
"system.webServer/security/authentication/anonymousAuthentication" - ), - "value": False, - } - ] - - current_settings = [ - { - "name": "enabled", - "filter": ( - "system.webServer/security/authentication/anonymousAuthentication" - ), - "value": True, - } - ] - - new_settings = [ - { - "name": "enabled", - "filter": ( - "system.webServer/security/authentication/anonymousAuthentication" - ), - "value": True, - } - ] - - ps_cmd = [ - "Set-WebConfigurationProperty", - "-PSPath", - "'salt'", - "-Filter", - "'system.webServer/security/authentication/anonymousAuthentication'", - "-Name", - "'enabled'", - "-Value", - "'False';", - ] - - with patch.object( - win_iis, - "get_webconfiguration_settings", - side_effect=[current_settings, new_settings], - ) as get_webconfiguration_settings: - with patch.object( - win_iis, "_srvmgr", return_value={"retcode": 0} - ) as _srvmgr: - ret = win_iis.set_webconfiguration_settings("salt", settings) - - self.assertEqual(get_webconfiguration_settings.call_count, 2) - self.assertEqual( - get_webconfiguration_settings.mock_calls[0], - call(name="salt", settings=settings), - ) - self.assertEqual( - get_webconfiguration_settings.mock_calls[1], - call(name="salt", settings=settings), - ) - - _srvmgr.assert_called_once_with(ps_cmd) - - self.assertFalse(ret) diff --git a/tests/unit/modules/test_win_ip.py b/tests/unit/modules/test_win_ip.py deleted file mode 100644 index a29fc6e0238f..000000000000 --- a/tests/unit/modules/test_win_ip.py +++ /dev/null @@ -1,377 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import pytest - -import salt.modules.win_ip as win_ip -from salt.exceptions import CommandExecutionError, SaltInvocationError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, call, patch -from tests.support.unit import TestCase - -ETHERNET_CONFIG = ( - 'Configuration for interface "Ethernet"\n' - "DHCP enabled: Yes\n" - "IP Address: 1.2.3.74\n" - "Subnet Prefix: 1.2.3.0/24 (mask 
255.255.255.0)\n" - "Default Gateway: 1.2.3.1\n" - "Gateway Metric: 0\n" - "InterfaceMetric: 20\n" - "DNS servers configured through DHCP: 1.2.3.4\n" - "Register with which suffix: Primary only\n" - "WINS servers configured through DHCP: None\n" -) - -ETHERNET_ENABLE = ( - "Ethernet\nType: Dedicated\nAdministrative state: Enabled\nConnect state: Connected" -) - - -class WinShadowTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.win_ip - """ - - def setup_loader_modules(self): - return {win_ip: {}} - - # 'raw_interface_configs' function tests: 1 - - def test_raw_interface_configs(self): - """ - Test if it return raw configs for all interfaces. - """ - mock_cmd = MagicMock(return_value=ETHERNET_CONFIG) - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertEqual(win_ip.raw_interface_configs(), ETHERNET_CONFIG) - - # 'get_all_interfaces' function tests: 1 - - def test_get_all_interfaces(self): - """ - Test if it return configs for all interfaces. - """ - ret = { - "Ethernet": { - "DHCP enabled": "Yes", - "DNS servers configured through DHCP": ["1.2.3.4"], - "Default Gateway": "1.2.3.1", - "Gateway Metric": "0", - "InterfaceMetric": "20", - "Register with which suffix": "Primary only", - "WINS servers configured through DHCP": ["None"], - "ip_addrs": [ - { - "IP Address": "1.2.3.74", - "Netmask": "255.255.255.0", - "Subnet": "1.2.3.0/24", - } - ], - } - } - - mock_cmd = MagicMock(return_value=ETHERNET_CONFIG) - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertDictEqual(win_ip.get_all_interfaces(), ret) - - # 'get_interface' function tests: 1 - - def test_get_interface(self): - """ - Test if it return the configuration of a network interface. 
- """ - ret = { - "DHCP enabled": "Yes", - "DNS servers configured through DHCP": ["1.2.3.4"], - "Default Gateway": "1.2.3.1", - "Gateway Metric": "0", - "InterfaceMetric": "20", - "Register with which suffix": "Primary only", - "WINS servers configured through DHCP": ["None"], - "ip_addrs": [ - { - "IP Address": "1.2.3.74", - "Netmask": "255.255.255.0", - "Subnet": "1.2.3.0/24", - } - ], - } - - mock_cmd = MagicMock(return_value=ETHERNET_CONFIG) - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertDictEqual(win_ip.get_interface("Ethernet"), ret) - - # 'is_enabled' function tests: 1 - - def test_is_enabled(self): - """ - Test if it returns `True` if interface is enabled, otherwise `False`. - """ - mock_cmd = MagicMock(side_effect=[ETHERNET_ENABLE, ""]) - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertTrue(win_ip.is_enabled("Ethernet")) - self.assertRaises(CommandExecutionError, win_ip.is_enabled, "Ethernet") - - # 'is_disabled' function tests: 1 - - def test_is_disabled(self): - """ - Test if it returns `True` if interface is disabled, otherwise `False`. - """ - mock_cmd = MagicMock(return_value=ETHERNET_ENABLE) - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertFalse(win_ip.is_disabled("Ethernet")) - - # 'enable' function tests: 1 - - def test_enable(self): - """ - Test if it enable an interface. - """ - # Test with enabled interface - with patch.object(win_ip, "is_enabled", return_value=True): - self.assertTrue(win_ip.enable("Ethernet")) - - mock_cmd = MagicMock() - with patch.object(win_ip, "is_enabled", side_effect=[False, True]), patch.dict( - win_ip.__salt__, {"cmd.run": mock_cmd} - ): - self.assertTrue(win_ip.enable("Ethernet")) - - mock_cmd.called_once_with( - [ - "netsh", - "interface", - "set", - "interface", - "name=Ethernet", - "admin=ENABLED", - ], - python_shell=False, - ) - - # 'disable' function tests: 1 - - def test_disable(self): - """ - Test if it disable an interface. 
- """ - with patch.object(win_ip, "is_disabled", return_value=True): - self.assertTrue(win_ip.disable("Ethernet")) - - mock_cmd = MagicMock() - with patch.object(win_ip, "is_disabled", side_effect=[False, True]), patch.dict( - win_ip.__salt__, {"cmd.run": mock_cmd} - ): - self.assertTrue(win_ip.disable("Ethernet")) - - mock_cmd.called_once_with( - [ - "netsh", - "interface", - "set", - "interface", - "name=Ethernet", - "admin=DISABLED", - ], - python_shell=False, - ) - - # 'get_subnet_length' function tests: 1 - - def test_get_subnet_length(self): - """ - Test if it disable an interface. - """ - self.assertEqual(win_ip.get_subnet_length("255.255.255.0"), 24) - self.assertRaises(SaltInvocationError, win_ip.get_subnet_length, "255.255.0") - - # 'set_static_ip' function tests: 1 - - @pytest.mark.slow_test - def test_set_static_ip(self): - """ - Test if it set static IP configuration on a Windows NIC. - """ - self.assertRaises( - SaltInvocationError, - win_ip.set_static_ip, - "Local Area Connection", - "10.1.2/24", - ) - - mock_cmd = MagicMock(return_value=ETHERNET_CONFIG) - mock_all = MagicMock(return_value={"retcode": 1, "stderr": "Error"}) - with patch.dict( - win_ip.__salt__, {"cmd.run": mock_cmd, "cmd.run_all": mock_all} - ): - self.assertRaises( - CommandExecutionError, - win_ip.set_static_ip, - "Ethernet", - "1.2.3.74/24", - append=True, - ) - self.assertRaises( - CommandExecutionError, win_ip.set_static_ip, "Ethernet", "1.2.3.74/24" - ) - - mock_all = MagicMock(return_value={"retcode": 0}) - with patch.dict( - win_ip.__salt__, {"cmd.run": mock_cmd, "cmd.run_all": mock_all} - ): - self.assertDictEqual( - win_ip.set_static_ip("Local Area Connection", "1.2.3.74/24"), {} - ) - self.assertDictEqual( - win_ip.set_static_ip("Ethernet", "1.2.3.74/24"), - { - "Address Info": { - "IP Address": "1.2.3.74", - "Netmask": "255.255.255.0", - "Subnet": "1.2.3.0/24", - } - }, - ) - - # 'set_dhcp_ip' function tests: 1 - - def test_set_dhcp_ip(self): - """ - Test if it set 
Windows NIC to get IP from DHCP. - """ - mock_cmd = MagicMock(return_value=ETHERNET_CONFIG) - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertDictEqual( - win_ip.set_dhcp_ip("Ethernet"), - {"DHCP enabled": "Yes", "Interface": "Ethernet"}, - ) - - # 'set_static_dns' function tests: 1 - - def test_set_static_dns(self): - """ - Test if it set static DNS configuration on a Windows NIC. - """ - mock_cmd = MagicMock() - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertDictEqual( - win_ip.set_static_dns("Ethernet", "192.168.1.252", "192.168.1.253"), - { - "DNS Server": ("192.168.1.252", "192.168.1.253"), - "Interface": "Ethernet", - }, - ) - mock_cmd.assert_has_calls( - [ - call( - [ - "netsh", - "interface", - "ip", - "set", - "dns", - "name=Ethernet", - "source=static", - "address=192.168.1.252", - "register=primary", - ], - python_shell=False, - ), - call( - [ - "netsh", - "interface", - "ip", - "add", - "dns", - "name=Ethernet", - "address=192.168.1.253", - "index=2", - ], - python_shell=False, - ), - ] - ) - - def test_set_static_dns_clear(self): - """ - Test if it set static DNS configuration on a Windows NIC. - """ - mock_cmd = MagicMock() - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertDictEqual( - win_ip.set_static_dns("Ethernet", []), - {"DNS Server": [], "Interface": "Ethernet"}, - ) - mock_cmd.assert_called_once_with( - [ - "netsh", - "interface", - "ip", - "set", - "dns", - "name=Ethernet", - "source=static", - "address=none", - ], - python_shell=False, - ) - - def test_set_static_dns_no_action(self): - """ - Test if it set static DNS configuration on a Windows NIC. 
- """ - # Test passing nothing - self.assertDictEqual( - win_ip.set_static_dns("Ethernet"), - {"DNS Server": "No Changes", "Interface": "Ethernet"}, - ) - # Test passing None - self.assertDictEqual( - win_ip.set_static_dns("Ethernet", None), - {"DNS Server": "No Changes", "Interface": "Ethernet"}, - ) - - # Test passing string None - self.assertDictEqual( - win_ip.set_static_dns("Ethernet", "None"), - {"DNS Server": "No Changes", "Interface": "Ethernet"}, - ) - - # 'set_dhcp_dns' function tests: 1 - - def test_set_dhcp_dns(self): - """ - Test if it set DNS source to DHCP on Windows. - """ - mock_cmd = MagicMock(return_value=ETHERNET_CONFIG) - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertDictEqual( - win_ip.set_dhcp_dns("Ethernet"), - {"DNS Server": "DHCP", "Interface": "Ethernet"}, - ) - - # 'set_dhcp_all' function tests: 1 - - def test_set_dhcp_all(self): - """ - Test if it set both IP Address and DNS to DHCP. - """ - mock_cmd = MagicMock(return_value=ETHERNET_CONFIG) - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertDictEqual( - win_ip.set_dhcp_all("Ethernet"), - {"Interface": "Ethernet", "DNS Server": "DHCP", "DHCP enabled": "Yes"}, - ) - - # 'get_default_gateway' function tests: 1 - - def test_get_default_gateway(self): - """ - Test if it set DNS source to DHCP on Windows. 
- """ - mock_cmd = MagicMock(return_value=ETHERNET_CONFIG) - with patch.dict(win_ip.__salt__, {"cmd.run": mock_cmd}): - self.assertEqual(win_ip.get_default_gateway(), "1.2.3.1") diff --git a/tests/unit/modules/test_win_license.py b/tests/unit/modules/test_win_license.py deleted file mode 100644 index f80a37815744..000000000000 --- a/tests/unit/modules/test_win_license.py +++ /dev/null @@ -1,90 +0,0 @@ -import salt.modules.win_license as win_license -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class LicenseTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - return {win_license: {}} - - def test_installed(self): - """ - Test to see if the given license key is installed - """ - mock = MagicMock(return_value="Partial Product Key: ABCDE") - with patch.dict(win_license.__salt__, {"cmd.run": mock}): - out = win_license.installed("AAAAA-AAAAA-AAAAA-AAAA-AAAAA-ABCDE") - mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /dli") - self.assertTrue(out) - - def test_installed_diff(self): - """ - Test to see if the given license key is installed when the key is different - """ - mock = MagicMock(return_value="Partial Product Key: 12345") - with patch.dict(win_license.__salt__, {"cmd.run": mock}): - out = win_license.installed("AAAAA-AAAAA-AAAAA-AAAA-AAAAA-ABCDE") - mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /dli") - self.assertFalse(out) - - def test_install(self): - """ - Test installing the given product key - """ - mock = MagicMock() - with patch.dict(win_license.__salt__, {"cmd.run": mock}): - win_license.install("AAAAA-AAAAA-AAAAA-AAAA-AAAAA-ABCDE") - mock.assert_called_once_with( - r"cscript C:\Windows\System32\slmgr.vbs /ipk " - "AAAAA-AAAAA-AAAAA-AAAA-AAAAA-ABCDE" - ) - - def test_uninstall(self): - """ - Test uninstalling the given product key - """ - mock = MagicMock() - with 
patch.dict(win_license.__salt__, {"cmd.run": mock}): - win_license.uninstall() - mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /upk") - - def test_activate(self): - """ - Test activating the current product key - """ - mock = MagicMock() - with patch.dict(win_license.__salt__, {"cmd.run": mock}): - win_license.activate() - mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /ato") - - def test_licensed(self): - """ - Test checking if the minion is licensed - """ - mock = MagicMock(return_value="License Status: Licensed") - with patch.dict(win_license.__salt__, {"cmd.run": mock}): - win_license.licensed() - mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /dli") - - def test_info(self): - """ - Test getting the info about the current license key - """ - expected = { - "description": "Prof", - "licensed": True, - "name": "Win7", - "partial_key": "12345", - } - - mock = MagicMock( - return_value=( - "Name: Win7\r\nDescription: Prof\r\nPartial Product Key: 12345\r\n" - "License Status: Licensed" - ) - ) - with patch.dict(win_license.__salt__, {"cmd.run": mock}): - out = win_license.info() - mock.assert_called_once_with(r"cscript C:\Windows\System32\slmgr.vbs /dli") - self.assertEqual(out, expected) diff --git a/tests/unit/modules/test_win_pki.py b/tests/unit/modules/test_win_pki.py deleted file mode 100644 index 883a69e8c848..000000000000 --- a/tests/unit/modules/test_win_pki.py +++ /dev/null @@ -1,169 +0,0 @@ -""" - :synopsis: Unit Tests for Windows PKI Module 'module.win_pki' - :platform: Windows - :maturity: develop - .. 
versionadded:: 2017.7.0 -""" - - -import salt.modules.win_pki as win_pki -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -CERT_PATH = r"C:\certs\testdomain.local.cer" -THUMBPRINT = "9988776655443322111000AAABBBCCCDDDEEEFFF" - -CERTS = { - THUMBPRINT: { - "dnsnames": ["testdomain.local"], - "serialnumber": "0123456789AABBCCDD", - "subject": "CN=testdomain.local, OU=testou, O=testorg, S=California, C=US", - "thumbprint": THUMBPRINT, - "version": 3, - } -} - -STORES = { - "CurrentUser": [ - "AuthRoot", - "CA", - "ClientAuthIssuer", - "Disallowed", - "MSIEHistoryJournal", - "My", - "Root", - "SmartCardRoot", - "Trust", - "TrustedPeople", - "TrustedPublisher", - "UserDS", - ], - "LocalMachine": [ - "AuthRoot", - "CA", - "ClientAuthIssuer", - "Disallowed", - "My", - "Remote Desktop", - "Root", - "SmartCardRoot", - "Trust", - "TrustedDevices", - "TrustedPeople", - "TrustedPublisher", - "WebHosting", - ], -} - -JSON_CERTS = [ - { - "DnsNameList": [ - {"Punycode": "testdomain.local", "Unicode": "testdomain.local"} - ], - "SerialNumber": "0123456789AABBCCDD", - "Subject": "CN=testdomain.local, OU=testou, O=testorg, S=California, C=US", - "Thumbprint": "9988776655443322111000AAABBBCCCDDDEEEFFF", - "Version": 3, - } -] - -JSON_STORES = [ - {"LocationName": "CurrentUser", "StoreNames": STORES["CurrentUser"]}, - {"LocationName": "LocalMachine", "StoreNames": STORES["LocalMachine"]}, -] - - -class WinPkiTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.win_pki - """ - - def setup_loader_modules(self): - return {win_pki: {}} - - def test_get_stores(self): - """ - Test - Get the certificate location contexts and their corresponding stores. 
- """ - with patch.dict(win_pki.__salt__), patch( - "salt.modules.win_pki._cmd_run", MagicMock(return_value=JSON_STORES) - ): - self.assertEqual(win_pki.get_stores(), STORES) - - def test_get_certs(self): - """ - Test - Get the available certificates in the given store. - """ - with patch.dict(win_pki.__salt__), patch( - "salt.modules.win_pki._cmd_run", MagicMock(return_value=JSON_CERTS) - ), patch( - "salt.modules.win_pki._validate_cert_path", MagicMock(return_value=None) - ): - self.assertEqual(win_pki.get_certs(), CERTS) - - def test_get_cert_file(self): - """ - Test - Get the details of the certificate file. - """ - kwargs = {"name": CERT_PATH} - with patch.dict(win_pki.__salt__), patch( - "os.path.isfile", MagicMock(return_value=True) - ), patch("salt.modules.win_pki._cmd_run", MagicMock(return_value=JSON_CERTS)): - self.assertEqual(win_pki.get_cert_file(**kwargs), CERTS[THUMBPRINT]) - - def test_import_cert(self): - """ - Test - Import the certificate file into the given certificate store. - """ - kwargs = {"name": CERT_PATH} - mock_value = MagicMock(return_value=CERT_PATH) - with patch.dict(win_pki.__salt__, {"cp.cache_file": mock_value}), patch( - "salt.modules.win_pki._cmd_run", MagicMock(return_value=JSON_CERTS) - ), patch( - "salt.modules.win_pki._validate_cert_path", MagicMock(return_value=None) - ), patch( - "salt.modules.win_pki.get_cert_file", - MagicMock(return_value=CERTS[THUMBPRINT]), - ), patch( - "salt.modules.win_pki.get_certs", MagicMock(return_value=CERTS) - ): - self.assertTrue(win_pki.import_cert(**kwargs)) - - def test_export_cert(self): - """ - Test - Export the certificate to a file from the given certificate store. 
- """ - kwargs = {"name": CERT_PATH, "thumbprint": THUMBPRINT} - with patch.dict(win_pki.__salt__), patch( - "salt.modules.win_pki._cmd_run", MagicMock(return_value="True") - ), patch( - "salt.modules.win_pki._validate_cert_format", MagicMock(return_value=None) - ), patch( - "salt.modules.win_pki._validate_cert_path", MagicMock(return_value=None) - ): - self.assertTrue(win_pki.export_cert(**kwargs)) - - def test_test_cert(self): - """ - Test - Check the certificate for validity. - """ - with patch.dict(win_pki.__salt__), patch( - "salt.modules.win_pki._cmd_run", MagicMock(return_value="True") - ), patch( - "salt.modules.win_pki._validate_cert_path", MagicMock(return_value=None) - ): - self.assertTrue(win_pki.test_cert(thumbprint=THUMBPRINT)) - - def test_remove_cert(self): - """ - Test - Remove the certificate from the given certificate store. - """ - with patch.dict(win_pki.__salt__), patch( - "salt.modules.win_pki._cmd_run", MagicMock(return_value=None) - ), patch( - "salt.modules.win_pki._validate_cert_path", MagicMock(return_value=None) - ), patch( - "salt.modules.win_pki.get_certs", MagicMock(return_value=CERTS) - ): - self.assertTrue(win_pki.remove_cert(thumbprint=THUMBPRINT[::-1])) diff --git a/tests/unit/modules/test_win_powercfg.py b/tests/unit/modules/test_win_powercfg.py deleted file mode 100644 index 759284ee41e6..000000000000 --- a/tests/unit/modules/test_win_powercfg.py +++ /dev/null @@ -1,270 +0,0 @@ -import salt.modules.win_powercfg as powercfg -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, call, patch -from tests.support.unit import TestCase - - -class PowerCfgTestCase(TestCase, LoaderModuleMockMixin): - """ - Validate the powercfg state - """ - - def setup_loader_modules(self): - return {powercfg: {"__grains__": {"osrelease": 8}}} - - query_output = """Subgroup GUID: 238c9fa8-0aad-41ed-83f4-97be242c8f20 (Hibernate) - GUID Alias: SUB_SLEEP - Power Setting GUID: 29f6c1db-86da-48c5-9fdb-f2b67b1f44da 
(Hibernate after) - GUID Alias: HIBERNATEIDLE - Minimum Possible Setting: 0x00000000 - Maximum Possible Setting: 0xffffffff - Possible Settings increment: 0x00000001 - Possible Settings units: Seconds - Current AC Power Setting Index: 0x00000708 - Current DC Power Setting Index: 0x00000384""" - - def test_set_monitor_timeout(self): - """ - Test to make sure we can set the monitor timeout value - """ - mock = MagicMock(return_value=0) - mock.side_effect = [ - "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", - self.query_output, - ] - - mock_retcode = MagicMock(return_value=0) - - with patch.dict(powercfg.__salt__, {"cmd.run": mock}): - with patch.dict(powercfg.__salt__, {"cmd.retcode": mock_retcode}): - powercfg.set_monitor_timeout(0, "dc") - mock.assert_called_once_with( - "powercfg /getactivescheme", python_shell=False - ) - mock_retcode.assert_called_once_with( - "powercfg /setdcvalueindex 381b4222-f694-41f0-9685-ff5bb260df2e" - " SUB_VIDEO VIDEOIDLE 0", - python_shell=False, - ) - - def test_set_disk_timeout(self): - """ - Test to make sure we can set the disk timeout value - """ - mock = MagicMock() - mock.side_effect = [ - "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", - self.query_output, - ] - - mock_retcode = MagicMock(return_value=0) - - with patch.dict(powercfg.__salt__, {"cmd.run": mock}): - with patch.dict(powercfg.__salt__, {"cmd.retcode": mock_retcode}): - powercfg.set_disk_timeout(0, "dc") - mock.assert_called_once_with( - "powercfg /getactivescheme", python_shell=False - ) - mock_retcode.assert_called_once_with( - "powercfg /setdcvalueindex 381b4222-f694-41f0-9685-ff5bb260df2e" - " SUB_DISK DISKIDLE 0", - python_shell=False, - ) - - def test_set_standby_timeout(self): - """ - Test to make sure we can set the standby timeout value - """ - mock = MagicMock(return_value=0) - mock.side_effect = [ - "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", - self.query_output, - ] - - mock_retcode = 
MagicMock(return_value=0) - - with patch.dict(powercfg.__salt__, {"cmd.run": mock}): - with patch.dict(powercfg.__salt__, {"cmd.retcode": mock_retcode}): - powercfg.set_standby_timeout(0, "dc") - mock.assert_called_once_with( - "powercfg /getactivescheme", python_shell=False - ) - mock_retcode.assert_called_once_with( - "powercfg /setdcvalueindex 381b4222-f694-41f0-9685-ff5bb260df2e" - " SUB_SLEEP STANDBYIDLE 0", - python_shell=False, - ) - - def test_set_hibernate_timeout(self): - """ - Test to make sure we can set the hibernate timeout value - """ - mock = MagicMock(return_value=0) - mock.side_effect = [ - "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", - self.query_output, - ] - - mock_retcode = MagicMock(return_value=0) - - with patch.dict(powercfg.__salt__, {"cmd.run": mock}): - with patch.dict(powercfg.__salt__, {"cmd.retcode": mock_retcode}): - powercfg.set_hibernate_timeout(0, "dc") - mock.assert_called_once_with( - "powercfg /getactivescheme", python_shell=False - ) - mock_retcode.assert_called_once_with( - "powercfg /setdcvalueindex 381b4222-f694-41f0-9685-ff5bb260df2e" - " SUB_SLEEP HIBERNATEIDLE 0", - python_shell=False, - ) - - def test_get_monitor_timeout(self): - """ - Test to make sure we can get the monitor timeout value - """ - mock = MagicMock() - mock.side_effect = [ - "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", - self.query_output, - ] - - with patch.dict(powercfg.__salt__, {"cmd.run": mock}): - ret = powercfg.get_monitor_timeout() - calls = [ - call("powercfg /getactivescheme", python_shell=False), - call( - "powercfg /q 381b4222-f694-41f0-9685-ff5bb260df2e SUB_VIDEO" - " VIDEOIDLE", - python_shell=False, - ), - ] - mock.assert_has_calls(calls) - - self.assertEqual({"ac": 30, "dc": 15}, ret) - - def test_get_disk_timeout(self): - """ - Test to make sure we can get the disk timeout value - """ - mock = MagicMock() - mock.side_effect = [ - "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e 
(Balanced)", - self.query_output, - ] - - with patch.dict(powercfg.__salt__, {"cmd.run": mock}): - ret = powercfg.get_disk_timeout() - calls = [ - call("powercfg /getactivescheme", python_shell=False), - call( - "powercfg /q 381b4222-f694-41f0-9685-ff5bb260df2e SUB_DISK" - " DISKIDLE", - python_shell=False, - ), - ] - mock.assert_has_calls(calls) - - self.assertEqual({"ac": 30, "dc": 15}, ret) - - def test_get_standby_timeout(self): - """ - Test to make sure we can get the standby timeout value - """ - mock = MagicMock() - mock.side_effect = [ - "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", - self.query_output, - ] - - with patch.dict(powercfg.__salt__, {"cmd.run": mock}): - ret = powercfg.get_standby_timeout() - calls = [ - call("powercfg /getactivescheme", python_shell=False), - call( - "powercfg /q 381b4222-f694-41f0-9685-ff5bb260df2e SUB_SLEEP" - " STANDBYIDLE", - python_shell=False, - ), - ] - mock.assert_has_calls(calls) - - self.assertEqual({"ac": 30, "dc": 15}, ret) - - def test_get_hibernate_timeout(self): - """ - Test to make sure we can get the hibernate timeout value - """ - mock = MagicMock() - mock.side_effect = [ - "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", - self.query_output, - ] - - with patch.dict(powercfg.__salt__, {"cmd.run": mock}): - ret = powercfg.get_hibernate_timeout() - calls = [ - call("powercfg /getactivescheme", python_shell=False), - call( - "powercfg /q 381b4222-f694-41f0-9685-ff5bb260df2e SUB_SLEEP" - " HIBERNATEIDLE", - python_shell=False, - ), - ] - mock.assert_has_calls(calls) - - self.assertEqual({"ac": 30, "dc": 15}, ret) - - def test_windows_7(self): - """ - Test to make sure we can get the hibernate timeout value on windows 7 - """ - mock = MagicMock() - mock.side_effect = [ - "Power Scheme GUID: 381b4222-f694-41f0-9685-ff5bb260df2e (Balanced)", - self.query_output, - ] - - with patch.dict(powercfg.__salt__, {"cmd.run": mock}): - with patch.dict(powercfg.__grains__, 
{"osrelease": "7"}): - ret = powercfg.get_hibernate_timeout() - calls = [ - call("powercfg /getactivescheme", python_shell=False), - call( - "powercfg /q 381b4222-f694-41f0-9685-ff5bb260df2e SUB_SLEEP", - python_shell=False, - ), - ] - mock.assert_has_calls(calls) - - self.assertEqual({"ac": 30, "dc": 15}, ret) - - def test_set_hibernate_timeout_scheme(self): - """ - Test to make sure we can set the hibernate timeout value - """ - mock = MagicMock(return_value=0) - mock.side_effect = [self.query_output] - - with patch.dict(powercfg.__salt__, {"cmd.retcode": mock}): - powercfg.set_hibernate_timeout(0, "dc", scheme="SCHEME_MIN") - mock.assert_called_once_with( - "powercfg /setdcvalueindex SCHEME_MIN SUB_SLEEP HIBERNATEIDLE 0", - python_shell=False, - ) - - def test_get_hibernate_timeout_scheme(self): - """ - Test to make sure we can get the hibernate timeout value with a - specified scheme - """ - mock = MagicMock() - mock.side_effect = [self.query_output] - - with patch.dict(powercfg.__salt__, {"cmd.run": mock}): - ret = powercfg.get_hibernate_timeout(scheme="SCHEME_MIN") - mock.assert_called_once_with( - "powercfg /q SCHEME_MIN SUB_SLEEP HIBERNATEIDLE", python_shell=False - ) - - self.assertEqual({"ac": 30, "dc": 15}, ret) diff --git a/tests/unit/modules/test_win_service.py b/tests/unit/modules/test_win_service.py deleted file mode 100644 index b52e986b8c8f..000000000000 --- a/tests/unit/modules/test_win_service.py +++ /dev/null @@ -1,370 +0,0 @@ -""" - :codeauthor: Rahul Handay -""" - -import pytest - -import salt.modules.win_service as win_service -import salt.utils.path -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -try: - import pywintypes - import win32serviceutil - - WINAPI = True -except ImportError: - WINAPI = False - - -class WinServiceTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.win_service - """ - - def 
setup_loader_modules(self): - return {win_service: {}} - - def test_get_enabled(self): - """ - Test to return the enabled services - """ - mock = MagicMock( - return_value=[ - {"ServiceName": "spongebob"}, - {"ServiceName": "squarepants"}, - {"ServiceName": "patrick"}, - ] - ) - with patch.object(win_service, "_get_services", mock): - mock_info = MagicMock( - side_effect=[ - {"StartType": "Auto"}, - {"StartType": "Manual"}, - {"StartType": "Disabled"}, - ] - ) - with patch.object(win_service, "info", mock_info): - self.assertListEqual(win_service.get_enabled(), ["spongebob"]) - - def test_get_disabled(self): - """ - Test to return the disabled services - """ - mock = MagicMock( - return_value=[ - {"ServiceName": "spongebob"}, - {"ServiceName": "squarepants"}, - {"ServiceName": "patrick"}, - ] - ) - with patch.object(win_service, "_get_services", mock): - mock_info = MagicMock( - side_effect=[ - {"StartType": "Auto"}, - {"StartType": "Manual"}, - {"StartType": "Disabled"}, - ] - ) - with patch.object(win_service, "info", mock_info): - self.assertListEqual( - win_service.get_disabled(), ["patrick", "squarepants"] - ) - - def test_available(self): - """ - Test to Returns ``True`` if the specified service - is available, otherwise returns ``False`` - """ - mock = MagicMock(return_value=["c", "a", "b"]) - with patch.object(win_service, "get_all", mock): - self.assertTrue(win_service.available("a")) - - def test_missing(self): - """ - Test to the inverse of service.available - """ - mock = MagicMock(return_value=["c", "a", "b"]) - with patch.object(win_service, "get_all", mock): - self.assertTrue(win_service.missing("d")) - - def test_get_all(self): - """ - Test to return all installed services - """ - mock = MagicMock( - return_value=[ - {"ServiceName": "spongebob"}, - {"ServiceName": "squarepants"}, - {"ServiceName": "patrick"}, - ] - ) - with patch.object(win_service, "_get_services", mock): - self.assertListEqual( - win_service.get_all(), ["patrick", "spongebob", 
"squarepants"] - ) - - def test_get_service_name(self): - """ - Test to the Display Name is what is displayed - in Windows when services.msc is executed. - """ - mock = MagicMock( - return_value=[ - {"ServiceName": "spongebob", "DisplayName": "Sponge Bob"}, - {"ServiceName": "squarepants", "DisplayName": "Square Pants"}, - {"ServiceName": "patrick", "DisplayName": "Patrick the Starfish"}, - ] - ) - with patch.object(win_service, "_get_services", mock): - self.assertDictEqual( - win_service.get_service_name(), - { - "Patrick the Starfish": "patrick", - "Sponge Bob": "spongebob", - "Square Pants": "squarepants", - }, - ) - self.assertDictEqual( - win_service.get_service_name("patrick"), - {"Patrick the Starfish": "patrick"}, - ) - - @pytest.mark.skipif(not WINAPI, reason="win32serviceutil not available") - @pytest.mark.slow_test - def test_start(self): - """ - Test to start the specified service - """ - mock_true = MagicMock(return_value=True) - mock_false = MagicMock(return_value=False) - mock_info = MagicMock(side_effect=[{"Status": "Running"}]) - - with patch.object(win32serviceutil, "StartService", mock_true), patch.object( - win_service, "disabled", mock_false - ), patch.object(win_service, "info", mock_info): - self.assertTrue(win_service.start("spongebob")) - - mock_info = MagicMock( - side_effect=[ - {"Status": "Stopped", "Status_WaitHint": 0}, - {"Status": "Start Pending", "Status_WaitHint": 0}, - {"Status": "Running"}, - ] - ) - - with patch.object(win32serviceutil, "StartService", mock_true), patch.object( - win_service, "disabled", mock_false - ), patch.object(win_service, "info", mock_info), patch.object( - win_service, "status", mock_true - ): - self.assertTrue(win_service.start("spongebob")) - - @pytest.mark.skipif(not WINAPI, reason="pywintypes not available") - def test_start_already_running(self): - """ - Test starting a service that is already running - """ - mock_false = MagicMock(return_value=False) - mock_error = MagicMock( - 
side_effect=pywintypes.error(1056, "StartService", "Service is running") - ) - mock_info = MagicMock(side_effect=[{"Status": "Running"}]) - with patch.object(win32serviceutil, "StartService", mock_error), patch.object( - win_service, "disabled", mock_false - ), patch.object(win_service, "_status_wait", mock_info): - self.assertTrue(win_service.start("spongebob")) - - @pytest.mark.skipif(not WINAPI, reason="win32serviceutil not available") - @pytest.mark.slow_test - def test_stop(self): - """ - Test to stop the specified service - """ - mock_true = MagicMock(return_value=True) - mock_false = MagicMock(return_value=False) - mock_info = MagicMock(side_effect=[{"Status": "Stopped"}]) - - with patch.object(win32serviceutil, "StopService", mock_true), patch.object( - win_service, "_status_wait", mock_info - ): - self.assertTrue(win_service.stop("spongebob")) - - mock_info = MagicMock( - side_effect=[ - {"Status": "Running", "Status_WaitHint": 0}, - {"Status": "Stop Pending", "Status_WaitHint": 0}, - {"Status": "Stopped"}, - ] - ) - - with patch.object(win32serviceutil, "StopService", mock_true), patch.object( - win_service, "info", mock_info - ), patch.object(win_service, "status", mock_false): - self.assertTrue(win_service.stop("spongebob")) - - @pytest.mark.skipif(not WINAPI, reason="pywintypes not available") - def test_stop_not_running(self): - """ - Test stopping a service that is already stopped - """ - mock_error = MagicMock( - side_effect=pywintypes.error(1062, "StopService", "Service is not running") - ) - mock_info = MagicMock(side_effect=[{"Status": "Stopped"}]) - with patch.object(win32serviceutil, "StopService", mock_error), patch.object( - win_service, "_status_wait", mock_info - ): - self.assertTrue(win_service.stop("spongebob")) - - def test_restart(self): - """ - Test to restart the named service - """ - mock_true = MagicMock(return_value=True) - with patch.object(win_service, "create_win_salt_restart_task", mock_true): - with patch.object(win_service, 
"execute_salt_restart_task", mock_true): - self.assertTrue(win_service.restart("salt-minion")) - - with patch.object(win_service, "stop", mock_true): - with patch.object(win_service, "start", mock_true): - self.assertTrue(win_service.restart("salt")) - - def test_createwin_saltrestart_task(self): - """ - Test to create a task in Windows task - scheduler to enable restarting the salt-minion - """ - cmd = salt.utils.path.which("cmd") - mock = MagicMock() - with patch.dict(win_service.__salt__, {"task.create_task": mock}): - win_service.create_win_salt_restart_task() - mock.assert_called_once_with( - action_type="Execute", - arguments=( - "/c ping -n 3 127.0.0.1 && net stop salt-minion && " - "net start salt-minion" - ), - cmd=cmd, - force=True, - name="restart-salt-minion", - start_date="1975-01-01", - start_time="01:00", - trigger_type="Once", - user_name="System", - ) - - def test_execute_salt_restart_task(self): - """ - Test to run the Windows Salt restart task - """ - mock_true = MagicMock(return_value=True) - with patch.dict(win_service.__salt__, {"task.run": mock_true}): - self.assertTrue(win_service.execute_salt_restart_task()) - - @pytest.mark.skipif(not WINAPI, reason="win32serviceutil not available") - def test_status(self): - """ - Test to return the status for a service - """ - mock_info = MagicMock( - side_effect=[ - {"Status": "Running"}, - {"Status": "Stop Pending"}, - {"Status": "Stopped"}, - ] - ) - - with patch.object(win_service, "info", mock_info): - self.assertTrue(win_service.status("spongebob")) - self.assertTrue(win_service.status("patrick")) - self.assertFalse(win_service.status("squidward")) - - def test_getsid(self): - """ - Test to return the sid for this windows service - """ - mock_info = MagicMock( - side_effect=[{"sid": "S-1-5-80-1956725871..."}, {"sid": None}] - ) - with patch.object(win_service, "info", mock_info): - self.assertEqual(win_service.getsid("spongebob"), "S-1-5-80-1956725871...") - 
self.assertEqual(win_service.getsid("plankton"), None) - - def test_enable(self): - """ - Test to enable the named service to start at boot - """ - mock_modify = MagicMock(return_value=True) - mock_info = MagicMock( - return_value={"StartType": "Auto", "StartTypeDelayed": False} - ) - with patch.object(win_service, "modify", mock_modify): - with patch.object(win_service, "info", mock_info): - self.assertTrue(win_service.enable("spongebob")) - - def test_disable(self): - """ - Test to disable the named service to start at boot - """ - mock_modify = MagicMock(return_value=True) - mock_info = MagicMock(return_value={"StartType": "Disabled"}) - with patch.object(win_service, "modify", mock_modify): - with patch.object(win_service, "info", mock_info): - self.assertTrue(win_service.disable("spongebob")) - - def test_enabled(self): - """ - Test to check to see if the named - service is enabled to start on boot - """ - mock = MagicMock(side_effect=[{"StartType": "Auto"}, {"StartType": "Disabled"}]) - with patch.object(win_service, "info", mock): - self.assertTrue(win_service.enabled("spongebob")) - self.assertFalse(win_service.enabled("squarepants")) - - def test_enabled_with_space_in_name(self): - """ - Test to check to see if the named - service is enabled to start on boot - when have space in service name - """ - mock = MagicMock(side_effect=[{"StartType": "Auto"}, {"StartType": "Disabled"}]) - with patch.object(win_service, "info", mock): - self.assertTrue(win_service.enabled("spongebob test")) - self.assertFalse(win_service.enabled("squarepants test")) - - def test_disabled(self): - """ - Test to check to see if the named - service is disabled to start on boot - """ - mock = MagicMock(side_effect=[False, True]) - with patch.object(win_service, "enabled", mock): - self.assertTrue(win_service.disabled("spongebob")) - self.assertFalse(win_service.disabled("squarepants")) - - def test_cmd_quote(self): - """ - Make sure the command gets quoted correctly - """ - # Should 
always return command wrapped in double quotes - expected = r'"C:\Program Files\salt\test.exe"' - - # test no quotes - bin_path = r"C:\Program Files\salt\test.exe" - self.assertEqual(win_service._cmd_quote(bin_path), expected) - - # test single quotes - bin_path = r"'C:\Program Files\salt\test.exe'" - self.assertEqual(win_service._cmd_quote(bin_path), expected) - - # test double quoted single quotes - bin_path = "\"'C:\\Program Files\\salt\\test.exe'\"" - self.assertEqual(win_service._cmd_quote(bin_path), expected) - - # test single quoted, double quoted, single quotes - bin_path = "'\"'C:\\Program Files\\salt\\test.exe'\"'" - self.assertEqual(win_service._cmd_quote(bin_path), expected) diff --git a/tests/unit/modules/test_win_shadow.py b/tests/unit/modules/test_win_shadow.py deleted file mode 100644 index 392da373775d..000000000000 --- a/tests/unit/modules/test_win_shadow.py +++ /dev/null @@ -1,64 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - - -import salt.modules.win_shadow as win_shadow -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class WinShadowTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.win_shadow - """ - - def setup_loader_modules(self): - return { - win_shadow: { - "__salt__": { - # 'user.info': MagicMock(return_value=True), - "user.update": MagicMock(return_value=True) - } - } - } - - # 'info' function tests: 1 - - def test_info(self): - """ - Test if it return information for the specified user - """ - mock_user_info = MagicMock( - return_value={"name": "SALT", "password_changed": "", "expiration_date": ""} - ) - with patch.dict(win_shadow.__salt__, {"user.info": mock_user_info}): - self.assertDictEqual( - win_shadow.info("SALT"), - { - "name": "SALT", - "passwd": "Unavailable", - "lstchg": "", - "min": "", - "max": "", - "warn": "", - "inact": "", - "expire": "", - }, - ) - - # 'set_password' function tests: 1 - 
- def test_set_password(self): - """ - Test if it set the password for a named user. - """ - mock_cmd = MagicMock(return_value={"retcode": False}) - mock_user_info = MagicMock( - return_value={"name": "SALT", "password_changed": "", "expiration_date": ""} - ) - with patch.dict( - win_shadow.__salt__, {"cmd.run_all": mock_cmd, "user.info": mock_user_info} - ): - self.assertTrue(win_shadow.set_password("root", "mysecretpassword")) diff --git a/tests/unit/modules/test_win_snmp.py b/tests/unit/modules/test_win_snmp.py deleted file mode 100644 index 4c3c746c0d52..000000000000 --- a/tests/unit/modules/test_win_snmp.py +++ /dev/null @@ -1,114 +0,0 @@ -""" - :synopsis: Unit Tests for Windows SNMP Module 'module.win_snmp' - :platform: Windows - :maturity: develop - .. versionadded:: 2017.7.0 -""" - - -import salt.modules.win_snmp as win_snmp -from salt.exceptions import CommandExecutionError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -COMMUNITY_NAMES = {"TestCommunity": "Read Create"} - - -class WinSnmpTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.win_snmp - """ - - def setup_loader_modules(self): - return {win_snmp: {}} - - def test_get_agent_service_types(self): - """ - Test - Get the sysServices types that can be configured. - """ - self.assertIsInstance(win_snmp.get_agent_service_types(), list) - - def test_get_permission_types(self): - """ - Test - Get the permission types that can be configured for communities. - """ - self.assertIsInstance(win_snmp.get_permission_types(), list) - - def test_get_auth_traps_enabled(self): - """ - Test - Determine whether the host is configured to send authentication traps. 
- """ - mock_value = MagicMock(return_value={"vdata": 1}) - with patch.dict(win_snmp.__utils__, {"reg.read_value": mock_value}): - self.assertTrue(win_snmp.get_auth_traps_enabled()) - - def test_set_auth_traps_enabled(self): - """ - Test - Manage the sending of authentication traps. - """ - mock_value = MagicMock(return_value=True) - kwargs = {"status": True} - with patch.dict(win_snmp.__utils__, {"reg.set_value": mock_value}), patch( - "salt.modules.win_snmp.get_auth_traps_enabled", MagicMock(return_value=True) - ): - self.assertTrue(win_snmp.set_auth_traps_enabled(**kwargs)) - - def test_get_community_names(self): - """ - Test - Get the current accepted SNMP community names and their permissions. - """ - mock_ret = MagicMock(return_value=[{"vdata": 16, "vname": "TestCommunity"}]) - mock_false = MagicMock(return_value=False) - with patch.dict( - win_snmp.__utils__, - {"reg.list_values": mock_ret, "reg.key_exists": mock_false}, - ): - self.assertEqual(win_snmp.get_community_names(), COMMUNITY_NAMES) - - def test_get_community_names_gpo(self): - """ - Test - Get the current accepted SNMP community names and their permissions. - """ - mock_ret = MagicMock(return_value=[{"vdata": "TestCommunity", "vname": 1}]) - mock_false = MagicMock(return_value=True) - with patch.dict( - win_snmp.__utils__, - {"reg.list_values": mock_ret, "reg.key_exists": mock_false}, - ): - self.assertEqual( - win_snmp.get_community_names(), {"TestCommunity": "Managed by GPO"} - ) - - def test_set_community_names(self): - """ - Test - Manage the SNMP accepted community names and their permissions. 
- """ - mock_true = MagicMock(return_value=True) - kwargs = {"communities": COMMUNITY_NAMES} - mock_false = MagicMock(return_value=False) - with patch.dict( - win_snmp.__utils__, - {"reg.set_value": mock_true, "reg.key_exists": mock_false}, - ), patch( - "salt.modules.win_snmp.get_community_names", - MagicMock(return_value=COMMUNITY_NAMES), - ): - self.assertTrue(win_snmp.set_community_names(**kwargs)) - - def test_set_community_names_gpo(self): - """ - Test - Manage the SNMP accepted community names and their permissions. - """ - mock_true = MagicMock(return_value=True) - kwargs = {"communities": COMMUNITY_NAMES} - with patch.dict( - win_snmp.__utils__, - {"reg.set_value": mock_true, "reg.key_exists": mock_true}, - ), patch( - "salt.modules.win_snmp.get_community_names", - MagicMock(return_value=COMMUNITY_NAMES), - ): - self.assertRaises( - CommandExecutionError, win_snmp.set_community_names, **kwargs - ) diff --git a/tests/unit/modules/test_win_task.py b/tests/unit/modules/test_win_task.py deleted file mode 100644 index 493c200164fe..000000000000 --- a/tests/unit/modules/test_win_task.py +++ /dev/null @@ -1,56 +0,0 @@ -import pytest - -import salt.modules.win_task as win_task -from tests.support.unit import TestCase - - -@pytest.mark.skip_unless_on_windows -@pytest.mark.destructive_test -class WinTaskTestCase(TestCase): - """ - Test cases for salt.modules.win_task - """ - - def test_repeat_interval(self): - task_name = "SaltTest1" - try: - ret = win_task.create_task( - task_name, - user_name="System", - force=True, - action_type="Execute", - cmd="c:\\salt\\salt-call.bat", - trigger_type="Daily", - trigger_enabled=True, - repeat_duration="30 minutes", - repeat_interval="30 minutes", - ) - self.assertTrue(ret) - - ret = win_task.info(task_name) - self.assertEqual(ret["triggers"][0]["trigger_type"], "Daily") - finally: - ret = win_task.delete_task(task_name) - self.assertTrue(ret) - - def test_repeat_interval_and_indefinitely(self): - task_name = "SaltTest2" - try: 
- ret = win_task.create_task( - task_name, - user_name="System", - force=True, - action_type="Execute", - cmd="c:\\salt\\salt-call.bat", - trigger_type="Daily", - trigger_enabled=True, - repeat_duration="Indefinitely", - repeat_interval="30 minutes", - ) - self.assertTrue(ret) - - ret = win_task.info(task_name) - self.assertEqual(ret["triggers"][0]["trigger_type"], "Daily") - finally: - ret = win_task.delete_task(task_name) - self.assertTrue(ret) diff --git a/tests/unit/modules/test_xapi_virt.py b/tests/unit/modules/test_xapi_virt.py deleted file mode 100644 index c7e5b79316e4..000000000000 --- a/tests/unit/modules/test_xapi_virt.py +++ /dev/null @@ -1,397 +0,0 @@ -""" - :codeauthor: Rahul Handay -""" - - -import salt.modules.xapi_virt as xapi -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, mock_open, patch -from tests.support.unit import TestCase - - -class Mockxapi: - """ - Mock xapi class - """ - - def __init__(self): - pass - - class Session: - """ - Mock Session class - """ - - def __init__(self, xapi_uri): - pass - - class xenapi: - """ - Mock xenapi class - """ - - def __init__(self): - pass - - @staticmethod - def login_with_password(xapi_login, xapi_password): - """ - Mock login_with_password method - """ - return xapi_login, xapi_password - - class session: - """ - Mock session class - """ - - def __init__(self): - pass - - @staticmethod - def logout(): - """ - Mock logout method - """ - return Mockxapi() - - -class XapiTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.xapi - """ - - def setup_loader_modules(self): - return {xapi: {}} - - def test_list_domains(self): - """ - Test to return a list of domain names on the minion - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - self.assertListEqual(xapi.list_domains(), []) - - def test_vm_info(self): - """ - Test to return detailed information about the vms - """ - with patch.object(xapi, "_get_xapi_session", 
MagicMock()): - mock = MagicMock(return_value=False) - with patch.object(xapi, "_get_record_by_label", mock): - self.assertDictEqual(xapi.vm_info(True), {True: False}) - - def test_vm_state(self): - """ - Test to return list of all the vms and their state. - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(return_value={"power_state": "1"}) - with patch.object(xapi, "_get_record_by_label", mock): - self.assertDictEqual(xapi.vm_state("salt"), {"salt": "1"}) - - self.assertDictEqual(xapi.vm_state(), {}) - - def test_get_nics(self): - """ - Test to return info about the network interfaces of a named vm - """ - ret = {"Stack": {"device": "ETH0", "mac": "Stack", "mtu": 1}} - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, {"VIFs": "salt"}]) - with patch.object(xapi, "_get_record_by_label", mock): - self.assertFalse(xapi.get_nics("salt")) - - mock = MagicMock( - return_value={"MAC": "Stack", "device": "ETH0", "MTU": 1} - ) - with patch.object(xapi, "_get_record", mock): - self.assertDictEqual(xapi.get_nics("salt"), ret) - - def test_get_macs(self): - """ - Test to return a list off MAC addresses from the named vm - """ - mock = MagicMock(side_effect=[None, ["a", "b", "c"]]) - with patch.object(xapi, "get_nics", mock): - self.assertIsNone(xapi.get_macs("salt")) - - self.assertListEqual(xapi.get_macs("salt"), ["a", "b", "c"]) - - def test_get_disks(self): - """ - Test to return the disks of a named vm - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.get_disks("salt")) - - self.assertDictEqual(xapi.get_disks("salt"), {}) - - def test_setmem(self): - """ - Test to changes the amount of memory allocated to VM. 
- """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.setmem("salt", "1")) - - self.assertTrue(xapi.setmem("salt", "1")) - - with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): - mock = MagicMock(return_value=True) - with patch.dict(xapi.__salt__, {"config.option": mock}): - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.setmem("salt", "1")) - - def test_setvcpus(self): - """ - Test to changes the amount of vcpus allocated to VM. - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.setvcpus("salt", "1")) - - self.assertTrue(xapi.setvcpus("salt", "1")) - - with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): - mock = MagicMock(return_value=True) - with patch.dict(xapi.__salt__, {"config.option": mock}): - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.setvcpus("salt", "1")) - - def test_vcpu_pin(self): - """ - Test to Set which CPUs a VCPU can use. 
- """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.vcpu_pin("salt", "1", "2")) - - self.assertTrue(xapi.vcpu_pin("salt", "1", "2")) - - with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): - mock = MagicMock(return_value=True) - with patch.dict(xapi.__salt__, {"config.option": mock}): - with patch.object(xapi, "_get_label_uuid", mock): - with patch.dict(xapi.__salt__, {"cmd.run": mock}): - self.assertTrue(xapi.vcpu_pin("salt", "1", "2")) - - def test_freemem(self): - """ - Test to return an int representing the amount of memory - that has not been given to virtual machines on this node - """ - mock = MagicMock(return_value={"free_memory": 1024}) - with patch.object(xapi, "node_info", mock): - self.assertEqual(xapi.freemem(), 1024) - - def test_freecpu(self): - """ - Test to return an int representing the number - of unallocated cpus on this hypervisor - """ - mock = MagicMock(return_value={"free_cpus": 1024}) - with patch.object(xapi, "node_info", mock): - self.assertEqual(xapi.freecpu(), 1024) - - def test_full_info(self): - """ - Test to return the node_info, vm_info and freemem - """ - mock = MagicMock(return_value="salt") - with patch.object(xapi, "node_info", mock): - mock = MagicMock(return_value="stack") - with patch.object(xapi, "vm_info", mock): - self.assertDictEqual( - xapi.full_info(), {"node_info": "salt", "vm_info": "stack"} - ) - - def test_shutdown(self): - """ - Test to send a soft shutdown signal to the named vm - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.shutdown("salt")) - - self.assertTrue(xapi.shutdown("salt")) - - with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): - mock = 
MagicMock(return_value=True) - with patch.dict(xapi.__salt__, {"config.option": mock}): - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.shutdown("salt")) - - def test_pause(self): - """ - Test to pause the named vm - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.pause("salt")) - - self.assertTrue(xapi.pause("salt")) - - with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): - mock = MagicMock(return_value=True) - with patch.dict(xapi.__salt__, {"config.option": mock}): - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.pause("salt")) - - def test_resume(self): - """ - Test to resume the named vm - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.resume("salt")) - - self.assertTrue(xapi.resume("salt")) - - with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): - mock = MagicMock(return_value=True) - with patch.dict(xapi.__salt__, {"config.option": mock}): - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.resume("salt")) - - def test_start(self): - """ - Test to reboot a domain via ACPI request - """ - mock = MagicMock(return_value=True) - with patch.object(xapi, "start", mock): - self.assertTrue(xapi.start("salt")) - - def test_reboot(self): - """ - Test to reboot a domain via ACPI request - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.reboot("salt")) - - self.assertTrue(xapi.reboot("salt")) - - with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): - mock = 
MagicMock(return_value=True) - with patch.dict(xapi.__salt__, {"config.option": mock}): - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.reboot("salt")) - - def test_reset(self): - """ - Test to reset a VM by emulating the - reset button on a physical machine - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.reset("salt")) - - self.assertTrue(xapi.reset("salt")) - - with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): - mock = MagicMock(return_value=True) - with patch.dict(xapi.__salt__, {"config.option": mock}): - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.reset("salt")) - - def test_migrate(self): - """ - Test to migrates the virtual machine to another hypervisor - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.migrate("salt", "stack")) - - self.assertTrue(xapi.migrate("salt", "stack")) - - with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): - mock = MagicMock(return_value=True) - with patch.dict(xapi.__salt__, {"config.option": mock}): - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.migrate("salt", "stack")) - - def test_stop(self): - """ - Test to Hard power down the virtual machine, - this is equivalent to pulling the power - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(side_effect=[False, ["a", "b", "c"]]) - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.stop("salt")) - - self.assertTrue(xapi.stop("salt")) - - with patch.object(xapi, "_check_xenapi", MagicMock(return_value=Mockxapi)): - mock = MagicMock(return_value=True) - with patch.dict(xapi.__salt__, 
{"config.option": mock}): - with patch.object(xapi, "_get_label_uuid", mock): - self.assertFalse(xapi.stop("salt")) - - def test_is_hyper(self): - """ - Test to returns a bool whether or not - this node is a hypervisor of any kind - """ - with patch.dict(xapi.__grains__, {"virtual_subtype": "Dom0"}): - self.assertFalse(xapi.is_hyper()) - - with patch.dict(xapi.__grains__, {"virtual": "Xen Dom0"}): - self.assertFalse(xapi.is_hyper()) - - with patch.dict(xapi.__grains__, {"virtual_subtype": "Xen Dom0"}): - with patch("salt.utils.files.fopen", mock_open(read_data="salt")): - self.assertFalse(xapi.is_hyper()) - - with patch("salt.utils.files.fopen", mock_open()) as mock_read: - mock_read.side_effect = IOError - self.assertFalse(xapi.is_hyper()) - - with patch("salt.utils.files.fopen", mock_open(read_data="xen_")): - with patch.dict(xapi.__grains__, {"ps": "salt"}): - mock = MagicMock(return_value={"xenstore": "salt"}) - with patch.dict(xapi.__salt__, {"cmd.run": mock}): - self.assertTrue(xapi.is_hyper()) - - def test_vm_cputime(self): - """ - Test to Return cputime used by the vms - """ - ret = {"1": {"cputime_percent": 0, "cputime": 1}} - with patch.object(xapi, "_get_xapi_session", MagicMock()): - mock = MagicMock(return_value={"host_CPUs": "1"}) - with patch.object(xapi, "_get_record_by_label", mock): - mock = MagicMock( - return_value={"VCPUs_number": "1", "VCPUs_utilisation": {"0": "1"}} - ) - with patch.object(xapi, "_get_metrics_record", mock): - self.assertDictEqual(xapi.vm_cputime("1"), ret) - - mock = MagicMock(return_value={}) - with patch.object(xapi, "list_domains", mock): - self.assertDictEqual(xapi.vm_cputime(""), {}) - - def test_vm_netstats(self): - """ - Test to return combined network counters used by the vms - """ - with patch.object(xapi, "_get_xapi_session", MagicMock()): - self.assertDictEqual(xapi.vm_netstats(""), {}) - - def test_vm_diskstats(self): - """ - Test to return disk usage counters used by the vms - """ - with patch.object(xapi, 
"_get_xapi_session", MagicMock()): - self.assertDictEqual(xapi.vm_diskstats(""), {}) diff --git a/tests/unit/modules/test_xfs.py b/tests/unit/modules/test_xfs.py deleted file mode 100644 index 47b75a045098..000000000000 --- a/tests/unit/modules/test_xfs.py +++ /dev/null @@ -1,113 +0,0 @@ -import textwrap - -import salt.modules.xfs as xfs -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -@patch("salt.modules.xfs._get_mounts", MagicMock(return_value={})) -class XFSTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.xfs - """ - - def setup_loader_modules(self): - return {xfs: {}} - - def test__blkid_output(self): - """ - Test xfs._blkid_output when there is data - """ - blkid_export = textwrap.dedent( - """ - DEVNAME=/dev/sda1 - UUID=XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - TYPE=xfs - PARTUUID=YYYYYYYY-YY - - DEVNAME=/dev/sdb1 - PARTUUID=ZZZZZZZZ-ZZZZ-ZZZZ-ZZZZ-ZZZZZZZZZZZZ - """ - ) - # We expect to find only data from /dev/sda1, nothig from - # /dev/sdb1 - self.assertEqual( - xfs._blkid_output(blkid_export), - { - "/dev/sda1": { - "label": None, - "partuuid": "YYYYYYYY-YY", - "uuid": "XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX", - } - }, - ) - - def test__parse_xfs_info(self): - """ - Test parsing output from mkfs.xfs. - """ - data = textwrap.dedent( - """ - meta-data=/dev/vg00/testvol isize=512 agcount=4, agsize=1310720 blks - = sectsz=4096 attr=2, projid32bit=1 - = crc=1 finobt=1, sparse=1, rmapbt=0 - = reflink=1 - data = bsize=4096 blocks=5242880, imaxpct=25 - = sunit=0 swidth=0 blks - naming =version 2 bsize=4096 ascii-ci=0, ftype=1 - log =internal log bsize=4096 blocks=2560, version=2 - = sectsz=4096 sunit=1 blks, lazy-count=1 - realtime =none extsz=4096 blocks=0, rtextents=0 - Discarding blocks...Done. 
- """ - ) - - self.assertEqual( - xfs._parse_xfs_info(data), - { - "meta-data": { - "section": "/dev/vg00/testvol", - "isize": "512", - "agcount": "4", - "agsize": "1310720 blks", - "sectsz": "4096", - "attr": "2", - "projid32bit": "1", - "crc": "1", - "finobt": "1", - "sparse": "1", - "rmapbt": "0", - "reflink": "1", - }, - "data": { - "section": "data", - "bsize": "4096", - "blocks": "5242880", - "imaxpct": "25", - "sunit": "0", - "swidth": "0 blks", - }, - "naming": { - "section": "version 2", - "bsize": "4096", - "ascii-ci": "0", - "ftype": "1", - }, - "log": { - "section": "internal log", - "bsize": "4096", - "blocks": "2560", - "version": "2", - "sectsz": "4096", - "sunit": "1 blks", - "lazy-count": "1", - }, - "realtime": { - "section": "none", - "extsz": "4096", - "blocks": "0", - "rtextents": "0", - }, - }, - ) diff --git a/tests/unit/modules/test_xml.py b/tests/unit/modules/test_xml.py deleted file mode 100644 index a37779534e7a..000000000000 --- a/tests/unit/modules/test_xml.py +++ /dev/null @@ -1,97 +0,0 @@ -""" - Tests for xml module -""" - - -import os -import tempfile - -from salt.modules import xml -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.unit import TestCase - -XML_STRING = """ - - - Christian Bale - Liam Neeson - Michael Caine - - - Tom Waits - B.B. 
King - Ray Charles - - - """ - - -class XmlTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.xml - """ - - def setup_loader_modules(self): - return {xml: {}} - - def test_get_value(self): - """ - Verify xml.get_value - """ - with tempfile.NamedTemporaryFile("w+", delete=False) as xml_file: - xml_file.write(XML_STRING) - xml_file.flush() - - xml_result = xml.get_value(xml_file.name, ".//actor[@id='2']") - self.assertEqual(xml_result, "Liam Neeson") - - os.remove(xml_file.name) - - def test_set_value(self): - """ - Verify xml.set_value - """ - with tempfile.NamedTemporaryFile("w+", delete=False) as xml_file: - xml_file.write(XML_STRING) - xml_file.flush() - - xml_result = xml.set_value( - xml_file.name, ".//actor[@id='2']", "Patrick Stewart" - ) - assert xml_result is True - - xml_result = xml.get_value(xml_file.name, ".//actor[@id='2']") - self.assertEqual(xml_result, "Patrick Stewart") - - os.remove(xml_file.name) - - def test_get_attribute(self): - """ - Verify xml.get_attribute - """ - with tempfile.NamedTemporaryFile("w+", delete=False) as xml_file: - xml_file.write(XML_STRING) - xml_file.flush() - - xml_result = xml.get_attribute(xml_file.name, ".//actor[@id='3']") - self.assertEqual(xml_result, {"id": "3"}) - - os.remove(xml_file.name) - - def test_set_attribute(self): - """ - Verify xml.set_value - """ - with tempfile.NamedTemporaryFile("w+", delete=False) as xml_file: - xml_file.write(XML_STRING) - xml_file.flush() - - xml_result = xml.set_attribute( - xml_file.name, ".//actor[@id='3']", "edited", "uh-huh" - ) - assert xml_result is True - - xml_result = xml.get_attribute(xml_file.name, ".//actor[@id='3']") - self.assertEqual(xml_result, {"edited": "uh-huh", "id": "3"}) - - os.remove(xml_file.name) diff --git a/tests/unit/modules/test_zcbuildout.py b/tests/unit/modules/test_zcbuildout.py index f793e3fc3f89..db7a862f727b 100644 --- a/tests/unit/modules/test_zcbuildout.py +++ b/tests/unit/modules/test_zcbuildout.py @@ -19,6 +19,16 
@@ from tests.support.runtests import RUNTIME_VARS from tests.support.unit import TestCase +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, + pytest.mark.skip_on_windows( + reason=( + "Special steps are required for proper SSL validation because " + "`easy_install` is too old(and deprecated)." + ) + ), +] + KNOWN_VIRTUALENV_BINARY_NAMES = ( "virtualenv", "virtualenv2", diff --git a/tests/unit/netapi/rest_tornado/test_saltnado.py b/tests/unit/netapi/rest_tornado/test_saltnado.py index 7b63a65d4f30..c4758e700aba 100644 --- a/tests/unit/netapi/rest_tornado/test_saltnado.py +++ b/tests/unit/netapi/rest_tornado/test_saltnado.py @@ -647,7 +647,6 @@ def completer(): with patch.object( self.handler.application.event_listener, "get_event", - autospec=True, side_effect=fancy_get_event, ), patch.dict( self.handler.application.opts, @@ -698,7 +697,6 @@ def toggle_is_finished(*args, **kwargs): with patch.object( self.handler.application.event_listener, "get_event", - autospec=True, side_effect=fancy_get_event, ), patch.object( self.handler, @@ -729,8 +727,8 @@ def test_when_is_finished_then_all_collected_data_should_be_returned(self): { "tag": "fnord", "data": { - "return": "return from fnord {}".format(i), - "id": "fnord {}".format(i), + "return": f"return from fnord {i}", + "id": f"fnord {i}", }, } ) @@ -760,7 +758,6 @@ def toggle_is_finished(*args, **kwargs): with patch.object( self.handler.application.event_listener, "get_event", - autospec=True, side_effect=fancy_get_event, ), patch.object( self.handler, @@ -794,8 +791,8 @@ def test_when_is_timed_out_then_all_collected_data_should_be_returned(self): { "tag": "fnord", "data": { - "return": "return from fnord {}".format(i), - "id": "fnord {}".format(i), + "return": f"return from fnord {i}", + "id": f"fnord {i}", }, } ) @@ -820,7 +817,6 @@ def fancy_get_event(*args, **kwargs): with patch.object( self.handler.application.event_listener, "get_event", - autospec=True, side_effect=fancy_get_event, ), patch.dict( 
self.handler.application.opts, @@ -843,12 +839,12 @@ def test_when_minions_all_return_then_all_collected_data_should_be_returned(self completed_events = [salt.ext.tornado.gen.Future() for _ in range(10)] events_by_id = {} for i, event in enumerate(completed_events): - id_ = "fnord {}".format(i) + id_ = f"fnord {i}" events_by_id[id_] = event event.set_result( { "tag": "fnord", - "data": {"return": "return from {}".format(id_), "id": id_}, + "data": {"return": f"return from {id_}", "id": id_}, } ) expected_result = { @@ -878,7 +874,6 @@ def fancy_get_event(*args, **kwargs): with patch.object( self.handler.application.event_listener, "get_event", - autospec=True, side_effect=fancy_get_event, ), patch.dict( self.handler.application.opts, @@ -904,12 +899,12 @@ def test_when_min_wait_time_has_not_passed_then_disbatch_should_not_return_expec events_by_id = {} # Setup some real-enough looking return data for i, event in enumerate(completed_events): - id_ = "fnord {}".format(i) + id_ = f"fnord {i}" events_by_id[id_] = event event.set_result( { "tag": "fnord", - "data": {"return": "return from {}".format(id_), "id": id_}, + "data": {"return": f"return from {id_}", "id": id_}, } ) # Hard coded instead of dynamic to avoid potentially writing a test @@ -971,7 +966,6 @@ def fake_sleep(timer): with patch.object( self.handler.application.event_listener, "get_event", - autospec=True, side_effect=fancy_get_event, ), patch.object( self.handler, diff --git a/tests/unit/states/test_boto_apigateway.py b/tests/unit/states/test_boto_apigateway.py index 51c85d6058a5..7cf95a43442b 100644 --- a/tests/unit/states/test_boto_apigateway.py +++ b/tests/unit/states/test_boto_apigateway.py @@ -28,6 +28,10 @@ except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/states/test_boto_cognitoidentity.py b/tests/unit/states/test_boto_cognitoidentity.py index 4354df0546f6..f84a055dd2da 
100644 --- a/tests/unit/states/test_boto_cognitoidentity.py +++ b/tests/unit/states/test_boto_cognitoidentity.py @@ -25,6 +25,10 @@ except ImportError: HAS_BOTO = False +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + # pylint: enable=import-error,no-name-in-module diff --git a/tests/unit/states/test_gem.py b/tests/unit/states/test_gem.py deleted file mode 100644 index 90f48a518903..000000000000 --- a/tests/unit/states/test_gem.py +++ /dev/null @@ -1,134 +0,0 @@ -# Late import so mock can do its job -import salt.states.gem as gem -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class TestGemState(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - return {gem: {"__opts__": {"test": False}}} - - def test_installed(self): - gems = {"foo": ["1.0"], "bar": ["2.0"]} - gem_list = MagicMock(return_value=gems) - gem_install_succeeds = MagicMock(return_value=True) - gem_install_fails = MagicMock(return_value=False) - - with patch.dict(gem.__salt__, {"gem.list": gem_list}): - with patch.dict(gem.__salt__, {"gem.install": gem_install_succeeds}): - ret = gem.installed("foo") - self.assertEqual(True, ret["result"]) - ret = gem.installed("quux") - self.assertEqual(True, ret["result"]) - gem_install_succeeds.assert_called_once_with( - "quux", - pre_releases=False, - ruby=None, - runas=None, - version=None, - proxy=None, - rdoc=False, - source=None, - ri=False, - gem_bin=None, - ) - - with patch.dict(gem.__salt__, {"gem.install": gem_install_fails}): - ret = gem.installed("quux") - self.assertEqual(False, ret["result"]) - gem_install_fails.assert_called_once_with( - "quux", - pre_releases=False, - ruby=None, - runas=None, - version=None, - proxy=None, - rdoc=False, - source=None, - ri=False, - gem_bin=None, - ) - - def test_installed_version(self): - gems = {"foo": ["1.0"], "bar": ["2.0"]} - gem_list = MagicMock(return_value=gems) - 
gem_install_succeeds = MagicMock(return_value=True) - - with patch.dict(gem.__salt__, {"gem.list": gem_list}): - with patch.dict(gem.__salt__, {"gem.install": gem_install_succeeds}): - ret = gem.installed("foo", version=">= 1.0") - self.assertEqual(True, ret["result"]) - self.assertEqual( - "Installed Gem meets version requirements.", ret["comment"] - ) - - def test_removed(self): - gems = ["foo", "bar"] - gem_list = MagicMock(return_value=gems) - gem_uninstall_succeeds = MagicMock(return_value=True) - gem_uninstall_fails = MagicMock(return_value=False) - with patch.dict(gem.__salt__, {"gem.list": gem_list}): - with patch.dict(gem.__salt__, {"gem.uninstall": gem_uninstall_succeeds}): - ret = gem.removed("quux") - self.assertEqual(True, ret["result"]) - ret = gem.removed("foo") - self.assertEqual(True, ret["result"]) - gem_uninstall_succeeds.assert_called_once_with( - "foo", None, runas=None, gem_bin=None - ) - - with patch.dict(gem.__salt__, {"gem.uninstall": gem_uninstall_fails}): - ret = gem.removed("bar") - self.assertEqual(False, ret["result"]) - gem_uninstall_fails.assert_called_once_with( - "bar", None, runas=None, gem_bin=None - ) - - def test_sources_add(self): - gem_sources = ["http://foo", "http://bar"] - gem_sources_list = MagicMock(return_value=gem_sources) - gem_sources_add_succeeds = MagicMock(return_value=True) - gem_sources_add_fails = MagicMock(return_value=False) - with patch.dict(gem.__salt__, {"gem.sources_list": gem_sources_list}): - with patch.dict( - gem.__salt__, {"gem.sources_add": gem_sources_add_succeeds} - ): - ret = gem.sources_add("http://foo") - self.assertEqual(True, ret["result"]) - ret = gem.sources_add("http://fui") - self.assertEqual(True, ret["result"]) - gem_sources_add_succeeds.assert_called_once_with( - source_uri="http://fui", ruby=None, runas=None - ) - with patch.dict(gem.__salt__, {"gem.sources_add": gem_sources_add_fails}): - ret = gem.sources_add("http://fui") - self.assertEqual(False, ret["result"]) - 
gem_sources_add_fails.assert_called_once_with( - source_uri="http://fui", ruby=None, runas=None - ) - - def test_sources_remove(self): - gem_sources = ["http://foo", "http://bar"] - gem_sources_list = MagicMock(return_value=gem_sources) - gem_sources_remove_succeeds = MagicMock(return_value=True) - gem_sources_remove_fails = MagicMock(return_value=False) - with patch.dict(gem.__salt__, {"gem.sources_list": gem_sources_list}): - with patch.dict( - gem.__salt__, {"gem.sources_remove": gem_sources_remove_succeeds} - ): - ret = gem.sources_remove("http://fui") - self.assertEqual(True, ret["result"]) - ret = gem.sources_remove("http://foo") - self.assertEqual(True, ret["result"]) - gem_sources_remove_succeeds.assert_called_once_with( - source_uri="http://foo", ruby=None, runas=None - ) - with patch.dict( - gem.__salt__, {"gem.sources_remove": gem_sources_remove_fails} - ): - ret = gem.sources_remove("http://bar") - self.assertEqual(False, ret["result"]) - gem_sources_remove_fails.assert_called_once_with( - source_uri="http://bar", ruby=None, runas=None - ) diff --git a/tests/unit/states/test_glusterfs.py b/tests/unit/states/test_glusterfs.py deleted file mode 100644 index 5204d3607aa2..000000000000 --- a/tests/unit/states/test_glusterfs.py +++ /dev/null @@ -1,451 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import salt.modules.glusterfs as mod_glusterfs -import salt.states.glusterfs as glusterfs -import salt.utils.cloud -import salt.utils.network -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class GlusterfsTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.glusterfs - """ - - def setup_loader_modules(self): - return {glusterfs: {"__salt__": {"glusterfs.peer": mod_glusterfs.peer}}} - - # 'peered' function tests: 1 - - def test_peered(self): - """ - Test to verify if node is peered. 
- """ - name = "server1" - - ret = {"name": name, "result": True, "comment": "", "changes": {}} - - mock_ip = MagicMock(return_value=["1.2.3.4", "1.2.3.5"]) - mock_ip6 = MagicMock(return_value=["2001:db8::1"]) - mock_host_ips = MagicMock(return_value=["1.2.3.5"]) - mock_peer = MagicMock(return_value=True) - mock_status = MagicMock(return_value={"uuid1": {"hostnames": [name]}}) - - with patch.dict( - glusterfs.__salt__, - {"glusterfs.peer_status": mock_status, "glusterfs.peer": mock_peer}, - ): - with patch.object(salt.utils.network, "ip_addrs", mock_ip), patch.object( - salt.utils.network, "ip_addrs6", mock_ip6 - ), patch.object(salt.utils.network, "host_to_ips", mock_host_ips): - comt = "Peering with localhost is not needed" - ret.update({"comment": comt}) - self.assertDictEqual(glusterfs.peered(name), ret) - - mock_host_ips.return_value = ["127.0.1.1"] - comt = "Peering with localhost is not needed" - ret.update({"comment": comt}) - self.assertDictEqual(glusterfs.peered(name), ret) - - mock_host_ips.return_value = ["2001:db8::1"] - self.assertDictEqual(glusterfs.peered(name), ret) - - mock_host_ips.return_value = ["1.2.3.42"] - comt = "Host {} already peered".format(name) - ret.update({"comment": comt}) - self.assertDictEqual(glusterfs.peered(name), ret) - - with patch.dict(glusterfs.__opts__, {"test": False}): - old = {"uuid1": {"hostnames": ["other1"]}} - new = { - "uuid1": {"hostnames": ["other1"]}, - "uuid2": {"hostnames": ["someAlias", name]}, - } - mock_status.side_effect = [old, new] - comt = "Host {} successfully peered".format(name) - ret.update({"comment": comt, "changes": {"old": old, "new": new}}) - self.assertDictEqual(glusterfs.peered(name), ret) - mock_status.side_effect = None - - mock_status.return_value = {"uuid1": {"hostnames": ["other"]}} - mock_peer.return_value = False - - ret.update({"result": False}) - - comt = ( - "Failed to peer with {0}," + " please check logs for errors" - ).format(name) - ret.update({"comment": comt, "changes": {}}) - 
self.assertDictEqual(glusterfs.peered(name), ret) - - comt = "Invalid characters in peer name." - ret.update({"comment": comt, "name": ":/"}) - self.assertDictEqual(glusterfs.peered(":/"), ret) - ret.update({"name": name}) - - with patch.dict(glusterfs.__opts__, {"test": True}): - comt = "Peer {} will be added.".format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(glusterfs.peered(name), ret) - - # 'volume_present' function tests: 1 - - def test_volume_present(self): - """ - Test to ensure that a volume exists - """ - name = "salt" - bricks = ["host1:/brick1"] - ret = {"name": name, "result": True, "comment": "", "changes": {}} - - started_info = {name: {"status": "1"}} - stopped_info = {name: {"status": "0"}} - - mock_info = MagicMock() - mock_list = MagicMock() - mock_create = MagicMock() - mock_start = MagicMock(return_value=True) - - with patch.dict( - glusterfs.__salt__, - { - "glusterfs.info": mock_info, - "glusterfs.list_volumes": mock_list, - "glusterfs.create_volume": mock_create, - "glusterfs.start_volume": mock_start, - }, - ): - with patch.dict(glusterfs.__opts__, {"test": False}): - mock_list.return_value = [name] - mock_info.return_value = started_info - comt = "Volume {} already exists and is started".format(name) - ret.update({"comment": comt}) - self.assertDictEqual( - glusterfs.volume_present(name, bricks, start=True), ret - ) - - mock_info.return_value = stopped_info - comt = "Volume {} already exists and is now started".format(name) - ret.update( - {"comment": comt, "changes": {"old": "stopped", "new": "started"}} - ) - self.assertDictEqual( - glusterfs.volume_present(name, bricks, start=True), ret - ) - - comt = "Volume {} already exists".format(name) - ret.update({"comment": comt, "changes": {}}) - self.assertDictEqual( - glusterfs.volume_present(name, bricks, start=False), ret - ) - with patch.dict(glusterfs.__opts__, {"test": True}): - comt = "Volume {} already exists".format(name) - ret.update({"comment": 
comt, "result": None}) - self.assertDictEqual( - glusterfs.volume_present(name, bricks, start=False), ret - ) - - comt = ("Volume {0} already exists" + " and will be started").format( - name - ) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual( - glusterfs.volume_present(name, bricks, start=True), ret - ) - - mock_list.return_value = [] - comt = "Volume {} will be created".format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual( - glusterfs.volume_present(name, bricks, start=False), ret - ) - - comt = ("Volume {0} will be created" + " and started").format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual( - glusterfs.volume_present(name, bricks, start=True), ret - ) - - with patch.dict(glusterfs.__opts__, {"test": False}): - mock_list.side_effect = [[], [name]] - comt = "Volume {} is created".format(name) - ret.update( - { - "comment": comt, - "result": True, - "changes": {"old": [], "new": [name]}, - } - ) - self.assertDictEqual( - glusterfs.volume_present(name, bricks, start=False), ret - ) - - mock_list.side_effect = [[], [name]] - comt = "Volume {} is created and is now started".format(name) - ret.update({"comment": comt, "result": True}) - self.assertDictEqual( - glusterfs.volume_present(name, bricks, start=True), ret - ) - - mock_list.side_effect = None - mock_list.return_value = [] - mock_create.return_value = False - comt = "Creation of volume {} failed".format(name) - ret.update({"comment": comt, "result": False, "changes": {}}) - self.assertDictEqual(glusterfs.volume_present(name, bricks), ret) - - with patch.object( - salt.utils.cloud, "check_name", MagicMock(return_value=True) - ): - comt = "Invalid characters in volume name." 
- ret.update({"comment": comt, "result": False}) - self.assertDictEqual(glusterfs.volume_present(name, bricks), ret) - - # 'started' function tests: 1 - - def test_started(self): - """ - Test to check if volume has been started - """ - name = "salt" - - ret = {"name": name, "result": False, "comment": "", "changes": {}} - - started_info = {name: {"status": "1"}} - stopped_info = {name: {"status": "0"}} - mock_info = MagicMock(return_value={}) - mock_start = MagicMock(return_value=True) - - with patch.dict( - glusterfs.__salt__, - {"glusterfs.info": mock_info, "glusterfs.start_volume": mock_start}, - ): - comt = "Volume {} does not exist".format(name) - ret.update({"comment": comt}) - self.assertDictEqual(glusterfs.started(name), ret) - - mock_info.return_value = started_info - comt = "Volume {} is already started".format(name) - ret.update({"comment": comt, "result": True}) - self.assertDictEqual(glusterfs.started(name), ret) - - with patch.dict(glusterfs.__opts__, {"test": True}): - mock_info.return_value = stopped_info - comt = "Volume {} will be started".format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(glusterfs.started(name), ret) - - with patch.dict(glusterfs.__opts__, {"test": False}): - comt = "Volume {} is started".format(name) - ret.update( - { - "comment": comt, - "result": True, - "change": {"new": "started", "old": "stopped"}, - } - ) - self.assertDictEqual(glusterfs.started(name), ret) - - # 'add_volume_bricks' function tests: 1 - - def test_add_volume_bricks(self): - """ - Test to add brick(s) to an existing volume - """ - name = "salt" - bricks = ["host1:/drive1"] - old_bricks = ["host1:/drive2"] - - ret = {"name": name, "result": False, "comment": "", "changes": {}} - - stopped_volinfo = {"salt": {"status": "0"}} - volinfo = { - "salt": {"status": "1", "bricks": {"brick1": {"path": old_bricks[0]}}} - } - new_volinfo = { - "salt": { - "status": "1", - "bricks": { - "brick1": {"path": old_bricks[0]}, - "brick2": 
{"path": bricks[0]}, - }, - } - } - - mock_info = MagicMock(return_value={}) - mock_add = MagicMock(side_effect=[False, True]) - - with patch.dict( - glusterfs.__salt__, - {"glusterfs.info": mock_info, "glusterfs.add_volume_bricks": mock_add}, - ): - ret.update({"comment": "Volume salt does not exist"}) - self.assertDictEqual(glusterfs.add_volume_bricks(name, bricks), ret) - - mock_info.return_value = stopped_volinfo - ret.update({"comment": "Volume salt is not started"}) - self.assertDictEqual(glusterfs.add_volume_bricks(name, bricks), ret) - - mock_info.return_value = volinfo - ret.update({"comment": "Adding bricks to volume salt failed"}) - self.assertDictEqual(glusterfs.add_volume_bricks(name, bricks), ret) - - ret.update({"result": True}) - ret.update({"comment": "Bricks already added in volume salt"}) - self.assertDictEqual(glusterfs.add_volume_bricks(name, old_bricks), ret) - - mock_info.side_effect = [volinfo, new_volinfo] - ret.update( - { - "comment": "Bricks successfully added to volume salt", - "changes": {"new": bricks + old_bricks, "old": old_bricks}, - } - ) - # Let's sort ourselves because the test under python 3 sometimes fails - # just because of the new changes list order - result = glusterfs.add_volume_bricks(name, bricks) - ret["changes"]["new"] = sorted(ret["changes"]["new"]) - result["changes"]["new"] = sorted(result["changes"]["new"]) - self.assertDictEqual(result, ret) - - # 'op_version' function tests: 1 - - def test_op_version(self): - """ - Test setting the Glusterfs op-version - """ - name = "salt" - current = 30707 - new = 31200 - - ret = {"name": name, "result": False, "comment": "", "changes": {}} - - mock_get_version = MagicMock(return_value={}) - mock_set_version = MagicMock(return_value={}) - - with patch.dict( - glusterfs.__salt__, - { - "glusterfs.get_op_version": mock_get_version, - "glusterfs.set_op_version": mock_set_version, - }, - ): - mock_get_version.return_value = [False, "some error message"] - ret.update({"result": 
False}) - ret.update({"comment": "some error message"}) - self.assertDictEqual(glusterfs.op_version(name, current), ret) - - mock_get_version.return_value = current - ret.update({"result": True}) - ret.update( - { - "comment": ( - "Glusterfs cluster.op-version for {} already set to {}".format( - name, current - ) - ) - } - ) - self.assertDictEqual(glusterfs.op_version(name, current), ret) - - with patch.dict(glusterfs.__opts__, {"test": True}): - mock_set_version.return_value = [False, "Failed to set version"] - ret.update({"result": None}) - ret.update( - { - "comment": ( - "An attempt would be made to set the cluster.op-version for" - " {} to {}.".format(name, new) - ) - } - ) - self.assertDictEqual(glusterfs.op_version(name, new), ret) - - with patch.dict(glusterfs.__opts__, {"test": False}): - mock_set_version.return_value = [False, "Failed to set version"] - ret.update({"result": False}) - ret.update({"comment": "Failed to set version"}) - self.assertDictEqual(glusterfs.op_version(name, new), ret) - - mock_set_version.return_value = "some success message" - ret.update({"comment": "some success message"}) - ret.update({"changes": {"old": current, "new": new}}) - ret.update({"result": True}) - self.assertDictEqual(glusterfs.op_version(name, new), ret) - - # 'max_op_version' function tests: 1 - - def test_max_op_version(self): - """ - Test setting the Glusterfs to its self reported max-op-version - """ - name = "salt" - current = 30707 - new = 31200 - - ret = {"name": name, "result": False, "comment": "", "changes": {}} - - mock_get_version = MagicMock(return_value={}) - mock_get_max_op_version = MagicMock(return_value={}) - mock_set_version = MagicMock(return_value={}) - - with patch.dict( - glusterfs.__salt__, - { - "glusterfs.get_op_version": mock_get_version, - "glusterfs.set_op_version": mock_set_version, - "glusterfs.get_max_op_version": mock_get_max_op_version, - }, - ): - mock_get_version.return_value = [False, "some error message"] - 
ret.update({"result": False}) - ret.update({"comment": "some error message"}) - self.assertDictEqual(glusterfs.max_op_version(name), ret) - - mock_get_version.return_value = current - mock_get_max_op_version.return_value = [False, "some error message"] - ret.update({"result": False}) - ret.update({"comment": "some error message"}) - self.assertDictEqual(glusterfs.max_op_version(name), ret) - - mock_get_version.return_value = current - mock_get_max_op_version.return_value = current - ret.update({"result": True}) - ret.update( - { - "comment": ( - "The cluster.op-version is already set to the" - " cluster.max-op-version of {}".format(current) - ) - } - ) - self.assertDictEqual(glusterfs.max_op_version(name), ret) - - with patch.dict(glusterfs.__opts__, {"test": True}): - mock_get_max_op_version.return_value = new - ret.update({"result": None}) - ret.update( - { - "comment": ( - "An attempt would be made to set the cluster.op-version" - " to {}.".format(new) - ) - } - ) - self.assertDictEqual(glusterfs.max_op_version(name), ret) - - with patch.dict(glusterfs.__opts__, {"test": False}): - mock_set_version.return_value = [False, "Failed to set version"] - ret.update({"result": False}) - ret.update({"comment": "Failed to set version"}) - self.assertDictEqual(glusterfs.max_op_version(name), ret) - - mock_set_version.return_value = "some success message" - ret.update({"comment": "some success message"}) - ret.update({"changes": {"old": current, "new": new}}) - ret.update({"result": True}) - self.assertDictEqual(glusterfs.max_op_version(name), ret) diff --git a/tests/unit/states/test_gnomedesktop.py b/tests/unit/states/test_gnomedesktop.py deleted file mode 100644 index baf0f37b5874..000000000000 --- a/tests/unit/states/test_gnomedesktop.py +++ /dev/null @@ -1,48 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import salt.states.gnomedesktop as gnomedesktop -from tests.support.unit import TestCase - - -class GnomedesktopTestCase(TestCase): - """ - Test cases for 
salt.states.gnomedesktop - """ - - # 'wm_preferences' function tests: 1 - - def test_wm_preferences(self): - """ - Test to sets values in the org.gnome.desktop.wm.preferences schema - """ - name = "salt" - - ret = {"name": name, "result": True, "comment": "", "changes": {}} - - self.assertDictEqual(gnomedesktop.wm_preferences(name), ret) - - # 'desktop_lockdown' function tests: 1 - - def test_desktop_lockdown(self): - """ - Test to sets values in the org.gnome.desktop.lockdown schema - """ - name = "salt" - - ret = {"name": name, "result": True, "comment": "", "changes": {}} - - self.assertDictEqual(gnomedesktop.desktop_lockdown(name), ret) - - # 'desktop_interface' function tests: 1 - - def test_desktop_interface(self): - """ - Test to sets values in the org.gnome.desktop.interface schema - """ - name = "salt" - - ret = {"name": name, "result": True, "comment": "", "changes": {}} - - self.assertDictEqual(gnomedesktop.desktop_interface(name), ret) diff --git a/tests/unit/states/test_heat.py b/tests/unit/states/test_heat.py index c36bc2e98b56..b6b644a665f2 100644 --- a/tests/unit/states/test_heat.py +++ b/tests/unit/states/test_heat.py @@ -5,7 +5,6 @@ import salt.modules.win_file as win_file import salt.states.heat as heat import salt.utils.platform -import salt.utils.win_dacl as dacl import tests.unit.modules.test_heat from tests.support.mixins import LoaderModuleMockMixin from tests.support.mock import MagicMock, patch @@ -38,10 +37,7 @@ def setup_loader_modules(self): "config.backup_mode": MagicMock(return_value=False), }, }, - win_file: { - "__utils__": {"dacl.check_perms": salt.utils.win_dacl.check_perms} - }, - dacl: {"__opts__": {"test": False}}, + win_file: {"__opts__": {"test": False}}, } def setUp(self): diff --git a/tests/unit/states/test_helm.py b/tests/unit/states/test_helm.py deleted file mode 100644 index a96ffa83b26c..000000000000 --- a/tests/unit/states/test_helm.py +++ /dev/null @@ -1,255 +0,0 @@ -import salt.states.helm as helm -from 
tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class HelmTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.modules.helm - """ - - def setup_loader_modules(self): - return {helm: {}} - - def test_repo_managed_import_failed_repo_manage(self): - ret = { - "name": "state_id", - "changes": {}, - "result": False, - "comment": "'helm.repo_manage' modules not available on this minion.", - } - self.assertEqual(helm.repo_managed("state_id"), ret) - - def test_repo_managed_import_failed_repo_update(self): - mock_helm_modules = {"helm.repo_manage": MagicMock(return_value=True)} - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "changes": {}, - "result": False, - "comment": "'helm.repo_update' modules not available on this minion.", - } - self.assertEqual(helm.repo_managed("state_id"), ret) - - def test_repo_managed_is_testing(self): - mock_helm_modules = { - "helm.repo_manage": MagicMock(return_value=True), - "helm.repo_update": MagicMock(return_value=True), - } - with patch.dict(helm.__salt__, mock_helm_modules): - mock__opts__ = {"test": MagicMock(return_value=True)} - with patch.dict(helm.__opts__, mock__opts__): - ret = { - "name": "state_id", - "result": None, - "comment": "Helm repo would have been managed.", - "changes": {}, - } - self.assertEqual(helm.repo_managed("state_id"), ret) - - def test_repo_managed_success(self): - result_changes = {"added": True, "removed": True, "failed": False} - mock_helm_modules = { - "helm.repo_manage": MagicMock(return_value=result_changes), - "helm.repo_update": MagicMock(return_value=True), - } - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "result": True, - "comment": "Repositories were added or removed.", - "changes": result_changes, - } - self.assertEqual(helm.repo_managed("state_id"), ret) - - def 
test_repo_managed_success_with_update(self): - result_changes = {"added": True, "removed": True, "failed": False} - mock_helm_modules = { - "helm.repo_manage": MagicMock(return_value=result_changes), - "helm.repo_update": MagicMock(return_value=True), - } - result_wanted = result_changes - result_wanted.update({"repo_update": True}) - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "result": True, - "comment": "Repositories were added or removed.", - "changes": result_wanted, - } - self.assertEqual(helm.repo_managed("state_id"), ret) - - def test_repo_managed_failed(self): - result_changes = {"added": True, "removed": True, "failed": True} - mock_helm_modules = { - "helm.repo_manage": MagicMock(return_value=result_changes), - "helm.repo_update": MagicMock(return_value=True), - } - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "result": False, - "comment": "Failed to add or remove some repositories.", - "changes": result_changes, - } - self.assertEqual(helm.repo_managed("state_id"), ret) - - def test_repo_updated_import_failed(self): - ret = { - "name": "state_id", - "changes": {}, - "result": False, - "comment": "'helm.repo_update' modules not available on this minion.", - } - self.assertEqual(helm.repo_updated("state_id"), ret) - - def test_repo_updated_is_testing(self): - mock_helm_modules = {"helm.repo_update": MagicMock(return_value=True)} - with patch.dict(helm.__salt__, mock_helm_modules): - mock__opts__ = {"test": MagicMock(return_value=True)} - with patch.dict(helm.__opts__, mock__opts__): - ret = { - "name": "state_id", - "result": None, - "comment": "Helm repo would have been updated.", - "changes": {}, - } - self.assertEqual(helm.repo_updated("state_id"), ret) - - def test_repo_updated_success(self): - mock_helm_modules = {"helm.repo_update": MagicMock(return_value=True)} - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "result": True, - 
"comment": "Helm repo is updated.", - "changes": {}, - } - self.assertEqual(helm.repo_updated("state_id"), ret) - - def test_repo_updated_failed(self): - mock_helm_modules = {"helm.repo_update": MagicMock(return_value=False)} - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "result": False, - "comment": "Failed to sync some repositories.", - "changes": False, - } - self.assertEqual(helm.repo_updated("state_id"), ret) - - def test_release_present_import_failed_helm_status(self): - ret = { - "name": "state_id", - "changes": {}, - "result": False, - "comment": "'helm.status' modules not available on this minion.", - } - self.assertEqual(helm.release_present("state_id", "mychart"), ret) - - def test_release_present_import_failed_helm_install(self): - mock_helm_modules = {"helm.status": MagicMock(return_value=True)} - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "changes": {}, - "result": False, - "comment": "'helm.install' modules not available on this minion.", - } - self.assertEqual(helm.release_present("state_id", "mychart"), ret) - - def test_release_present_import_failed_helm_upgrade(self): - mock_helm_modules = { - "helm.status": MagicMock(return_value=True), - "helm.install": MagicMock(return_value=True), - } - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "changes": {}, - "result": False, - "comment": "'helm.upgrade' modules not available on this minion.", - } - self.assertEqual(helm.release_present("state_id", "mychart"), ret) - - def test_release_present_is_testing(self): - mock_helm_modules = { - "helm.status": MagicMock(return_value=True), - "helm.install": MagicMock(return_value=True), - "helm.upgrade": MagicMock(return_value=True), - } - with patch.dict(helm.__salt__, mock_helm_modules): - mock__opts__ = {"test": MagicMock(return_value=True)} - with patch.dict(helm.__opts__, mock__opts__): - ret = { - "name": "state_id", - "result": None, - 
"comment": "Helm release would have been installed or updated.", - "changes": {}, - } - self.assertEqual(helm.release_present("state_id", "mychart"), ret) - - def test_release_absent_import_failed_helm_uninstall(self): - ret = { - "name": "state_id", - "changes": {}, - "result": False, - "comment": "'helm.uninstall' modules not available on this minion.", - } - self.assertEqual(helm.release_absent("state_id"), ret) - - def test_release_absent_import_failed_helm_status(self): - mock_helm_modules = {"helm.uninstall": MagicMock(return_value=True)} - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "changes": {}, - "result": False, - "comment": "'helm.status' modules not available on this minion.", - } - self.assertEqual(helm.release_absent("state_id"), ret) - - def test_release_absent_is_testing(self): - mock_helm_modules = { - "helm.status": MagicMock(return_value=True), - "helm.uninstall": MagicMock(return_value=True), - } - with patch.dict(helm.__salt__, mock_helm_modules): - mock__opts__ = {"test": MagicMock(return_value=True)} - with patch.dict(helm.__opts__, mock__opts__): - ret = { - "name": "state_id", - "result": None, - "comment": "Helm release would have been uninstalled.", - "changes": {}, - } - self.assertEqual(helm.release_absent("state_id"), ret) - - def test_release_absent_success(self): - mock_helm_modules = { - "helm.status": MagicMock(return_value={}), - "helm.uninstall": MagicMock(return_value=True), - } - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "result": True, - "comment": "Helm release state_id is absent.", - "changes": {"absent": "state_id"}, - } - self.assertEqual(helm.release_absent("state_id"), ret) - - def test_release_absent_error(self): - mock_helm_modules = { - "helm.status": MagicMock(return_value={}), - "helm.uninstall": MagicMock(return_value="error"), - } - with patch.dict(helm.__salt__, mock_helm_modules): - ret = { - "name": "state_id", - "result": 
False, - "comment": "error", - "changes": {}, - } - self.assertEqual(helm.release_absent("state_id"), ret) diff --git a/tests/unit/states/test_hg.py b/tests/unit/states/test_hg.py deleted file mode 100644 index 2aac3e4bd271..000000000000 --- a/tests/unit/states/test_hg.py +++ /dev/null @@ -1,140 +0,0 @@ -""" - :codeauthor: Rahul Handay -""" - -import os - -import salt.states.hg as hg -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class HgTestCase(TestCase, LoaderModuleMockMixin): - """ - Validate the svn state - """ - - def setup_loader_modules(self): - return {hg: {}} - - def test_latest(self): - """ - Test to Make sure the repository is cloned to - the given directory and is up to date - """ - ret = {"changes": {}, "comment": "", "name": "salt", "result": True} - mock = MagicMock(return_value=True) - with patch.object(hg, "_fail", mock): - self.assertTrue(hg.latest("salt")) - - mock = MagicMock(side_effect=[False, True, False, False, False, False]) - with patch.object(os.path, "isdir", mock): - mock = MagicMock(return_value=True) - with patch.object(hg, "_handle_existing", mock): - self.assertTrue(hg.latest("salt", target="c:\\salt")) - - with patch.dict(hg.__opts__, {"test": True}): - mock = MagicMock(return_value=True) - with patch.object(hg, "_neutral_test", mock): - self.assertTrue(hg.latest("salt", target="c:\\salt")) - - with patch.dict(hg.__opts__, {"test": False}): - mock = MagicMock(return_value=True) - with patch.object(hg, "_clone_repo", mock): - self.assertDictEqual(hg.latest("salt", target="c:\\salt"), ret) - - def test_latest_update_changes(self): - """ - Test to make sure we don't update even if we have changes - """ - ret = {"changes": {}, "comment": "", "name": "salt", "result": True} - revision_mock = MagicMock(return_value="abcdef") - pull_mock = MagicMock(return_value="Blah.") - update_mock = MagicMock() - - with patch.dict( - hg.__salt__, - 
{ - "hg.revision": revision_mock, - "hg.pull": pull_mock, - "hg.update": update_mock, - }, - ): - mock = MagicMock(side_effect=[True, True]) - with patch.object(os.path, "isdir", mock): - mock = MagicMock(return_value=True) - with patch.dict(hg.__opts__, {"test": False}): - with patch.object(hg, "_clone_repo", mock): - self.assertDictEqual( - hg.latest("salt", target="c:\\salt", update_head=True), ret - ) - assert update_mock.called - - def test_latest_no_update_changes(self): - """ - Test to make sure we don't update even if we have changes - """ - ret = { - "changes": {}, - "comment": ( - "Update is probably required but update_head=False so we will skip" - " updating." - ), - "name": "salt", - "result": True, - } - revision_mock = MagicMock(return_value="abcdef") - pull_mock = MagicMock(return_value="Blah.") - update_mock = MagicMock() - - with patch.dict( - hg.__salt__, - { - "hg.revision": revision_mock, - "hg.pull": pull_mock, - "hg.update": update_mock, - }, - ): - mock = MagicMock(side_effect=[True, True]) - with patch.object(os.path, "isdir", mock): - mock = MagicMock(return_value=True) - with patch.dict(hg.__opts__, {"test": False}): - with patch.object(hg, "_clone_repo", mock): - self.assertDictEqual( - hg.latest("salt", target="c:\\salt", update_head=False), ret - ) - assert not update_mock.called - - def test_latest_no_update_no_changes(self): - """ - Test to Make sure the repository is cloned to - the given directory and is up to date - """ - ret = { - "changes": {}, - "comment": "No changes found and update_head=False so will skip updating.", - "name": "salt", - "result": True, - } - revision_mock = MagicMock(return_value="abcdef") - pull_mock = MagicMock(return_value="Blah no changes found.") - update_mock = MagicMock() - - with patch.dict( - hg.__salt__, - { - "hg.revision": revision_mock, - "hg.pull": pull_mock, - "hg.update": update_mock, - }, - ): - mock = MagicMock(side_effect=[True, True]) - with patch.object(os.path, "isdir", mock): - mock = 
MagicMock(return_value=True) - with patch.dict(hg.__opts__, {"test": False}): - with patch.object(hg, "_clone_repo", mock): - self.assertDictEqual( - hg.latest("salt", target="c:\\salt", update_head=False), ret - ) - assert not update_mock.called diff --git a/tests/unit/states/test_ini_manage.py b/tests/unit/states/test_ini_manage.py deleted file mode 100644 index ce08736eb648..000000000000 --- a/tests/unit/states/test_ini_manage.py +++ /dev/null @@ -1,172 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import salt.states.ini_manage as ini_manage -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -# pylint: disable=no-member - - -class IniManageTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.ini_manage - """ - - def setup_loader_modules(self): - return {ini_manage: {}} - - # 'options_present' function tests: 1 - - def test_options_present(self): - """ - Test to verify options present in file. 
- """ - name = "salt" - - ret = {"name": name, "result": None, "comment": "", "changes": {}} - - with patch.dict(ini_manage.__opts__, {"test": True}): - comt = "" - ret.update({"comment": comt, "result": True}) - self.assertDictEqual(ini_manage.options_present(name), ret) - - changes = { - "first": "who is on", - "second": "what is on", - "third": "I don't know", - } - with patch.dict( - ini_manage.__salt__, {"ini.set_option": MagicMock(return_value=changes)} - ): - with patch.dict(ini_manage.__opts__, {"test": False}): - comt = "Changes take effect" - ret.update({"comment": comt, "result": True, "changes": changes}) - self.assertDictEqual(ini_manage.options_present(name), ret) - - original = { - "mysection": { - "first": "who is on", - "second": "what is on", - "third": "I don't know", - } - } - desired = {"mysection": {"first": "who is on", "second": "what is on"}} - changes = { - "mysection": { - "first": "who is on", - "second": "what is on", - "third": {"after": None, "before": "I don't know"}, - } - } - with patch.dict( - ini_manage.__salt__, {"ini.get_ini": MagicMock(return_value=original)} - ): - with patch.dict( - ini_manage.__salt__, - {"ini.remove_option": MagicMock(return_value="third")}, - ): - with patch.dict( - ini_manage.__salt__, - {"ini.get_option": MagicMock(return_value="I don't know")}, - ): - with patch.dict( - ini_manage.__salt__, - {"ini.set_option": MagicMock(return_value=desired)}, - ): - with patch.dict(ini_manage.__opts__, {"test": False}): - comt = "Changes take effect" - ret.update( - {"comment": comt, "result": True, "changes": changes} - ) - self.assertDictEqual( - ini_manage.options_present(name, desired, strict=True), - ret, - ) - - # 'options_absent' function tests: 1 - - def test_options_absent(self): - """ - Test to verify options absent in file. - """ - name = "salt" - - ret = {"name": name, "result": None, "comment": "", "changes": {}} - - with patch.dict(ini_manage.__opts__, {"test": True}): - comt = "No changes detected." 
- ret.update({"comment": comt, "result": True}) - self.assertDictEqual(ini_manage.options_absent(name), ret) - - with patch.dict(ini_manage.__opts__, {"test": False}): - comt = "No anomaly detected" - ret.update({"comment": comt, "result": True}) - self.assertDictEqual(ini_manage.options_absent(name), ret) - original = {"Tables": {"key1": "1", "key2": "2", "key3": "3", "key4": "4"}} - sections = {"Tables": ["key2", "key3"]} - changes = {"Tables": {"key2": "2", "key3": "3"}} - with patch.dict( - ini_manage.__salt__, - {"ini.remove_option": MagicMock(side_effect=["2", "3"])}, - ): - with patch.dict(ini_manage.__opts__, {"test": False}): - comt = "Changes take effect" - ret.update({"comment": comt, "result": True, "changes": changes}) - self.assertDictEqual(ini_manage.options_absent(name, sections), ret) - - # 'sections_present' function tests: 1 - - def test_sections_present(self): - """ - Test to verify sections present in file. - """ - name = "salt" - - ret = {"name": name, "result": None, "comment": "", "changes": {}} - - with patch.dict(ini_manage.__opts__, {"test": True}): - with patch.dict( - ini_manage.__salt__, {"ini.get_ini": MagicMock(return_value=None)} - ): - comt = "No changes detected." - ret.update({"comment": comt, "result": True}) - self.assertDictEqual(ini_manage.sections_present(name), ret) - - changes = { - "first": "who is on", - "second": "what is on", - "third": "I don't know", - } - with patch.dict( - ini_manage.__salt__, {"ini.set_option": MagicMock(return_value=changes)} - ): - with patch.dict(ini_manage.__opts__, {"test": False}): - comt = "Changes take effect" - ret.update({"comment": comt, "result": True, "changes": changes}) - self.assertDictEqual(ini_manage.sections_present(name), ret) - - # 'sections_absent' function tests: 1 - - def test_sections_absent(self): - """ - Test to verify sections absent in file. 
- """ - name = "salt" - - ret = {"name": name, "result": None, "comment": "", "changes": {}} - - with patch.dict(ini_manage.__opts__, {"test": True}): - with patch.dict( - ini_manage.__salt__, {"ini.get_ini": MagicMock(return_value=None)} - ): - comt = "No changes detected." - ret.update({"comment": comt, "result": True}) - self.assertDictEqual(ini_manage.sections_absent(name), ret) - - with patch.dict(ini_manage.__opts__, {"test": False}): - comt = "No anomaly detected" - ret.update({"comment": comt, "result": True}) - self.assertDictEqual(ini_manage.sections_absent(name), ret) diff --git a/tests/unit/states/test_ipmi.py b/tests/unit/states/test_ipmi.py deleted file mode 100644 index b6aea2a0452b..000000000000 --- a/tests/unit/states/test_ipmi.py +++ /dev/null @@ -1,176 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import salt.states.ipmi as ipmi -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class IpmiTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.ipmi - """ - - def setup_loader_modules(self): - return {ipmi: {}} - - # 'boot_device' function tests: 1 - - def test_boot_device(self): - """ - Test to request power state change. 
- """ - name = "salt" - - ret = {"name": name, "result": True, "comment": "", "changes": {}} - - mock = MagicMock(return_value=name) - with patch.dict( - ipmi.__salt__, {"ipmi.get_bootdev": mock, "ipmi.set_bootdev": mock} - ): - comt = "system already in this state" - ret.update({"comment": comt}) - self.assertDictEqual(ipmi.boot_device(name), ret) - - with patch.dict(ipmi.__opts__, {"test": False}): - comt = "changed boot device" - ret.update( - { - "name": "default", - "comment": comt, - "result": True, - "changes": {"new": "default", "old": "salt"}, - } - ) - self.assertDictEqual(ipmi.boot_device(), ret) - - with patch.dict(ipmi.__opts__, {"test": True}): - comt = "would change boot device" - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(ipmi.boot_device(), ret) - - # 'power' function tests: 1 - - def test_power(self): - """ - Test to request power state change - """ - ret = {"name": "power_on", "result": True, "comment": "", "changes": {}} - - mock = MagicMock(return_value="on") - with patch.dict( - ipmi.__salt__, {"ipmi.get_power": mock, "ipmi.set_power": mock} - ): - comt = "system already in this state" - ret.update({"comment": comt}) - self.assertDictEqual(ipmi.power(), ret) - - with patch.dict(ipmi.__opts__, {"test": False}): - comt = "changed system power" - ret.update( - { - "name": "off", - "comment": comt, - "result": True, - "changes": {"new": "off", "old": "on"}, - } - ) - self.assertDictEqual(ipmi.power("off"), ret) - - with patch.dict(ipmi.__opts__, {"test": True}): - comt = "would power: off system" - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(ipmi.power("off"), ret) - - # 'user_present' function tests: 1 - - def test_user_present(self): - """ - Test to ensure IPMI user and user privileges. 
- """ - name = "salt" - - ret = {"name": name, "result": True, "comment": "", "changes": {}} - - mock_ret = { - "access": { - "callback": False, - "link_auth": True, - "ipmi_msg": True, - "privilege_level": "administrator", - } - } - mock = MagicMock(return_value=mock_ret) - mock_bool = MagicMock(side_effect=[True, False, False, False]) - with patch.dict( - ipmi.__salt__, - { - "ipmi.get_user": mock, - "ipmi.set_user_password": mock_bool, - "ipmi.ensure_user": mock_bool, - }, - ): - comt = "user already present" - ret.update({"comment": comt}) - self.assertDictEqual(ipmi.user_present(name, 5, "salt@123"), ret) - - with patch.dict(ipmi.__opts__, {"test": True}): - comt = "would (re)create user" - ret.update( - { - "comment": comt, - "result": None, - "changes": {"new": "salt", "old": mock_ret}, - } - ) - self.assertDictEqual(ipmi.user_present(name, 5, "pw@123"), ret) - - with patch.dict(ipmi.__opts__, {"test": False}): - comt = "(re)created user" - ret.update( - { - "comment": comt, - "result": True, - "changes": {"new": mock_ret, "old": mock_ret}, - } - ) - self.assertDictEqual(ipmi.user_present(name, 5, "pw@123"), ret) - - # 'user_absent' function tests: 1 - - def test_user_absent(self): - """ - Test to delete all user (uid) records having the matching name. 
- """ - name = "salt" - - ret = {"name": name, "result": True, "comment": "", "changes": {}} - - mock = MagicMock(side_effect=[[], [5], [5]]) - mock_bool = MagicMock(return_value=True) - with patch.dict( - ipmi.__salt__, {"ipmi.get_name_uids": mock, "ipmi.delete_user": mock_bool} - ): - comt = "user already absent" - ret.update({"comment": comt}) - self.assertDictEqual(ipmi.user_absent(name), ret) - - with patch.dict(ipmi.__opts__, {"test": True}): - comt = "would delete user(s)" - ret.update( - {"comment": comt, "result": None, "changes": {"delete": [5]}} - ) - self.assertDictEqual(ipmi.user_absent(name), ret) - - with patch.dict(ipmi.__opts__, {"test": False}): - comt = "user(s) removed" - ret.update( - { - "comment": comt, - "result": False, - "changes": {"new": "None", "old": [5]}, - } - ) - self.assertDictEqual(ipmi.user_absent(name), ret) diff --git a/tests/unit/states/test_jboss7.py b/tests/unit/states/test_jboss7.py deleted file mode 100644 index 607ba4f42427..000000000000 --- a/tests/unit/states/test_jboss7.py +++ /dev/null @@ -1,745 +0,0 @@ -# pylint: disable=unused-argument - - -import salt.states.jboss7 as jboss7 -from salt.exceptions import CommandExecutionError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class JBoss7StateTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - return { - jboss7: { - "__salt__": { - "jboss7.read_datasource": MagicMock(), - "jboss7.create_datasource": MagicMock(), - "jboss7.update_datasource": MagicMock(), - "jboss7.remove_datasource": MagicMock(), - "jboss7.read_simple_binding": MagicMock(), - "jboss7.create_simple_binding": MagicMock(), - "jboss7.update_simple_binding": MagicMock(), - "jboss7.undeploy": MagicMock(), - "jboss7.deploy": MagicMock, - "file.get_managed": MagicMock, - "file.manage_file": MagicMock, - "jboss7.list_deployments": MagicMock, - }, - "__env__": "base", - } - } - - def 
test_should_not_redeploy_unchanged(self): - # given - parameters = { - "target_file": "some_artifact", - "undeploy_force": False, - "undeploy": "some_artifact", - "source": "some_artifact_on_master", - } - jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"} - - def list_deployments(jboss_config): - return ["some_artifact"] - - def file_get_managed( - name, - template, - source, - source_hash, - source_hash_name, - user, - group, - mode, - attrs, - saltenv, - context, - defaults, - skip_verify, - kwargs, - ): - return "sfn", "hash", "" - - def file_manage_file( - name, - sfn, - ret, - source, - source_sum, - user, - group, - mode, - attrs, - saltenv, - backup, - makedirs, - template, - show_diff, - contents, - dir_mode, - ): - return {"result": True, "changes": False} - - jboss7_undeploy_mock = MagicMock() - jboss7_deploy_mock = MagicMock() - file_get_managed = MagicMock(side_effect=file_get_managed) - file_manage_file = MagicMock(side_effect=file_manage_file) - list_deployments_mock = MagicMock(side_effect=list_deployments) - with patch.dict( - jboss7.__salt__, - { - "jboss7.undeploy": jboss7_undeploy_mock, - "jboss7.deploy": jboss7_deploy_mock, - "file.get_managed": file_get_managed, - "file.manage_file": file_manage_file, - "jboss7.list_deployments": list_deployments_mock, - }, - ): - # when - result = jboss7.deployed( - name="unchanged", jboss_config=jboss_conf, salt_source=parameters - ) - - # then - self.assertFalse(jboss7_undeploy_mock.called) - self.assertFalse(jboss7_deploy_mock.called) - - def test_should_redeploy_changed(self): - # given - parameters = { - "target_file": "some_artifact", - "undeploy_force": False, - "undeploy": "some_artifact", - "source": "some_artifact_on_master", - } - jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"} - - def list_deployments(jboss_config): - return ["some_artifact"] - - def file_get_managed( - name, - template, - source, - source_hash, - source_hash_name, - user, - group, - 
mode, - attrs, - saltenv, - context, - defaults, - skip_verify, - kwargs, - ): - return "sfn", "hash", "" - - def file_manage_file( - name, - sfn, - ret, - source, - source_sum, - user, - group, - mode, - attrs, - saltenv, - backup, - makedirs, - template, - show_diff, - contents, - dir_mode, - ): - return {"result": True, "changes": True} - - jboss7_undeploy_mock = MagicMock() - jboss7_deploy_mock = MagicMock() - file_get_managed = MagicMock(side_effect=file_get_managed) - file_manage_file = MagicMock(side_effect=file_manage_file) - list_deployments_mock = MagicMock(side_effect=list_deployments) - with patch.dict( - jboss7.__salt__, - { - "jboss7.undeploy": jboss7_undeploy_mock, - "jboss7.deploy": jboss7_deploy_mock, - "file.get_managed": file_get_managed, - "file.manage_file": file_manage_file, - "jboss7.list_deployments": list_deployments_mock, - }, - ): - # when - result = jboss7.deployed( - name="unchanged", jboss_config=jboss_conf, salt_source=parameters - ) - - # then - self.assertTrue(jboss7_undeploy_mock.called) - self.assertTrue(jboss7_deploy_mock.called) - - def test_should_deploy_different_artifact(self): - # given - parameters = { - "target_file": "some_artifact", - "undeploy_force": False, - "undeploy": "some_artifact", - "source": "some_artifact_on_master", - } - jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"} - - def list_deployments(jboss_config): - return ["some_other_artifact"] - - def file_get_managed( - name, - template, - source, - source_hash, - source_hash_name, - user, - group, - mode, - attrs, - saltenv, - context, - defaults, - skip_verify, - kwargs, - ): - return "sfn", "hash", "" - - def file_manage_file( - name, - sfn, - ret, - source, - source_sum, - user, - group, - mode, - attrs, - saltenv, - backup, - makedirs, - template, - show_diff, - contents, - dir_mode, - ): - return {"result": True, "changes": False} - - jboss7_undeploy_mock = MagicMock() - jboss7_deploy_mock = MagicMock() - file_get_managed = 
MagicMock(side_effect=file_get_managed) - file_manage_file = MagicMock(side_effect=file_manage_file) - list_deployments_mock = MagicMock(side_effect=list_deployments) - with patch.dict( - jboss7.__salt__, - { - "jboss7.undeploy": jboss7_undeploy_mock, - "jboss7.deploy": jboss7_deploy_mock, - "file.get_managed": file_get_managed, - "file.manage_file": file_manage_file, - "jboss7.list_deployments": list_deployments_mock, - }, - ): - # when - result = jboss7.deployed( - name="unchanged", jboss_config=jboss_conf, salt_source=parameters - ) - - # then - self.assertFalse(jboss7_undeploy_mock.called) - self.assertTrue(jboss7_deploy_mock.called) - - def test_should_redploy_undeploy_force(self): - # given - parameters = { - "target_file": "some_artifact", - "undeploy_force": True, - "undeploy": "some_artifact", - "source": "some_artifact_on_master", - } - jboss_conf = {"cli_path": "somewhere", "controller": "some_controller"} - - def list_deployments(jboss_config): - return ["some_artifact"] - - def file_get_managed( - name, - template, - source, - source_hash, - source_hash_name, - user, - group, - mode, - attrs, - saltenv, - context, - defaults, - skip_verify, - kwargs, - ): - return "sfn", "hash", "" - - def file_manage_file( - name, - sfn, - ret, - source, - source_sum, - user, - group, - mode, - attrs, - saltenv, - backup, - makedirs, - template, - show_diff, - contents, - dir_mode, - ): - return {"result": True, "changes": False} - - jboss7_undeploy_mock = MagicMock() - jboss7_deploy_mock = MagicMock() - file_get_managed = MagicMock(side_effect=file_get_managed) - file_manage_file = MagicMock(side_effect=file_manage_file) - list_deployments_mock = MagicMock(side_effect=list_deployments) - with patch.dict( - jboss7.__salt__, - { - "jboss7.undeploy": jboss7_undeploy_mock, - "jboss7.deploy": jboss7_deploy_mock, - "file.get_managed": file_get_managed, - "file.manage_file": file_manage_file, - "jboss7.list_deployments": list_deployments_mock, - }, - ): - # when - result = 
jboss7.deployed( - name="unchanged", jboss_config=jboss_conf, salt_source=parameters - ) - - # then - self.assertTrue(jboss7_undeploy_mock.called) - self.assertTrue(jboss7_deploy_mock.called) - - def test_should_create_new_datasource_if_not_exists(self): - # given - datasource_properties = {"connection-url": "jdbc:/old-connection-url"} - ds_status = {"created": False} - - def read_func(jboss_config, name, profile): - if ds_status["created"]: - return {"success": True, "result": datasource_properties} - else: - return {"success": False, "err_code": "JBAS014807"} - - def create_func(jboss_config, name, datasource_properties, profile): - ds_status["created"] = True - return {"success": True} - - read_mock = MagicMock(side_effect=read_func) - create_mock = MagicMock(side_effect=create_func) - update_mock = MagicMock() - with patch.dict( - jboss7.__salt__, - { - "jboss7.read_datasource": read_mock, - "jboss7.create_datasource": create_mock, - "jboss7.update_datasource": update_mock, - }, - ): - - # when - result = jboss7.datasource_exists( - name="appDS", - jboss_config={}, - datasource_properties=datasource_properties, - profile=None, - ) - - # then - create_mock.assert_called_with( - name="appDS", - jboss_config={}, - datasource_properties=datasource_properties, - profile=None, - ) - - self.assertFalse(update_mock.called) - self.assertEqual(result["comment"], "Datasource created.") - - def test_should_update_the_datasource_if_exists(self): - ds_status = {"updated": False} - - def read_func(jboss_config, name, profile): - if ds_status["updated"]: - return { - "success": True, - "result": {"connection-url": "jdbc:/new-connection-url"}, - } - else: - return { - "success": True, - "result": {"connection-url": "jdbc:/old-connection-url"}, - } - - def update_func(jboss_config, name, new_properties, profile): - ds_status["updated"] = True - return {"success": True} - - read_mock = MagicMock(side_effect=read_func) - create_mock = MagicMock() - update_mock = 
MagicMock(side_effect=update_func) - with patch.dict( - jboss7.__salt__, - { - "jboss7.read_datasource": read_mock, - "jboss7.create_datasource": create_mock, - "jboss7.update_datasource": update_mock, - }, - ): - result = jboss7.datasource_exists( - name="appDS", - jboss_config={}, - datasource_properties={"connection-url": "jdbc:/new-connection-url"}, - profile=None, - ) - - update_mock.assert_called_with( - name="appDS", - jboss_config={}, - new_properties={"connection-url": "jdbc:/new-connection-url"}, - profile=None, - ) - self.assertTrue(read_mock.called) - self.assertEqual(result["comment"], "Datasource updated.") - - def test_should_recreate_the_datasource_if_specified(self): - read_mock = MagicMock( - return_value={ - "success": True, - "result": {"connection-url": "jdbc:/same-connection-url"}, - } - ) - create_mock = MagicMock(return_value={"success": True}) - remove_mock = MagicMock(return_value={"success": True}) - update_mock = MagicMock() - with patch.dict( - jboss7.__salt__, - { - "jboss7.read_datasource": read_mock, - "jboss7.create_datasource": create_mock, - "jboss7.remove_datasource": remove_mock, - "jboss7.update_datasource": update_mock, - }, - ): - result = jboss7.datasource_exists( - name="appDS", - jboss_config={}, - datasource_properties={"connection-url": "jdbc:/same-connection-url"}, - recreate=True, - ) - - remove_mock.assert_called_with(name="appDS", jboss_config={}, profile=None) - create_mock.assert_called_with( - name="appDS", - jboss_config={}, - datasource_properties={"connection-url": "jdbc:/same-connection-url"}, - profile=None, - ) - self.assertEqual(result["changes"]["removed"], "appDS") - self.assertEqual(result["changes"]["created"], "appDS") - - def test_should_inform_if_the_datasource_has_not_changed(self): - read_mock = MagicMock( - return_value={ - "success": True, - "result": {"connection-url": "jdbc:/same-connection-url"}, - } - ) - create_mock = MagicMock() - remove_mock = MagicMock() - update_mock = 
MagicMock(return_value={"success": True}) - - with patch.dict( - jboss7.__salt__, - { - "jboss7.read_datasource": read_mock, - "jboss7.create_datasource": create_mock, - "jboss7.remove_datasource": remove_mock, - "jboss7.update_datasource": update_mock, - }, - ): - result = jboss7.datasource_exists( - name="appDS", - jboss_config={}, - datasource_properties={"connection-url": "jdbc:/old-connection-url"}, - ) - - update_mock.assert_called_with( - name="appDS", - jboss_config={}, - new_properties={"connection-url": "jdbc:/old-connection-url"}, - profile=None, - ) - self.assertFalse(create_mock.called) - self.assertEqual(result["comment"], "Datasource not changed.") - - def test_should_create_binding_if_not_exists(self): - # given - binding_status = {"created": False} - - def read_func(jboss_config, binding_name, profile): - if binding_status["created"]: - return {"success": True, "result": {"value": "DEV"}} - else: - return {"success": False, "err_code": "JBAS014807"} - - def create_func(jboss_config, binding_name, value, profile): - binding_status["created"] = True - return {"success": True} - - read_mock = MagicMock(side_effect=read_func) - create_mock = MagicMock(side_effect=create_func) - update_mock = MagicMock() - - with patch.dict( - jboss7.__salt__, - { - "jboss7.read_simple_binding": read_mock, - "jboss7.create_simple_binding": create_mock, - "jboss7.update_simple_binding": update_mock, - }, - ): - - # when - result = jboss7.bindings_exist( - name="bindings", jboss_config={}, bindings={"env": "DEV"}, profile=None - ) - - # then - create_mock.assert_called_with( - jboss_config={}, binding_name="env", value="DEV", profile=None - ) - self.assertEqual(update_mock.call_count, 0) - self.assertEqual(result["changes"], {"added": "env:DEV\n"}) - self.assertEqual(result["comment"], "Bindings changed.") - - def test_should_update_bindings_if_exists_and_different(self): - # given - binding_status = {"updated": False} - - def read_func(jboss_config, binding_name, 
profile): - if binding_status["updated"]: - return {"success": True, "result": {"value": "DEV2"}} - else: - return {"success": True, "result": {"value": "DEV"}} - - def update_func(jboss_config, binding_name, value, profile): - binding_status["updated"] = True - return {"success": True} - - read_mock = MagicMock(side_effect=read_func) - create_mock = MagicMock() - update_mock = MagicMock(side_effect=update_func) - - with patch.dict( - jboss7.__salt__, - { - "jboss7.read_simple_binding": read_mock, - "jboss7.create_simple_binding": create_mock, - "jboss7.update_simple_binding": update_mock, - }, - ): - # when - result = jboss7.bindings_exist( - name="bindings", jboss_config={}, bindings={"env": "DEV2"}, profile=None - ) - - # then - update_mock.assert_called_with( - jboss_config={}, binding_name="env", value="DEV2", profile=None - ) - self.assertEqual(create_mock.call_count, 0) - self.assertEqual(result["changes"], {"changed": "env:DEV->DEV2\n"}) - self.assertEqual(result["comment"], "Bindings changed.") - - def test_should_not_update_bindings_if_same(self): - # given - read_mock = MagicMock( - return_value={"success": True, "result": {"value": "DEV2"}} - ) - create_mock = MagicMock() - update_mock = MagicMock() - - with patch.dict( - jboss7.__salt__, - { - "jboss7.read_simple_binding": read_mock, - "jboss7.create_simple_binding": create_mock, - "jboss7.update_simple_binding": update_mock, - }, - ): - # when - result = jboss7.bindings_exist( - name="bindings", jboss_config={}, bindings={"env": "DEV2"} - ) - - # then - self.assertEqual(create_mock.call_count, 0) - self.assertEqual(update_mock.call_count, 0) - self.assertEqual(result["changes"], {}) - self.assertEqual(result["comment"], "Bindings not changed.") - - def test_should_raise_exception_if_cannot_create_binding(self): - def read_func(jboss_config, binding_name, profile): - return {"success": False, "err_code": "JBAS014807"} - - def create_func(jboss_config, binding_name, value, profile): - return {"success": 
False, "failure-description": "Incorrect binding name."} - - read_mock = MagicMock(side_effect=read_func) - create_mock = MagicMock(side_effect=create_func) - update_mock = MagicMock() - - with patch.dict( - jboss7.__salt__, - { - "jboss7.read_simple_binding": read_mock, - "jboss7.create_simple_binding": create_mock, - "jboss7.update_simple_binding": update_mock, - }, - ): - # when - try: - jboss7.bindings_exist( - name="bindings", - jboss_config={}, - bindings={"env": "DEV2"}, - profile=None, - ) - self.fail("An exception should be thrown") - except CommandExecutionError as e: - self.assertEqual(str(e), "Incorrect binding name.") - - def test_should_raise_exception_if_cannot_update_binding(self): - def read_func(jboss_config, binding_name, profile): - return {"success": True, "result": {"value": "DEV"}} - - def update_func(jboss_config, binding_name, value, profile): - return {"success": False, "failure-description": "Incorrect binding name."} - - read_mock = MagicMock(side_effect=read_func) - create_mock = MagicMock() - update_mock = MagicMock(side_effect=update_func) - - with patch.dict( - jboss7.__salt__, - { - "jboss7.read_simple_binding": read_mock, - "jboss7.create_simple_binding": create_mock, - "jboss7.update_simple_binding": update_mock, - }, - ): - - # when - try: - jboss7.bindings_exist( - name="bindings", - jboss_config={}, - bindings={"env": "!@#!///some weird value"}, - profile=None, - ) - self.fail("An exception should be thrown") - except CommandExecutionError as e: - self.assertEqual(str(e), "Incorrect binding name.") - - def test_datasource_exist_create_datasource_good_code(self): - jboss_config = { - "cli_path": "/home/ch44d/Desktop/wildfly-18.0.0.Final/bin/jboss-cli.sh", - "controller": "127.0.0.1: 9990", - "cli_user": "user", - "cli_password": "user", - } - - datasource_properties = { - "driver - name": "h2", - "connection - url": "jdbc:sqlserver://127.0.0.1:1433;DatabaseName=test_s2", - "jndi - name": ( - 
"java:/home/ch44d/Desktop/sqljdbc_7.4/enu/mssql-jdbc-7.4.1.jre8.jar" - ), - "user - name": "user", - "password": "user", - "use - java - context": True, - } - - read_datasource = MagicMock( - return_value={"success": False, "err_code": "WFLYCTL0216"} - ) - - error_msg = "Error: -1" - create_datasource = MagicMock( - return_value={"success": False, "stdout": error_msg} - ) - - with patch.dict( - jboss7.__salt__, - { - "jboss7.read_datasource": read_datasource, - "jboss7.create_datasource": create_datasource, - }, - ): - ret = jboss7.datasource_exists("SQL", jboss_config, datasource_properties) - - self.assertTrue("result" in ret) - self.assertFalse(ret["result"]) - self.assertTrue("comment" in ret) - self.assertTrue(error_msg in ret["comment"]) - - read_datasource.assert_called_once() - create_datasource.assert_called_once() - - def test_datasource_exist_create_datasource_bad_code(self): - jboss_config = { - "cli_path": "/home/ch44d/Desktop/wildfly-18.0.0.Final/bin/jboss-cli.sh", - "controller": "127.0.0.1: 9990", - "cli_user": "user", - "cli_password": "user", - } - - datasource_properties = { - "driver - name": "h2", - "connection - url": "jdbc:sqlserver://127.0.0.1:1433;DatabaseName=test_s2", - "jndi - name": ( - "java:/home/ch44d/Desktop/sqljdbc_7.4/enu/mssql-jdbc-7.4.1.jre8.jar" - ), - "user - name": "user", - "password": "user", - "use - java - context": True, - } - - read_datasource = MagicMock( - return_value={ - "success": False, - "err_code": "WFLYCTL0217", - "failure-description": "Something happened", - } - ) - - with patch.dict(jboss7.__salt__, {"jboss7.read_datasource": read_datasource}): - self.assertRaises( - CommandExecutionError, - jboss7.datasource_exists, - "SQL", - jboss_config, - datasource_properties, - ) - read_datasource.assert_called_once() diff --git a/tests/unit/states/test_kapacitor.py b/tests/unit/states/test_kapacitor.py deleted file mode 100644 index 2a7b78b96ab6..000000000000 --- a/tests/unit/states/test_kapacitor.py +++ /dev/null @@ 
-1,130 +0,0 @@ -import salt.states.kapacitor as kapacitor -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import Mock, mock_open, patch -from tests.support.unit import TestCase - - -def _present( - name="testname", - tick_script="/tmp/script.tick", - task_type="stream", - database="testdb", - retention_policy="default", - dbrps=None, - enable=True, - task=None, - define_result=True, - enable_result=True, - disable_result=True, - script="testscript", -): - """ - Run a "kapacitor.present" state after setting up mocks, and return the - state return value as well as the mocks to make assertions. - """ - get_mock = Mock(return_value=task) - - if isinstance(define_result, bool): - define_result = {"success": define_result} - define_mock = Mock(return_value=define_result) - - if isinstance(enable_result, bool): - enable_result = {"success": enable_result} - enable_mock = Mock(return_value=enable_result) - - if isinstance(disable_result, bool): - disable_result = {"success": disable_result} - disable_mock = Mock(return_value=disable_result) - - with patch.dict( - kapacitor.__salt__, - { - "kapacitor.get_task": get_mock, - "kapacitor.define_task": define_mock, - "kapacitor.enable_task": enable_mock, - "kapacitor.disable_task": disable_mock, - }, - ): - with patch("salt.utils.files.fopen", mock_open(read_data=script)) as open_mock: - retval = kapacitor.task_present( - name, - tick_script, - task_type=task_type, - database=database, - retention_policy=retention_policy, - enable=enable, - dbrps=dbrps, - ) - - return retval, get_mock, define_mock, enable_mock, disable_mock - - -def _task( - script="testscript", enabled=True, task_type="stream", db="testdb", rp="default" -): - return { - "script": script, - "enabled": enabled, - "type": task_type, - "dbrps": [{"db": db, "rp": rp}], - } - - -class KapacitorTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - return {kapacitor: {"__opts__": {"test": False}, "__env__": 
"test"}} - - def test_task_present_new_task(self): - ret, get_mock, define_mock, enable_mock, _ = _present( - dbrps=["testdb2.default_rp"] - ) - get_mock.assert_called_once_with("testname") - define_mock.assert_called_once_with( - "testname", - "/tmp/script.tick", - database="testdb", - retention_policy="default", - task_type="stream", - dbrps=["testdb2.default_rp", "testdb.default"], - ) - enable_mock.assert_called_once_with("testname") - self.assertIn("TICKscript diff", ret["changes"]) - self.assertIn("enabled", ret["changes"]) - self.assertEqual(True, ret["changes"]["enabled"]["new"]) - - def test_task_present_existing_task_updated_script(self): - ret, get_mock, define_mock, enable_mock, _ = _present( - task=_task(script="oldscript") - ) - get_mock.assert_called_once_with("testname") - define_mock.assert_called_once_with( - "testname", - "/tmp/script.tick", - database="testdb", - retention_policy="default", - task_type="stream", - dbrps=["testdb.default"], - ) - self.assertEqual(False, enable_mock.called) - self.assertIn("TICKscript diff", ret["changes"]) - self.assertNotIn("enabled", ret["changes"]) - - def test_task_present_existing_task_not_enabled(self): - ret, get_mock, define_mock, enable_mock, _ = _present(task=_task(enabled=False)) - get_mock.assert_called_once_with("testname") - self.assertEqual(False, define_mock.called) - enable_mock.assert_called_once_with("testname") - self.assertNotIn("diff", ret["changes"]) - self.assertIn("enabled", ret["changes"]) - self.assertEqual(True, ret["changes"]["enabled"]["new"]) - - def test_task_present_disable_existing_task(self): - ret, get_mock, define_mock, _, disable_mock = _present( - task=_task(), enable=False - ) - get_mock.assert_called_once_with("testname") - self.assertEqual(False, define_mock.called) - disable_mock.assert_called_once_with("testname") - self.assertNotIn("diff", ret["changes"]) - self.assertIn("enabled", ret["changes"]) - self.assertEqual(False, ret["changes"]["enabled"]["new"]) diff --git 
a/tests/unit/states/test_kernelpkg.py b/tests/unit/states/test_kernelpkg.py deleted file mode 100644 index 4406dbe0393e..000000000000 --- a/tests/unit/states/test_kernelpkg.py +++ /dev/null @@ -1,160 +0,0 @@ -""" - :synopsis: Unit Tests for 'module.aptkernelpkg' - :platform: Linux - :maturity: develop - versionadded:: 2018.3.0 -""" -# pylint: disable=invalid-name,no-member -import pytest - -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -try: - import salt.states.kernelpkg as kernelpkg - - HAS_MODULES = True -except ImportError: - HAS_MODULES = False - -KERNEL_LIST = ["4.4.0-70-generic", "4.4.0-71-generic", "4.5.1-14-generic"] -STATE_NAME = "kernelpkg-test" - - -@pytest.mark.skipif(not HAS_MODULES, reason="Salt modules could not be loaded") -class KernelPkgTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.aptpkg - """ - - def setup_loader_modules(self): - return { - kernelpkg: { - "__salt__": { - "system.reboot": MagicMock(return_value=None), - "kernelpkg.upgrade": MagicMock( - return_value={ - "upgrades": {"kernel": {"old": "1.0.0", "new": "2.0.0"}} - } - ), - "kernelpkg.active": MagicMock(return_value=0), - "kernelpkg.latest_installed": MagicMock(return_value=0), - } - } - } - - def test_latest_installed_with_changes(self): - """ - Test - latest_installed when an upgrade is available - """ - installed = MagicMock(return_value=KERNEL_LIST[:-1]) - upgrade = MagicMock(return_value=KERNEL_LIST[-1]) - with patch.dict(kernelpkg.__salt__, {"kernelpkg.list_installed": installed}): - with patch.dict( - kernelpkg.__salt__, {"kernelpkg.latest_available": upgrade} - ): - with patch.dict(kernelpkg.__opts__, {"test": False}): - kernelpkg.__salt__["kernelpkg.upgrade"].reset_mock() - ret = kernelpkg.latest_installed(name=STATE_NAME) - self.assertEqual(ret["name"], STATE_NAME) - self.assertTrue(ret["result"]) - self.assertIsInstance(ret["changes"], dict) 
- self.assertIsInstance(ret["comment"], str) - kernelpkg.__salt__["kernelpkg.upgrade"].assert_called_once() - - with patch.dict(kernelpkg.__opts__, {"test": True}): - kernelpkg.__salt__["kernelpkg.upgrade"].reset_mock() - ret = kernelpkg.latest_installed(name=STATE_NAME) - self.assertEqual(ret["name"], STATE_NAME) - self.assertIsNone(ret["result"]) - self.assertDictEqual(ret["changes"], {}) - self.assertIsInstance(ret["comment"], str) - kernelpkg.__salt__["kernelpkg.upgrade"].assert_not_called() - - def test_latest_installed_at_latest(self): - """ - Test - latest_installed when no upgrade is available - """ - installed = MagicMock(return_value=KERNEL_LIST) - upgrade = MagicMock(return_value=KERNEL_LIST[-1]) - with patch.dict(kernelpkg.__salt__, {"kernelpkg.list_installed": installed}): - with patch.dict( - kernelpkg.__salt__, {"kernelpkg.latest_available": upgrade} - ): - with patch.dict(kernelpkg.__opts__, {"test": False}): - ret = kernelpkg.latest_installed(name=STATE_NAME) - self.assertEqual(ret["name"], STATE_NAME) - self.assertTrue(ret["result"]) - self.assertDictEqual(ret["changes"], {}) - self.assertIsInstance(ret["comment"], str) - kernelpkg.__salt__["kernelpkg.upgrade"].assert_not_called() - - with patch.dict(kernelpkg.__opts__, {"test": True}): - ret = kernelpkg.latest_installed(name=STATE_NAME) - self.assertEqual(ret["name"], STATE_NAME) - self.assertTrue(ret["result"]) - self.assertDictEqual(ret["changes"], {}) - self.assertIsInstance(ret["comment"], str) - kernelpkg.__salt__["kernelpkg.upgrade"].assert_not_called() - - def test_latest_active_with_changes(self): - """ - Test - latest_active when a new kernel is available - """ - reboot = MagicMock(return_value=True) - latest = MagicMock(return_value=1) - with patch.dict( - kernelpkg.__salt__, - {"kernelpkg.needs_reboot": reboot, "kernelpkg.latest_installed": latest}, - ), patch.dict(kernelpkg.__opts__, {"test": False}): - kernelpkg.__salt__["system.reboot"].reset_mock() - ret = 
kernelpkg.latest_active(name=STATE_NAME) - self.assertEqual(ret["name"], STATE_NAME) - self.assertTrue(ret["result"]) - self.assertIsInstance(ret["changes"], dict) - self.assertIsInstance(ret["comment"], str) - kernelpkg.__salt__["system.reboot"].assert_called_once() - - with patch.dict(kernelpkg.__opts__, {"test": True}): - kernelpkg.__salt__["system.reboot"].reset_mock() - ret = kernelpkg.latest_active(name=STATE_NAME) - self.assertEqual(ret["name"], STATE_NAME) - self.assertIsNone(ret["result"]) - self.assertDictEqual(ret["changes"], {"kernel": {"new": 1, "old": 0}}) - self.assertIsInstance(ret["comment"], str) - kernelpkg.__salt__["system.reboot"].assert_not_called() - - def test_latest_active_at_latest(self): - """ - Test - latest_active when the newest kernel is already active - """ - reboot = MagicMock(return_value=False) - with patch.dict(kernelpkg.__salt__, {"kernelpkg.needs_reboot": reboot}): - with patch.dict(kernelpkg.__opts__, {"test": False}): - kernelpkg.__salt__["system.reboot"].reset_mock() - ret = kernelpkg.latest_active(name=STATE_NAME) - self.assertEqual(ret["name"], STATE_NAME) - self.assertTrue(ret["result"]) - self.assertDictEqual(ret["changes"], {}) - self.assertIsInstance(ret["comment"], str) - kernelpkg.__salt__["system.reboot"].assert_not_called() - - with patch.dict(kernelpkg.__opts__, {"test": True}): - kernelpkg.__salt__["system.reboot"].reset_mock() - ret = kernelpkg.latest_active(name=STATE_NAME) - self.assertEqual(ret["name"], STATE_NAME) - self.assertTrue(ret["result"]) - self.assertDictEqual(ret["changes"], {}) - self.assertIsInstance(ret["comment"], str) - kernelpkg.__salt__["system.reboot"].assert_not_called() - - def test_latest_wait(self): - """ - Test - latest_wait static results - """ - ret = kernelpkg.latest_wait(name=STATE_NAME) - self.assertEqual(ret["name"], STATE_NAME) - self.assertTrue(ret["result"]) - self.assertDictEqual(ret["changes"], {}) - self.assertIsInstance(ret["comment"], str) diff --git 
a/tests/unit/states/test_keystone.py b/tests/unit/states/test_keystone.py deleted file mode 100644 index 860b48e4ae0e..000000000000 --- a/tests/unit/states/test_keystone.py +++ /dev/null @@ -1,413 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import salt.states.keystone as keystone -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class KeystoneTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.keystone - """ - - def setup_loader_modules(self): - return {keystone: {}} - - # 'user_present' function tests: 1 - - def test_user_present(self): - """ - Test to ensure that the keystone user is present - with the specified properties. - """ - name = "nova" - password = "$up3rn0v4" - email = "nova@domain.com" - tenant = "demo" - - ret = {"name": name, "result": False, "changes": {}, "comment": ""} - - mock_f = MagicMock(return_value=False) - mock_lst = MagicMock(return_value=["Error"]) - with patch.dict(keystone.__salt__, {"keystone.tenant_get": mock_lst}): - comt = 'Tenant / project "{}" does not exist'.format(tenant) - ret.update({"comment": comt}) - self.assertDictEqual( - keystone.user_present(name, password, email, tenant), ret - ) - - mock_dict = MagicMock( - side_effect=[ - {name: {"email": "a@a.com"}}, - {name: {"email": email, "enabled": False}}, - {name: {"email": email, "enabled": True}}, - {name: {"email": email, "enabled": True}}, - {"Error": "error"}, - {"Error": "error"}, - ] - ) - mock_l = MagicMock(return_value={tenant: {"id": "abc"}}) - with patch.dict( - keystone.__salt__, - { - "keystone.user_get": mock_dict, - "keystone.tenant_get": mock_l, - "keystone.user_verify_password": mock_f, - "keystone.user_create": mock_f, - }, - ): - with patch.dict(keystone.__opts__, {"test": True}): - comt = 'User "{}" will be updated'.format(name) - ret.update( - { - "comment": comt, - "result": None, - "changes": { - "Email": "Will be 
updated", - "Enabled": "Will be True", - "Password": "Will be updated", - }, - } - ) - self.assertDictEqual(keystone.user_present(name, password, email), ret) - - ret.update( - { - "comment": comt, - "result": None, - "changes": { - "Enabled": "Will be True", - "Password": "Will be updated", - }, - } - ) - self.assertDictEqual(keystone.user_present(name, password, email), ret) - - ret.update( - { - "comment": comt, - "result": None, - "changes": { - "Tenant": 'Will be added to "demo" tenant', - "Password": "Will be updated", - }, - } - ) - self.assertDictEqual( - keystone.user_present(name, password, email, tenant), ret - ) - - ret.update( - { - "comment": comt, - "result": None, - "changes": {"Password": "Will be updated"}, - } - ) - self.assertDictEqual(keystone.user_present(name, password, email), ret) - - comt = 'Keystone user "nova" will be added' - ret.update( - { - "comment": comt, - "result": None, - "changes": {"User": "Will be created"}, - } - ) - self.assertDictEqual(keystone.user_present(name, password, email), ret) - - with patch.dict(keystone.__opts__, {"test": False}): - comt = "Keystone user {} has been added".format(name) - ret.update( - {"comment": comt, "result": True, "changes": {"User": "Created"}} - ) - self.assertDictEqual(keystone.user_present(name, password, email), ret) - - # 'user_absent' function tests: 1 - - def test_user_absent(self): - """ - Test to ensure that the keystone user is absent. 
- """ - name = "nova" - - ret = { - "name": name, - "changes": {}, - "result": True, - "comment": 'User "{}" is already absent'.format(name), - } - - mock_lst = MagicMock(side_effect=[["Error"], []]) - with patch.dict(keystone.__salt__, {"keystone.user_get": mock_lst}): - self.assertDictEqual(keystone.user_absent(name), ret) - - with patch.dict(keystone.__opts__, {"test": True}): - comt = 'User "{}" will be deleted'.format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(keystone.user_absent(name), ret) - - # 'tenant_present' function tests: 1 - - def test_tenant_present(self): - """ - Test to ensures that the keystone tenant exists - """ - name = "nova" - description = "OpenStack Compute Service" - - ret = { - "name": name, - "changes": {}, - "result": True, - "comment": 'Tenant / project "{}" already exists'.format(name), - } - - mock_dict = MagicMock( - side_effect=[ - {name: {"description": "desc"}}, - {name: {"description": description, "enabled": False}}, - {"Error": "error"}, - {"Error": "error"}, - ] - ) - mock_t = MagicMock(return_value=True) - with patch.dict( - keystone.__salt__, - {"keystone.tenant_get": mock_dict, "keystone.tenant_create": mock_t}, - ): - with patch.dict(keystone.__opts__, {"test": True}): - comt = 'Tenant / project "{}" will be updated'.format(name) - ret.update( - { - "comment": comt, - "result": None, - "changes": {"Description": "Will be updated"}, - } - ) - self.assertDictEqual(keystone.tenant_present(name), ret) - - comt = 'Tenant / project "{}" will be updated'.format(name) - ret.update( - { - "comment": comt, - "result": None, - "changes": {"Enabled": "Will be True"}, - } - ) - self.assertDictEqual(keystone.tenant_present(name, description), ret) - - comt = 'Tenant / project "{}" will be added'.format(name) - ret.update( - { - "comment": comt, - "result": None, - "changes": {"Tenant": "Will be created"}, - } - ) - self.assertDictEqual(keystone.tenant_present(name), ret) - - with 
patch.dict(keystone.__opts__, {"test": False}): - comt = 'Tenant / project "{}" has been added'.format(name) - ret.update( - {"comment": comt, "result": True, "changes": {"Tenant": "Created"}} - ) - self.assertDictEqual(keystone.tenant_present(name), ret) - - # 'tenant_absent' function tests: 1 - - def test_tenant_absent(self): - """ - Test to ensure that the keystone tenant is absent. - """ - name = "nova" - - ret = { - "name": name, - "changes": {}, - "result": True, - "comment": 'Tenant / project "{}" is already absent'.format(name), - } - - mock_lst = MagicMock(side_effect=[["Error"], []]) - with patch.dict(keystone.__salt__, {"keystone.tenant_get": mock_lst}): - self.assertDictEqual(keystone.tenant_absent(name), ret) - - with patch.dict(keystone.__opts__, {"test": True}): - comt = 'Tenant / project "{}" will be deleted'.format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(keystone.tenant_absent(name), ret) - - # 'role_present' function tests: 1 - - def test_role_present(self): - """ - Test to ensures that the keystone role exists - """ - name = "nova" - - ret = { - "name": name, - "changes": {}, - "result": True, - "comment": 'Role "{}" already exists'.format(name), - } - - mock_lst = MagicMock(side_effect=[[], ["Error"]]) - with patch.dict(keystone.__salt__, {"keystone.role_get": mock_lst}): - self.assertDictEqual(keystone.role_present(name), ret) - - with patch.dict(keystone.__opts__, {"test": True}): - comt = 'Role "{}" will be added'.format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(keystone.role_present(name), ret) - - # 'role_absent' function tests: 1 - - def test_role_absent(self): - """ - Test to ensure that the keystone role is absent. 
- """ - name = "nova" - - ret = { - "name": name, - "changes": {}, - "result": True, - "comment": 'Role "{}" is already absent'.format(name), - } - - mock_lst = MagicMock(side_effect=[["Error"], []]) - with patch.dict(keystone.__salt__, {"keystone.role_get": mock_lst}): - self.assertDictEqual(keystone.role_absent(name), ret) - - with patch.dict(keystone.__opts__, {"test": True}): - comt = 'Role "{}" will be deleted'.format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(keystone.role_absent(name), ret) - - # 'service_present' function tests: 1 - - def test_service_present(self): - """ - Test to ensure service present in Keystone catalog - """ - name = "nova" - service_type = "compute" - - ret = { - "name": name, - "changes": {}, - "result": True, - "comment": 'Service "{}" already exists'.format(name), - } - - mock_lst = MagicMock(side_effect=[[], ["Error"]]) - with patch.dict(keystone.__salt__, {"keystone.service_get": mock_lst}): - self.assertDictEqual(keystone.service_present(name, service_type), ret) - - with patch.dict(keystone.__opts__, {"test": True}): - comt = 'Service "{}" will be added'.format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(keystone.service_present(name, service_type), ret) - - # 'service_absent' function tests: 1 - - def test_service_absent(self): - """ - Test to ensure that the service doesn't exist in Keystone catalog - """ - name = "nova" - - ret = { - "name": name, - "changes": {}, - "result": True, - "comment": 'Service "{}" is already absent'.format(name), - } - - mock_lst = MagicMock(side_effect=[["Error"], []]) - with patch.dict(keystone.__salt__, {"keystone.service_get": mock_lst}): - self.assertDictEqual(keystone.service_absent(name), ret) - - with patch.dict(keystone.__opts__, {"test": True}): - comt = 'Service "{}" will be deleted'.format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(keystone.service_absent(name), ret) - - # 
'endpoint_present' function tests: 1 - - def test_endpoint_present(self): - """ - Test to ensure the specified endpoints exists for service - """ - name = "nova" - region = "RegionOne" - - ret = {"name": name, "changes": {}, "result": True, "comment": ""} - - endpoint = { - "adminurl": None, - "region": None, - "internalurl": None, - "publicurl": None, - "id": 1, - "service_id": None, - } - - mock_lst = MagicMock( - side_effect=[endpoint, ["Error"], {"id": 1, "service_id": None}, []] - ) - mock = MagicMock(return_value=True) - with patch.dict( - keystone.__salt__, - {"keystone.endpoint_get": mock_lst, "keystone.endpoint_create": mock}, - ): - - comt = 'Endpoint for service "{}" already exists'.format(name) - ret.update({"comment": comt, "result": True, "changes": {}}) - self.assertDictEqual(keystone.endpoint_present(name), ret) - - with patch.dict(keystone.__opts__, {"test": True}): - comt = 'Endpoint for service "{}" will be added'.format(name) - ret.update( - { - "comment": comt, - "result": None, - "changes": {"Endpoint": "Will be created"}, - } - ) - self.assertDictEqual(keystone.endpoint_present(name), ret) - - comt = 'Endpoint for service "{}" already exists'.format(name) - ret.update({"comment": comt, "result": True, "changes": {}}) - self.assertDictEqual(keystone.endpoint_present(name), ret) - - with patch.dict(keystone.__opts__, {"test": False}): - comt = 'Endpoint for service "{}" has been added'.format(name) - ret.update({"comment": comt, "result": True, "changes": True}) - self.assertDictEqual(keystone.endpoint_present(name), ret) - - # 'endpoint_absent' function tests: 1 - - def test_endpoint_absent(self): - """ - Test to ensure that the endpoint for a service doesn't - exist in Keystone catalog - """ - name = "nova" - region = "RegionOne" - comment = 'Endpoint for service "{}" is already absent'.format(name) - ret = {"name": name, "changes": {}, "result": True, "comment": comment} - - mock_lst = MagicMock(side_effect=[[], ["Error"]]) - with 
patch.dict(keystone.__salt__, {"keystone.endpoint_get": mock_lst}): - self.assertDictEqual(keystone.endpoint_absent(name, region), ret) - - with patch.dict(keystone.__opts__, {"test": True}): - comt = 'Endpoint for service "{}" will be deleted'.format(name) - ret.update({"comment": comt, "result": None}) - self.assertDictEqual(keystone.endpoint_absent(name, region), ret) diff --git a/tests/unit/states/test_keystore.py b/tests/unit/states/test_keystore.py deleted file mode 100644 index 45995a57a4f0..000000000000 --- a/tests/unit/states/test_keystore.py +++ /dev/null @@ -1,510 +0,0 @@ -""" -Test cases for keystore state -""" - -import salt.states.keystore as keystore -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class KeystoreTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.keystore - """ - - def setup_loader_modules(self): - return {keystore: {"__opts__": {"test": False}}} - - def test_cert_already_present(self): - """ - Test for existing value_present - """ - - cert_return = [ - { - "valid_until": "August 21 2017", - "sha1": "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5".encode( - __salt_system_encoding__ - ), - "valid_start": "August 22 2012", - "type": "TrustedCertEntry", - "alias": "stringhost", - "expired": True, - } - ] - x509_return = { - "Not After": "2017-08-21 05:26:54", - "Subject Hash": "97:95:14:4F", - "Serial Number": "0D:FA", - "SHA1 Finger Print": ( - "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5".encode( - __salt_system_encoding__ - ) - ), - "SHA-256 Finger Print": "5F:0F:B5:16:65:81:AA:E6:4A:10:1C:15:83:B1:BE:BE:74:E8:14:A9:1E:7A:8A:14:BA:1E:83:5D:78:F6:E9:E7".encode( - __salt_system_encoding__ - ), - "MD5 Finger Print": "80:E6:17:AF:78:D8:E4:B8:FB:5F:41:3A:27:1D:CC:F2".encode( - __salt_system_encoding__ - ), - "Version": 1, - "Key Size": 512, - "Public Key": ( - "-----BEGIN PUBLIC" - " 
KEY-----\nMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJv8ZpB5hEK7qxP9K3v43hUS5fGT4waK\ne7ix4Z4mu5UBv+cw7WSFAt0Vaag0sAbsPzU8Hhsrj/qPABvfB8asUwcCAwEAAQ==\n-----END" - " PUBLIC KEY-----\n" - ), - "Issuer": { - "C": "JP", - "organizationName": "Frank4DD", - "CN": "Frank4DD Web CA", - "SP": "Tokyo", - "L": "Chuo-ku", - "emailAddress": "support@frank4dd.com", - "OU": "WebCert Support", - }, - "Issuer Hash": "92:DA:45:6B", - "Not Before": "2012-08-22 05:26:54", - "Subject": { - "C": "JP", - "SP": "Tokyo", - "organizationName": "Frank4DD", - "CN": "www.example.com", - }, - } - - name = "keystore.jks" - passphrase = "changeit" - entries = [ - { - "alias": "stringhost", - "certificate": """-----BEGIN CERTIFICATE----- - MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG - A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxETAPBgNVBAoTCEZyYW5rNERE - MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWBgNVBAMTD0ZyYW5rNEREIFdl - YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmcmFuazRkZC5jb20wHhcNMTIw - ODIyMDUyNjU0WhcNMTcwODIxMDUyNjU0WjBKMQswCQYDVQQGEwJKUDEOMAwGA1UE - CAwFVG9reW8xETAPBgNVBAoMCEZyYW5rNEREMRgwFgYDVQQDDA93d3cuZXhhbXBs - ZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEAm/xmkHmEQrurE/0re/jeFRLl - 8ZPjBop7uLHhnia7lQG/5zDtZIUC3RVpqDSwBuw/NTweGyuP+o8AG98HxqxTBwID - AQABMA0GCSqGSIb3DQEBBQUAA4GBABS2TLuBeTPmcaTaUW/LCB2NYOy8GMdzR1mx - 8iBIu2H6/E2tiY3RIevV2OW61qY2/XRQg7YPxx3ffeUugX9F4J/iPnnu1zAxxyBy - 2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0 - Hn+GmxZA\n-----END CERTIFICATE-----""", - } - ] - - state_return = { - "name": name, - "changes": {}, - "result": True, - "comment": "No changes made.\n", - } - - # with patch.dict(keystore.__opts__, {'test': False}): - with patch("os.path.exists", return_value=True): - with patch.dict( - keystore.__salt__, - { - "keystore.list": MagicMock(return_value=cert_return), - "x509.read_certificate": MagicMock(return_value=x509_return), - }, - ): - self.assertDictEqual( - keystore.managed(name, passphrase, entries), state_return - ) - - with 
patch("os.path.exists", return_value=True): - with patch.dict(keystore.__opts__, {"test": True}): - with patch.dict( - keystore.__salt__, - { - "keystore.list": MagicMock(return_value=cert_return), - "x509.read_certificate": MagicMock(return_value=x509_return), - }, - ): - self.assertDictEqual( - keystore.managed(name, passphrase, entries), state_return - ) - - def test_cert_update(self): - """ - Test for existing value_present - """ - - cert_return = [ - { - "valid_until": "August 21 2017", - "sha1": "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5".encode( - __salt_system_encoding__ - ), - "valid_start": "August 22 2012", - "type": "TrustedCertEntry", - "alias": "stringhost", - "expired": True, - } - ] - x509_return = { - "Not After": "2017-08-21 05:26:54", - "Subject Hash": "97:95:14:4F", - "Serial Number": "0D:FA", - "SHA1 Finger Print": ( - "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D6".encode( - __salt_system_encoding__ - ) - ), - "SHA-256 Finger Print": "5F:0F:B5:16:65:81:AA:E6:4A:10:1C:15:83:B1:BE:BE:74:E8:14:A9:1E:7A:8A:14:BA:1E:83:5D:78:F6:E9:E7", - "MD5 Finger Print": "80:E6:17:AF:78:D8:E4:B8:FB:5F:41:3A:27:1D:CC:F2", - "Version": 1, - "Key Size": 512, - "Public Key": ( - "-----BEGIN PUBLIC" - " KEY-----\nMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJv8ZpB5hEK7qxP9K3v43hUS5fGT4waK\ne7ix4Z4mu5UBv+cw7WSFAt0Vaag0sAbsPzU8Hhsrj/qPABvfB8asUwcCAwEAAQ==\n-----END" - " PUBLIC KEY-----\n" - ), - "Issuer": { - "C": "JP", - "organizationName": "Frank4DD", - "CN": "Frank4DD Web CA", - "SP": "Tokyo", - "L": "Chuo-ku", - "emailAddress": "support@frank4dd.com", - "OU": "WebCert Support", - }, - "Issuer Hash": "92:DA:45:6B", - "Not Before": "2012-08-22 05:26:54", - "Subject": { - "C": "JP", - "SP": "Tokyo", - "organizationName": "Frank4DD", - "CN": "www.example.com", - }, - } - - name = "keystore.jks" - passphrase = "changeit" - entries = [ - { - "alias": "stringhost", - "certificate": """-----BEGIN CERTIFICATE----- - 
MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG - A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxETAPBgNVBAoTCEZyYW5rNERE - MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWBgNVBAMTD0ZyYW5rNEREIFdl - YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmcmFuazRkZC5jb20wHhcNMTIw - ODIyMDUyNjU0WhcNMTcwODIxMDUyNjU0WjBKMQswCQYDVQQGEwJKUDEOMAwGA1UE - CAwFVG9reW8xETAPBgNVBAoMCEZyYW5rNEREMRgwFgYDVQQDDA93d3cuZXhhbXBs - ZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEAm/xmkHmEQrurE/0re/jeFRLl - 8ZPjBop7uLHhnia7lQG/5zDtZIUC3RVpqDSwBuw/NTweGyuP+o8AG98HxqxTBwID - AQABMA0GCSqGSIb3DQEBBQUAA4GBABS2TLuBeTPmcaTaUW/LCB2NYOy8GMdzR1mx - 8iBIu2H6/E2tiY3RIevV2OW61qY2/XRQg7YPxx3ffeUugX9F4J/iPnnu1zAxxyBy - 2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0 - Hn+GmxZA\n-----END CERTIFICATE-----""", - } - ] - - test_return = { - "name": name, - "changes": {}, - "result": None, - "comment": "Alias stringhost would have been updated\n", - } - state_return = { - "name": name, - "changes": {"stringhost": "Updated"}, - "result": True, - "comment": "Alias stringhost updated.\n", - } - - with patch.dict(keystore.__opts__, {"test": True}): - with patch("os.path.exists", return_value=True): - with patch.dict( - keystore.__salt__, - { - "keystore.list": MagicMock(return_value=cert_return), - "x509.read_certificate": MagicMock(return_value=x509_return), - }, - ): - self.assertDictEqual( - keystore.managed(name, passphrase, entries), test_return - ) - - with patch("os.path.exists", return_value=True): - with patch.dict( - keystore.__salt__, - { - "keystore.list": MagicMock(return_value=cert_return), - "x509.read_certificate": MagicMock(return_value=x509_return), - "keystore.remove": MagicMock(return_value=True), - "keystore.add": MagicMock(return_value=True), - }, - ): - self.assertDictEqual( - keystore.managed(name, passphrase, entries), state_return - ) - - def test_cert_update_no_sha1_fingerprint_in_x509(self): - """ - Test for existing value_present - """ - - cert_return = [ - { - "valid_until": "August 21 
2017", - "sha1": "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5".encode( - __salt_system_encoding__ - ), - "valid_start": "August 22 2012", - "type": "TrustedCertEntry", - "alias": "stringhost", - "expired": True, - } - ] - sha1_return = b"07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5" - x509_return = { - "Not After": "2017-08-21 05:26:54", - "Subject Hash": "97:95:14:4F", - "Serial Number": "0D:FA", - "SHA-256 Finger Print": "5F:0F:B5:16:65:81:AA:E6:4A:10:1C:15:83:B1:BE:BE:74:E8:14:A9:1E:7A:8A:14:BA:1E:83:5D:78:F6:E9:E7", - "MD5 Finger Print": "80:E6:17:AF:78:D8:E4:B8:FB:5F:41:3A:27:1D:CC:F2", - "Version": 1, - "Key Size": 512, - "Public Key": ( - "-----BEGIN PUBLIC" - " KEY-----\nMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJv8ZpB5hEK7qxP9K3v43hUS5fGT4waK\ne7ix4Z4mu5UBv+cw7WSFAt0Vaag0sAbsPzU8Hhsrj/qPABvfB8asUwcCAwEAAQ==\n-----END" - " PUBLIC KEY-----\n" - ), - "Issuer": { - "C": "JP", - "organizationName": "Frank4DD", - "CN": "Frank4DD Web CA", - "SP": "Tokyo", - "L": "Chuo-ku", - "emailAddress": "support@frank4dd.com", - "OU": "WebCert Support", - }, - "Issuer Hash": "92:DA:45:6B", - "Not Before": "2012-08-22 05:26:54", - "Subject": { - "C": "JP", - "SP": "Tokyo", - "organizationName": "Frank4DD", - "CN": "www.example.com", - }, - } - - name = "keystore.jks" - passphrase = "changeit" - entries = [ - { - "alias": "stringhost", - "certificate": """-----BEGIN CERTIFICATE----- - MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG - A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxETAPBgNVBAoTCEZyYW5rNERE - MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWBgNVBAMTD0ZyYW5rNEREIFdl - YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmcmFuazRkZC5jb20wHhcNMTIw - ODIyMDUyNjU0WhcNMTcwODIxMDUyNjU0WjBKMQswCQYDVQQGEwJKUDEOMAwGA1UE - CAwFVG9reW8xETAPBgNVBAoMCEZyYW5rNEREMRgwFgYDVQQDDA93d3cuZXhhbXBs - ZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEAm/xmkHmEQrurE/0re/jeFRLl - 8ZPjBop7uLHhnia7lQG/5zDtZIUC3RVpqDSwBuw/NTweGyuP+o8AG98HxqxTBwID - 
AQABMA0GCSqGSIb3DQEBBQUAA4GBABS2TLuBeTPmcaTaUW/LCB2NYOy8GMdzR1mx - 8iBIu2H6/E2tiY3RIevV2OW61qY2/XRQg7YPxx3ffeUugX9F4J/iPnnu1zAxxyBy - 2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0 - Hn+GmxZA\n-----END CERTIFICATE-----""", - } - ] - - test_return = { - "name": name, - "changes": {}, - "result": True, - "comment": "No changes made.\n", - } - with patch("os.path.exists", return_value=True): - with patch.dict(keystore.__opts__, {"test": True}): - with patch.dict( - keystore.__salt__, - { - "keystore.list": MagicMock(return_value=cert_return), - "x509.read_certificate": MagicMock(return_value=x509_return), - "keystore.get_sha1": MagicMock(return_value=sha1_return), - }, - ): - self.assertDictEqual( - keystore.managed(name, passphrase, entries), test_return - ) - - def test_new_file(self): - """ - Test for existing value_present - """ - name = "keystore.jks" - passphrase = "changeit" - entries = [ - { - "alias": "stringhost", - "certificate": """-----BEGIN CERTIFICATE----- - MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG - A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxETAPBgNVBAoTCEZyYW5rNERE - MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWBgNVBAMTD0ZyYW5rNEREIFdl - YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmcmFuazRkZC5jb20wHhcNMTIw - ODIyMDUyNjU0WhcNMTcwODIxMDUyNjU0WjBKMQswCQYDVQQGEwJKUDEOMAwGA1UE - CAwFVG9reW8xETAPBgNVBAoMCEZyYW5rNEREMRgwFgYDVQQDDA93d3cuZXhhbXBs - ZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEAm/xmkHmEQrurE/0re/jeFRLl - 8ZPjBop7uLHhnia7lQG/5zDtZIUC3RVpqDSwBuw/NTweGyuP+o8AG98HxqxTBwID - AQABMA0GCSqGSIb3DQEBBQUAA4GBABS2TLuBeTPmcaTaUW/LCB2NYOy8GMdzR1mx - 8iBIu2H6/E2tiY3RIevV2OW61qY2/XRQg7YPxx3ffeUugX9F4J/iPnnu1zAxxyBy - 2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0 - Hn+GmxZA\n-----END CERTIFICATE-----""", - } - ] - - test_return = { - "name": name, - "changes": {}, - "result": None, - "comment": "Alias stringhost would have been added\n", - } - state_return = { - "name": name, - "changes": {"stringhost": "Added"}, - 
"result": True, - "comment": "Alias stringhost added.\n", - } - - with patch.dict(keystore.__opts__, {"test": True}): - self.assertDictEqual( - keystore.managed(name, passphrase, entries), test_return - ) - - with patch("os.path.exists", return_value=False): - with patch.dict( - keystore.__salt__, - { - "keystore.remove": MagicMock(return_value=True), - "keystore.add": MagicMock(return_value=True), - }, - ): - self.assertDictEqual( - keystore.managed(name, passphrase, entries), state_return - ) - - def test_force_remove(self): - """ - Test for existing value_present - """ - - cert_return = [ - { - "valid_until": "August 21 2017", - "sha1": "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D5".encode( - __salt_system_encoding__ - ), - "valid_start": "August 22 2012", - "type": "TrustedCertEntry", - "alias": "oldhost", - "expired": True, - } - ] - x509_return = { - "Not After": "2017-08-21 05:26:54", - "Subject Hash": "97:95:14:4F", - "Serial Number": "0D:FA", - "SHA1 Finger Print": ( - "07:1C:B9:4F:0C:C8:51:4D:02:41:24:70:8E:E8:B2:68:7B:D7:D9:D6".encode( - __salt_system_encoding__ - ) - ), - "SHA-256 Finger Print": "5F:0F:B5:16:65:81:AA:E6:4A:10:1C:15:83:B1:BE:BE:74:E8:14:A9:1E:7A:8A:14:BA:1E:83:5D:78:F6:E9:E7", - "MD5 Finger Print": "80:E6:17:AF:78:D8:E4:B8:FB:5F:41:3A:27:1D:CC:F2", - "Version": 1, - "Key Size": 512, - "Public Key": ( - "-----BEGIN PUBLIC" - " KEY-----\nMFwwDQYJKoZIhvcNAQEBBQADSwAwSAJBAJv8ZpB5hEK7qxP9K3v43hUS5fGT4waK\ne7ix4Z4mu5UBv+cw7WSFAt0Vaag0sAbsPzU8Hhsrj/qPABvfB8asUwcCAwEAAQ==\n-----END" - " PUBLIC KEY-----\n" - ), - "Issuer": { - "C": "JP", - "organizationName": "Frank4DD", - "CN": "Frank4DD Web CA", - "SP": "Tokyo", - "L": "Chuo-ku", - "emailAddress": "support@frank4dd.com", - "OU": "WebCert Support", - }, - "Issuer Hash": "92:DA:45:6B", - "Not Before": "2012-08-22 05:26:54", - "Subject": { - "C": "JP", - "SP": "Tokyo", - "organizationName": "Frank4DD", - "CN": "www.example.com", - }, - } - - name = "keystore.jks" - passphrase = 
"changeit" - entries = [ - { - "alias": "stringhost", - "certificate": """-----BEGIN CERTIFICATE----- - MIICEjCCAXsCAg36MA0GCSqGSIb3DQEBBQUAMIGbMQswCQYDVQQGEwJKUDEOMAwG - A1UECBMFVG9reW8xEDAOBgNVBAcTB0NodW8ta3UxETAPBgNVBAoTCEZyYW5rNERE - MRgwFgYDVQQLEw9XZWJDZXJ0IFN1cHBvcnQxGDAWBgNVBAMTD0ZyYW5rNEREIFdl - YiBDQTEjMCEGCSqGSIb3DQEJARYUc3VwcG9ydEBmcmFuazRkZC5jb20wHhcNMTIw - ODIyMDUyNjU0WhcNMTcwODIxMDUyNjU0WjBKMQswCQYDVQQGEwJKUDEOMAwGA1UE - CAwFVG9reW8xETAPBgNVBAoMCEZyYW5rNEREMRgwFgYDVQQDDA93d3cuZXhhbXBs - ZS5jb20wXDANBgkqhkiG9w0BAQEFAANLADBIAkEAm/xmkHmEQrurE/0re/jeFRLl - 8ZPjBop7uLHhnia7lQG/5zDtZIUC3RVpqDSwBuw/NTweGyuP+o8AG98HxqxTBwID - AQABMA0GCSqGSIb3DQEBBQUAA4GBABS2TLuBeTPmcaTaUW/LCB2NYOy8GMdzR1mx - 8iBIu2H6/E2tiY3RIevV2OW61qY2/XRQg7YPxx3ffeUugX9F4J/iPnnu1zAxxyBy - 2VguKv4SWjRFoRkIfIlHX0qVviMhSlNy2ioFLy7JcPZb+v3ftDGywUqcBiVDoea0 - Hn+GmxZA\n-----END CERTIFICATE-----""", - } - ] - - test_return = { - "name": name, - "changes": {}, - "result": None, - "comment": ( - "Alias stringhost would have been updated\nAlias oldhost would have" - " been removed" - ), - } - state_return = { - "name": name, - "changes": {"oldhost": "Removed", "stringhost": "Updated"}, - "result": True, - "comment": "Alias stringhost updated.\nAlias oldhost removed.\n", - } - - with patch.dict(keystore.__opts__, {"test": True}): - with patch("os.path.exists", return_value=True): - with patch.dict( - keystore.__salt__, - { - "keystore.list": MagicMock(return_value=cert_return), - "x509.read_certificate": MagicMock(return_value=x509_return), - }, - ): - self.assertDictEqual( - keystore.managed(name, passphrase, entries, force_remove=True), - test_return, - ) - - with patch("os.path.exists", return_value=True): - with patch.dict( - keystore.__salt__, - { - "keystore.list": MagicMock(return_value=cert_return), - "x509.read_certificate": MagicMock(return_value=x509_return), - "keystore.remove": MagicMock(return_value=True), - "keystore.add": MagicMock(return_value=True), - }, - ): - self.assertDictEqual( 
- keystore.managed(name, passphrase, entries, force_remove=True), - state_return, - ) diff --git a/tests/unit/states/test_kubernetes.py b/tests/unit/states/test_kubernetes.py deleted file mode 100644 index b90b9b37ebf9..000000000000 --- a/tests/unit/states/test_kubernetes.py +++ /dev/null @@ -1,897 +0,0 @@ -""" - :codeauthor: :email:`Jeff Schroeder ` -""" - -import base64 -from contextlib import contextmanager - -import pytest - -import salt.modules.kubernetesmod as kubernetesmod -import salt.states.kubernetes as kubernetes -import salt.utils.stringutils -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -@pytest.mark.skipif( - kubernetesmod.HAS_LIBS is False, - reason="Probably Kubernetes client lib is not installed. Skipping test_kubernetes.py", -) -class KubernetesTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.kubernetes - """ - - def setup_loader_modules(self): - return {kubernetes: {"__env__": "base"}} - - @contextmanager - def mock_func(self, func_name, return_value, test=False): - """ - Mock any of the kubernetes state function return values and set - the test options. 
- """ - name = "kubernetes.{}".format(func_name) - mocked = {name: MagicMock(return_value=return_value)} - with patch.dict(kubernetes.__salt__, mocked) as patched: - with patch.dict(kubernetes.__opts__, {"test": test}): - yield patched - - def make_configmap(self, name, namespace="default", data=None): - return self.make_ret_dict( - kind="ConfigMap", - name=name, - namespace=namespace, - data=data, - ) - - def make_secret(self, name, namespace="default", data=None): - secret_data = self.make_ret_dict( - kind="Secret", - name=name, - namespace=namespace, - data=data, - ) - # Base64 all of the values just like kubectl does - for key, value in secret_data["data"].items(): - secret_data["data"][key] = base64.b64encode( - salt.utils.stringutils.to_bytes(value) - ) - - return secret_data - - def make_node_labels(self, name="minikube"): - return { - "kubernetes.io/hostname": name, - "beta.kubernetes.io/os": "linux", - "beta.kubernetes.io/arch": "amd64", - "failure-domain.beta.kubernetes.io/region": "us-west-1", - } - - def make_node(self, name="minikube"): - node_data = self.make_ret_dict(kind="Node", name="minikube") - node_data.update( - { - "api_version": "v1", - "kind": "Node", - "metadata": { - "annotations": {"node.alpha.kubernetes.io/ttl": "0"}, - "labels": self.make_node_labels(name=name), - "name": name, - "namespace": None, - "self_link": "/api/v1/nodes/{name}".format(name=name), - "uid": "7811b8ae-c1a1-11e7-a55a-0800279fb61e", - }, - "spec": {"external_id": name}, - "status": {}, - } - ) - return node_data - - def make_namespace(self, name="default"): - namespace_data = self.make_ret_dict(kind="Namespace", name=name) - del namespace_data["data"] - namespace_data.update( - { - "status": {"phase": "Active"}, - "spec": {"finalizers": ["kubernetes"]}, - "metadata": { - "name": name, - "namespace": None, - "labels": None, - "self_link": "/api/v1/namespaces/{namespace}".format( - namespace=name, - ), - "annotations": None, - "uid": 
"752fceeb-c1a1-11e7-a55a-0800279fb61e", - }, - } - ) - return namespace_data - - def make_ret_dict(self, kind, name, namespace=None, data=None): - """ - Make a minimal example configmap or secret for using in mocks - """ - - assert kind in ("Secret", "ConfigMap", "Namespace", "Node") - - if data is None: - data = {} - - self_link = "/api/v1/namespaces/{namespace}/{kind}s/{name}".format( - namespace=namespace, - kind=kind.lower(), - name=name, - ) - - return_data = { - "kind": kind, - "data": data, - "api_version": "v1", - "metadata": { - "name": name, - "labels": None, - "namespace": namespace, - "self_link": self_link, - "annotations": {"kubernetes.io/change-cause": "salt-call state.apply"}, - }, - } - return return_data - - def test_configmap_present__fail(self): - error = kubernetes.configmap_present( - name="testme", - data={1: 1}, - source="salt://beyond/oblivion.jinja", - ) - self.assertDictEqual( - { - "changes": {}, - "result": False, - "name": "testme", - "comment": "'source' cannot be used in combination with 'data'", - }, - error, - ) - - def test_configmap_present__create_test_true(self): - # Create a new configmap with test=True - with self.mock_func("show_configmap", return_value=None, test=True): - ret = kubernetes.configmap_present( - name="example", - data={"example.conf": "# empty config file"}, - ) - self.assertDictEqual( - { - "comment": "The configmap is going to be created", - "changes": {}, - "name": "example", - "result": None, - }, - ret, - ) - - def test_configmap_present__create(self): - # Create a new configmap - with self.mock_func("show_configmap", return_value=None): - cm = self.make_configmap( - name="test", - namespace="default", - data={"foo": "bar"}, - ) - with self.mock_func("create_configmap", return_value=cm): - actual = kubernetes.configmap_present( - name="test", - data={"foo": "bar"}, - ) - self.assertDictEqual( - { - "comment": "", - "changes": {"data": {"foo": "bar"}}, - "name": "test", - "result": True, - }, - actual, - ) 
- - def test_configmap_present__create_no_data(self): - # Create a new configmap with no 'data' attribute - with self.mock_func("show_configmap", return_value=None): - cm = self.make_configmap( - name="test", - namespace="default", - ) - with self.mock_func("create_configmap", return_value=cm): - actual = kubernetes.configmap_present(name="test") - self.assertDictEqual( - { - "comment": "", - "changes": {"data": {}}, - "name": "test", - "result": True, - }, - actual, - ) - - def test_configmap_present__replace_test_true(self): - cm = self.make_configmap( - name="settings", - namespace="saltstack", - data={"foobar.conf": "# Example configuration"}, - ) - with self.mock_func("show_configmap", return_value=cm, test=True): - ret = kubernetes.configmap_present( - name="settings", - namespace="saltstack", - data={"foobar.conf": "# Example configuration"}, - ) - self.assertDictEqual( - { - "comment": "The configmap is going to be replaced", - "changes": {}, - "name": "settings", - "result": None, - }, - ret, - ) - - def test_configmap_present__replace(self): - cm = self.make_configmap(name="settings", data={"action": "make=war"}) - # Replace an existing configmap - with self.mock_func("show_configmap", return_value=cm): - new_cm = cm.copy() - new_cm.update({"data": {"action": "make=peace"}}) - with self.mock_func("replace_configmap", return_value=new_cm): - actual = kubernetes.configmap_present( - name="settings", - data={"action": "make=peace"}, - ) - self.assertDictEqual( - { - "comment": ( - "The configmap is already present. 
Forcing recreation" - ), - "changes": {"data": {"action": "make=peace"}}, - "name": "settings", - "result": True, - }, - actual, - ) - - def test_configmap_absent__noop_test_true(self): - # Nothing to delete with test=True - with self.mock_func("show_configmap", return_value=None, test=True): - actual = kubernetes.configmap_absent(name="NOT_FOUND") - self.assertDictEqual( - { - "comment": "The configmap does not exist", - "changes": {}, - "name": "NOT_FOUND", - "result": None, - }, - actual, - ) - - def test_configmap_absent__test_true(self): - # Configmap exists with test=True - cm = self.make_configmap(name="deleteme", namespace="default") - with self.mock_func("show_configmap", return_value=cm, test=True): - actual = kubernetes.configmap_absent(name="deleteme") - self.assertDictEqual( - { - "comment": "The configmap is going to be deleted", - "changes": {}, - "name": "deleteme", - "result": None, - }, - actual, - ) - - def test_configmap_absent__noop(self): - # Nothing to delete - with self.mock_func("show_configmap", return_value=None): - actual = kubernetes.configmap_absent(name="NOT_FOUND") - self.assertDictEqual( - { - "comment": "The configmap does not exist", - "changes": {}, - "name": "NOT_FOUND", - "result": True, - }, - actual, - ) - - def test_configmap_absent(self): - # Configmap exists, delete it! 
- cm = self.make_configmap(name="deleteme", namespace="default") - with self.mock_func("show_configmap", return_value=cm): - # The return from this module isn't used in the state - with self.mock_func("delete_configmap", return_value={}): - actual = kubernetes.configmap_absent(name="deleteme") - self.assertDictEqual( - { - "comment": "ConfigMap deleted", - "changes": { - "kubernetes.configmap": { - "new": "absent", - "old": "present", - }, - }, - "name": "deleteme", - "result": True, - }, - actual, - ) - - def test_secret_present__fail(self): - actual = kubernetes.secret_present( - name="sekret", - data={"password": "monk3y"}, - source="salt://nope.jinja", - ) - self.assertDictEqual( - { - "changes": {}, - "result": False, - "name": "sekret", - "comment": "'source' cannot be used in combination with 'data'", - }, - actual, - ) - - def test_secret_present__exists_test_true(self): - secret = self.make_secret(name="sekret") - new_secret = secret.copy() - new_secret.update({"data": {"password": "uncle"}}) - # Secret exists already and needs replacing with test=True - with self.mock_func("show_secret", return_value=secret): - with self.mock_func("replace_secret", return_value=new_secret, test=True): - actual = kubernetes.secret_present( - name="sekret", - data={"password": "uncle"}, - ) - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "sekret", - "comment": "The secret is going to be replaced", - }, - actual, - ) - - def test_secret_present__exists(self): - # Secret exists and gets replaced - secret = self.make_secret(name="sekret", data={"password": "booyah"}) - with self.mock_func("show_secret", return_value=secret): - with self.mock_func("replace_secret", return_value=secret): - actual = kubernetes.secret_present( - name="sekret", - data={"password": "booyah"}, - ) - self.assertDictEqual( - { - "changes": {"data": ["password"]}, - "result": True, - "name": "sekret", - "comment": "The secret is already present. 
Forcing recreation", - }, - actual, - ) - - def test_secret_present__create(self): - # Secret exists and gets replaced - secret = self.make_secret(name="sekret", data={"password": "booyah"}) - with self.mock_func("show_secret", return_value=None): - with self.mock_func("create_secret", return_value=secret): - actual = kubernetes.secret_present( - name="sekret", - data={"password": "booyah"}, - ) - self.assertDictEqual( - { - "changes": {"data": ["password"]}, - "result": True, - "name": "sekret", - "comment": "", - }, - actual, - ) - - def test_secret_present__create_no_data(self): - # Secret exists and gets replaced - secret = self.make_secret(name="sekret") - with self.mock_func("show_secret", return_value=None): - with self.mock_func("create_secret", return_value=secret): - actual = kubernetes.secret_present(name="sekret") - self.assertDictEqual( - { - "changes": {"data": []}, - "result": True, - "name": "sekret", - "comment": "", - }, - actual, - ) - - def test_secret_present__create_test_true(self): - # Secret exists and gets replaced with test=True - secret = self.make_secret(name="sekret") - with self.mock_func("show_secret", return_value=None): - with self.mock_func("create_secret", return_value=secret, test=True): - actual = kubernetes.secret_present(name="sekret") - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "sekret", - "comment": "The secret is going to be created", - }, - actual, - ) - - def test_secret_absent__noop_test_true(self): - with self.mock_func("show_secret", return_value=None, test=True): - actual = kubernetes.secret_absent(name="sekret") - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "sekret", - "comment": "The secret does not exist", - }, - actual, - ) - - def test_secret_absent__noop(self): - with self.mock_func("show_secret", return_value=None): - actual = kubernetes.secret_absent(name="passwords") - self.assertDictEqual( - { - "changes": {}, - "result": True, - "name": "passwords", - 
"comment": "The secret does not exist", - }, - actual, - ) - - def test_secret_absent__delete_test_true(self): - secret = self.make_secret(name="credentials", data={"redis": "letmein"}) - with self.mock_func("show_secret", return_value=secret): - with self.mock_func("delete_secret", return_value=secret, test=True): - actual = kubernetes.secret_absent(name="credentials") - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "credentials", - "comment": "The secret is going to be deleted", - }, - actual, - ) - - def test_secret_absent__delete(self): - secret = self.make_secret(name="foobar", data={"redis": "letmein"}) - deleted = { - "status": None, - "kind": "Secret", - "code": None, - "reason": None, - "details": None, - "message": None, - "api_version": "v1", - "metadata": { - "self_link": "/api/v1/namespaces/default/secrets/foobar", - "resource_version": "30292", - }, - } - with self.mock_func("show_secret", return_value=secret): - with self.mock_func("delete_secret", return_value=deleted): - actual = kubernetes.secret_absent(name="foobar") - self.assertDictEqual( - { - "changes": { - "kubernetes.secret": {"new": "absent", "old": "present"}, - }, - "result": True, - "name": "foobar", - "comment": "Secret deleted", - }, - actual, - ) - - def test_node_label_present__add_test_true(self): - labels = self.make_node_labels() - with self.mock_func("node_labels", return_value=labels, test=True): - actual = kubernetes.node_label_present( - name="com.zoo-animal", - node="minikube", - value="monkey", - ) - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "com.zoo-animal", - "comment": "The label is going to be set", - }, - actual, - ) - - def test_node_label_present__add(self): - node_data = self.make_node() - # Remove some of the defaults to make it simpler - node_data["metadata"]["labels"] = { - "beta.kubernetes.io/os": "linux", - } - labels = node_data["metadata"]["labels"] - - with self.mock_func("node_labels", 
return_value=labels): - with self.mock_func("node_add_label", return_value=node_data): - actual = kubernetes.node_label_present( - name="failure-domain.beta.kubernetes.io/zone", - node="minikube", - value="us-central1-a", - ) - self.assertDictEqual( - { - "comment": "", - "changes": { - "minikube.failure-domain.beta.kubernetes.io/zone": { - "new": { - "failure-domain.beta.kubernetes.io/zone": ( - "us-central1-a" - ), - "beta.kubernetes.io/os": "linux", - }, - "old": {"beta.kubernetes.io/os": "linux"}, - }, - }, - "name": "failure-domain.beta.kubernetes.io/zone", - "result": True, - }, - actual, - ) - - def test_node_label_present__already_set(self): - node_data = self.make_node() - labels = node_data["metadata"]["labels"] - with self.mock_func("node_labels", return_value=labels): - with self.mock_func("node_add_label", return_value=node_data): - actual = kubernetes.node_label_present( - name="failure-domain.beta.kubernetes.io/region", - node="minikube", - value="us-west-1", - ) - self.assertDictEqual( - { - "changes": {}, - "result": True, - "name": "failure-domain.beta.kubernetes.io/region", - "comment": ( - "The label is already set and has the specified value" - ), - }, - actual, - ) - - def test_node_label_present__update_test_true(self): - node_data = self.make_node() - labels = node_data["metadata"]["labels"] - with self.mock_func("node_labels", return_value=labels): - with self.mock_func("node_add_label", return_value=node_data, test=True): - actual = kubernetes.node_label_present( - name="failure-domain.beta.kubernetes.io/region", - node="minikube", - value="us-east-1", - ) - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "failure-domain.beta.kubernetes.io/region", - "comment": "The label is going to be updated", - }, - actual, - ) - - def test_node_label_present__update(self): - node_data = self.make_node() - # Remove some of the defaults to make it simpler - node_data["metadata"]["labels"] = { - 
"failure-domain.beta.kubernetes.io/region": "us-west-1", - } - labels = node_data["metadata"]["labels"] - with self.mock_func("node_labels", return_value=labels): - with self.mock_func("node_add_label", return_value=node_data): - actual = kubernetes.node_label_present( - name="failure-domain.beta.kubernetes.io/region", - node="minikube", - value="us-east-1", - ) - self.assertDictEqual( - { - "changes": { - "minikube.failure-domain.beta.kubernetes.io/region": { - "new": { - "failure-domain.beta.kubernetes.io/region": ( - "us-east-1" - ) - }, - "old": { - "failure-domain.beta.kubernetes.io/region": ( - "us-west-1" - ) - }, - } - }, - "result": True, - "name": "failure-domain.beta.kubernetes.io/region", - "comment": "The label is already set, changing the value", - }, - actual, - ) - - def test_node_label_absent__noop_test_true(self): - labels = self.make_node_labels() - with self.mock_func("node_labels", return_value=labels, test=True): - actual = kubernetes.node_label_absent( - name="non-existent-label", - node="minikube", - ) - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "non-existent-label", - "comment": "The label does not exist", - }, - actual, - ) - - def test_node_label_absent__noop(self): - labels = self.make_node_labels() - with self.mock_func("node_labels", return_value=labels): - actual = kubernetes.node_label_absent( - name="non-existent-label", - node="minikube", - ) - self.assertDictEqual( - { - "changes": {}, - "result": True, - "name": "non-existent-label", - "comment": "The label does not exist", - }, - actual, - ) - - def test_node_label_absent__delete_test_true(self): - labels = self.make_node_labels() - with self.mock_func("node_labels", return_value=labels, test=True): - actual = kubernetes.node_label_absent( - name="failure-domain.beta.kubernetes.io/region", - node="minikube", - ) - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "failure-domain.beta.kubernetes.io/region", - "comment": "The label 
is going to be deleted", - }, - actual, - ) - - def test_node_label_absent__delete(self): - node_data = self.make_node() - labels = node_data["metadata"]["labels"].copy() - - node_data["metadata"]["labels"].pop("failure-domain.beta.kubernetes.io/region") - - with self.mock_func("node_labels", return_value=labels): - with self.mock_func("node_remove_label", return_value=node_data): - actual = kubernetes.node_label_absent( - name="failure-domain.beta.kubernetes.io/region", - node="minikube", - ) - self.assertDictEqual( - { - "result": True, - "changes": { - "kubernetes.node_label": { - "new": "absent", - "old": "present", - } - }, - "comment": "Label removed from node", - "name": "failure-domain.beta.kubernetes.io/region", - }, - actual, - ) - - def test_namespace_present__create_test_true(self): - with self.mock_func("show_namespace", return_value=None, test=True): - actual = kubernetes.namespace_present(name="saltstack") - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "saltstack", - "comment": "The namespace is going to be created", - }, - actual, - ) - - def test_namespace_present__create(self): - namespace_data = self.make_namespace(name="saltstack") - with self.mock_func("show_namespace", return_value=None): - with self.mock_func("create_namespace", return_value=namespace_data): - actual = kubernetes.namespace_present(name="saltstack") - self.assertDictEqual( - { - "changes": {"namespace": {"new": namespace_data, "old": {}}}, - "result": True, - "name": "saltstack", - "comment": "", - }, - actual, - ) - - def test_namespace_present__noop_test_true(self): - namespace_data = self.make_namespace(name="saltstack") - with self.mock_func("show_namespace", return_value=namespace_data, test=True): - actual = kubernetes.namespace_present(name="saltstack") - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "saltstack", - "comment": "The namespace already exists", - }, - actual, - ) - - def test_namespace_present__noop(self): 
- namespace_data = self.make_namespace(name="saltstack") - with self.mock_func("show_namespace", return_value=namespace_data): - actual = kubernetes.namespace_present(name="saltstack") - self.assertDictEqual( - { - "changes": {}, - "result": True, - "name": "saltstack", - "comment": "The namespace already exists", - }, - actual, - ) - - def test_namespace_absent__noop_test_true(self): - with self.mock_func("show_namespace", return_value=None, test=True): - actual = kubernetes.namespace_absent(name="salt") - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "salt", - "comment": "The namespace does not exist", - }, - actual, - ) - - def test_namespace_absent__noop(self): - with self.mock_func("show_namespace", return_value=None): - actual = kubernetes.namespace_absent(name="salt") - self.assertDictEqual( - { - "changes": {}, - "result": True, - "name": "salt", - "comment": "The namespace does not exist", - }, - actual, - ) - - def test_namespace_absent__delete_test_true(self): - namespace_data = self.make_namespace(name="salt") - with self.mock_func("show_namespace", return_value=namespace_data, test=True): - actual = kubernetes.namespace_absent(name="salt") - self.assertDictEqual( - { - "changes": {}, - "result": None, - "name": "salt", - "comment": "The namespace is going to be deleted", - }, - actual, - ) - - def test_namespace_absent__delete_code_200(self): - namespace_data = self.make_namespace(name="salt") - deleted = namespace_data.copy() - deleted["code"] = 200 - deleted.update({"code": 200, "message": None}) - with self.mock_func("show_namespace", return_value=namespace_data): - with self.mock_func("delete_namespace", return_value=deleted): - actual = kubernetes.namespace_absent(name="salt") - self.assertDictEqual( - { - "changes": { - "kubernetes.namespace": {"new": "absent", "old": "present"} - }, - "result": True, - "name": "salt", - "comment": "Terminating", - }, - actual, - ) - - def 
test_namespace_absent__delete_status_terminating(self): - namespace_data = self.make_namespace(name="salt") - deleted = namespace_data.copy() - deleted.update( - { - "code": None, - "status": "Terminating namespace", - "message": "Terminating this shizzzle yo", - } - ) - with self.mock_func("show_namespace", return_value=namespace_data): - with self.mock_func("delete_namespace", return_value=deleted): - actual = kubernetes.namespace_absent(name="salt") - self.assertDictEqual( - { - "changes": { - "kubernetes.namespace": {"new": "absent", "old": "present"} - }, - "result": True, - "name": "salt", - "comment": "Terminating this shizzzle yo", - }, - actual, - ) - - def test_namespace_absent__delete_status_phase_terminating(self): - # This is what kubernetes 1.8.0 looks like when deleting namespaces - namespace_data = self.make_namespace(name="salt") - deleted = namespace_data.copy() - deleted.update( - {"code": None, "message": None, "status": {"phase": "Terminating"}} - ) - with self.mock_func("show_namespace", return_value=namespace_data): - with self.mock_func("delete_namespace", return_value=deleted): - actual = kubernetes.namespace_absent(name="salt") - self.assertDictEqual( - { - "changes": { - "kubernetes.namespace": {"new": "absent", "old": "present"} - }, - "result": True, - "name": "salt", - "comment": "Terminating", - }, - actual, - ) - - def test_namespace_absent__delete_error(self): - namespace_data = self.make_namespace(name="salt") - deleted = namespace_data.copy() - deleted.update({"code": 418, "message": "I' a teapot!", "status": None}) - with self.mock_func("show_namespace", return_value=namespace_data): - with self.mock_func("delete_namespace", return_value=deleted): - actual = kubernetes.namespace_absent(name="salt") - self.assertDictEqual( - { - "changes": {}, - "result": False, - "name": "salt", - "comment": "Something went wrong, response: {}".format( - deleted, - ), - }, - actual, - ) diff --git a/tests/unit/states/test_linux_acl.py 
b/tests/unit/states/test_linux_acl.py deleted file mode 100644 index 2961fbad53ad..000000000000 --- a/tests/unit/states/test_linux_acl.py +++ /dev/null @@ -1,589 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import pytest - -import salt.states.linux_acl as linux_acl -from salt.exceptions import CommandExecutionError -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -@pytest.mark.skip_unless_on_linux -class LinuxAclTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.linux_acl - """ - - def setup_loader_modules(self): - return {linux_acl: {}} - - # 'present' function tests: 1 - - def test_present(self): - """ - Test to ensure a Linux ACL is present - """ - self.maxDiff = None - name = "/root" - acl_type = "users" - acl_name = "damian" - perms = "rwx" - - mock = MagicMock( - side_effect=[ - {name: {acl_type: [{acl_name: {"octal": 5}}]}}, - {name: {acl_type: [{acl_name: {"octal": 5}}]}}, - {name: {acl_type: [{acl_name: {"octal": 5}}]}}, - {name: {acl_type: [{}]}}, - {name: {acl_type: [{}]}}, - {name: {acl_type: [{}]}}, - { - name: {acl_type: [{acl_name: {"octal": 7}}]}, - name + "/foo": {acl_type: [{acl_name: {"octal": 5}}]}, - }, - { - name: {acl_type: [{acl_name: {"octal": 7}}]}, - name + "/foo": {acl_type: [{acl_name: {"octal": 7}}]}, - }, - {name: {acl_type: ""}}, - { - name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - }, - { - name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - }, - { - name: {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - name + "/foo": {"defaults": {"users": [{acl_name: {"octal": 7}}]}}, - }, - ] - ) - mock_modfacl = MagicMock(return_value=True) - - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with 
patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Updated permissions will be applied for {}: r-x -> {}".format( - acl_name, perms - ) - ret = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": perms, - }, - "old": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": "r-x", - }, - }, - "result": None, - } - - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - # Update - test=False - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Updated permissions for {}".format(acl_name) - ret = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": perms, - }, - "old": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": "r-x", - }, - }, - "result": True, - } - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - # Update - modfacl error - with patch.dict( - linux_acl.__salt__, - { - "acl.modfacl": MagicMock( - side_effect=CommandExecutionError("Custom err") - ) - }, - ): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Error updating permissions for {}: Custom err".format( - acl_name - ) - ret = { - "name": name, - "comment": comt, - "changes": {}, - "result": False, - } - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - # New - test=True - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "New permissions will be applied for {}: {}".format( - acl_name, perms - ) - ret = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": perms, - } - }, - "result": None, - } - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - # New - test=False 
- with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Applied new permissions for {}".format(acl_name) - ret = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": perms, - } - }, - "result": True, - } - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - # New - modfacl error - with patch.dict( - linux_acl.__salt__, - { - "acl.modfacl": MagicMock( - side_effect=CommandExecutionError("Custom err") - ) - }, - ): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Error updating permissions for {}: Custom err".format( - acl_name - ) - ret = { - "name": name, - "comment": comt, - "changes": {}, - "result": False, - } - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - - # New - recurse true - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = ( - "Updated permissions will be applied for {}: rwx -> {}".format( - acl_name, perms - ) - ) - ret = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": perms, - }, - "old": { - "acl_name": acl_name, - "acl_type": acl_type, - "perms": "rwx", - }, - }, - "result": None, - } - - self.assertDictEqual( - linux_acl.present( - name, acl_type, acl_name, perms, recurse=True - ), - ret, - ) - - # New - recurse true - nothing to do - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Permissions are in the desired state" - ret = {"name": name, "comment": comt, "changes": {}, "result": True} - - self.assertDictEqual( - linux_acl.present( - name, acl_type, acl_name, perms, recurse=True - ), - ret, - ) - - # No acl type - comt = "ACL Type does not 
exist" - ret = {"name": name, "comment": comt, "result": False, "changes": {}} - self.assertDictEqual( - linux_acl.present(name, acl_type, acl_name, perms), ret - ) - - # default recurse false - nothing to do - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Permissions are in the desired state" - ret = {"name": name, "comment": comt, "changes": {}, "result": True} - - self.assertDictEqual( - linux_acl.present( - name, "d:" + acl_type, acl_name, perms, recurse=False - ), - ret, - ) - - # default recurse false - nothing to do - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Permissions are in the desired state" - ret = {"name": name, "comment": comt, "changes": {}, "result": True} - - self.assertDictEqual( - linux_acl.present( - name, "d:" + acl_type, acl_name, perms, recurse=False - ), - ret, - ) - - # default recurse true - nothing to do - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Permissions are in the desired state" - ret = {"name": name, "comment": comt, "changes": {}, "result": True} - - self.assertDictEqual( - linux_acl.present( - name, "d:" + acl_type, acl_name, perms, recurse=True - ), - ret, - ) - - # 'absent' function tests: 2 - - def test_absent(self): - """ - Test to ensure a Linux ACL does not exist - """ - name = "/root" - acl_type = "users" - acl_name = "damian" - perms = "rwx" - - ret = {"name": name, "result": None, "comment": "", "changes": {}} - - mock = MagicMock( - side_effect=[ - {name: {acl_type: [{acl_name: {"octal": "A"}}]}}, - {name: {acl_type: ""}}, - ] - ) - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Removing permissions" - ret.update({"comment": comt}) - 
self.assertDictEqual( - linux_acl.absent(name, acl_type, acl_name, perms), ret - ) - - comt = "ACL Type does not exist" - ret.update({"comment": comt, "result": False}) - self.assertDictEqual(linux_acl.absent(name, acl_type, acl_name, perms), ret) - - # 'list_present' function tests: 1 - - def test_list_present(self): - """ - Test to ensure a Linux ACL is present - """ - self.maxDiff = None - name = "/root" - acl_type = "user" - acl_names = ["root", "damian", "homer"] - acl_comment = {"owner": "root", "group": "root", "file": "/root"} - perms = "rwx" - - mock = MagicMock( - side_effect=[ - { - name: { - acl_type: [ - {acl_names[0]: {"octal": "A"}}, - {acl_names[1]: {"octal": "A"}}, - {acl_names[2]: {"octal": "A"}}, - ], - "comment": acl_comment, - } - }, - { - name: { - acl_type: [ - {acl_names[0]: {"octal": "A"}}, - {acl_names[1]: {"octal": "A"}}, - ], - "comment": acl_comment, - } - }, - { - name: { - acl_type: [ - {acl_names[0]: {"octal": "A"}}, - {acl_names[1]: {"octal": "A"}}, - ] - } - }, - {name: {acl_type: [{}]}}, - {name: {acl_type: [{}]}}, - {name: {acl_type: [{}]}}, - {name: {acl_type: ""}}, - ] - ) - mock_modfacl = MagicMock(return_value=True) - - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - # Update - test=True - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Updated permissions will be applied for {}: A -> {}".format( - acl_names, perms - ) - expected = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": ", ".join(acl_names), - "acl_type": acl_type, - "perms": 7, - }, - "old": { - "acl_name": ", ".join(acl_names), - "acl_type": acl_type, - "perms": "A", - }, - }, - "result": None, - } - - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(ret, expected) - - # Update - test=False - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Applied new permissions for {}".format(", 
".join(acl_names)) - expected = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": ", ".join(acl_names), - "acl_type": acl_type, - "perms": "rwx", - } - }, - "result": True, - } - - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # Update - modfacl error - with patch.dict( - linux_acl.__salt__, - { - "acl.modfacl": MagicMock( - side_effect=CommandExecutionError("Custom err") - ) - }, - ): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Error updating permissions for {}: Custom err".format( - acl_names - ) - expected = { - "name": name, - "comment": comt, - "changes": {}, - "result": False, - } - - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # New - test=True - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "New permissions will be applied for {}: {}".format( - acl_names, perms - ) - expected = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": ", ".join(acl_names), - "acl_type": acl_type, - "perms": perms, - } - }, - "result": None, - } - - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # New - test=False - with patch.dict(linux_acl.__salt__, {"acl.modfacl": mock_modfacl}): - with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Applied new permissions for {}".format(", ".join(acl_names)) - expected = { - "name": name, - "comment": comt, - "changes": { - "new": { - "acl_name": ", ".join(acl_names), - "acl_type": acl_type, - "perms": perms, - } - }, - "result": True, - } - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # New - modfacl error - with patch.dict( - linux_acl.__salt__, - { - "acl.modfacl": MagicMock( - side_effect=CommandExecutionError("Custom err") - ) - }, - ): 
- with patch.dict(linux_acl.__opts__, {"test": False}): - comt = "Error updating permissions for {}: Custom err".format( - acl_names - ) - expected = { - "name": name, - "comment": comt, - "changes": {}, - "result": False, - } - - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # No acl type - comt = "ACL Type does not exist" - expected = { - "name": name, - "comment": comt, - "result": False, - "changes": {}, - } - ret = linux_acl.list_present(name, acl_type, acl_names, perms) - self.assertDictEqual(expected, ret) - - # 'list_absent' function tests: 2 - - def test_list_absent(self): - """ - Test to ensure a Linux ACL does not exist - """ - name = "/root" - acl_type = "users" - acl_names = ["damian", "homer"] - perms = "rwx" - - ret = {"name": name, "result": None, "comment": "", "changes": {}} - - mock = MagicMock( - side_effect=[ - { - name: { - acl_type: [ - {acl_names[0]: {"octal": "A"}, acl_names[1]: {"octal": "A"}} - ] - } - }, - {name: {acl_type: ""}}, - ] - ) - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - with patch.dict(linux_acl.__opts__, {"test": True}): - comt = "Removing permissions" - ret.update({"comment": comt}) - self.assertDictEqual( - linux_acl.list_absent(name, acl_type, acl_names, perms), ret - ) - - comt = "ACL Type does not exist" - ret.update({"comment": comt, "result": False}) - self.assertDictEqual(linux_acl.list_absent(name, acl_type, acl_names), ret) - - def test_absent_recursive(self): - """ - Test to ensure a Linux ACL does not exist - """ - name = "/root" - acl_type = "users" - acl_name = "damian" - perms = "rwx" - - ret = {"name": name, "result": None, "comment": "", "changes": {}} - - mock = MagicMock( - side_effect=[ - { - name: {acl_type: [{acl_name: {"octal": 7}}]}, - name + "/foo": {acl_type: [{acl_name: {"octal": "A"}}]}, - } - ] - ) - with patch.dict(linux_acl.__salt__, {"acl.getfacl": mock}): - with patch.dict(linux_acl.__opts__, {"test": True}): - comt 
= "Removing permissions" - ret.update({"comment": comt}) - self.assertDictEqual( - linux_acl.absent(name, acl_type, acl_name, perms, recurse=True), ret - ) diff --git a/tests/unit/states/test_net_napalm_yang.py b/tests/unit/states/test_net_napalm_yang.py deleted file mode 100644 index 5179ae926682..000000000000 --- a/tests/unit/states/test_net_napalm_yang.py +++ /dev/null @@ -1,57 +0,0 @@ -""" - :codeauthor: Anthony Shaw -""" - -import salt.states.net_napalm_yang as netyang -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -TEST_DATA = {"foo": "bar"} - - -class NetyangTestCase(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - return {netyang: {}} - - def test_managed(self): - ret = {"changes": {}, "comment": "Loaded.", "name": "test", "result": False} - parse = MagicMock(return_value="abcdef") - temp_file = MagicMock(return_value="") - compliance_report = MagicMock(return_value={"complies": False}) - load_config = MagicMock(return_value={"comment": "Loaded."}) - file_remove = MagicMock() - - with patch("salt.utils.files.fopen"): - with patch.dict( - netyang.__salt__, - { - "temp.file": temp_file, - "napalm_yang.parse": parse, - "napalm_yang.load_config": load_config, - "napalm_yang.compliance_report": compliance_report, - "file.remove": file_remove, - }, - ): - with patch.dict(netyang.__opts__, {"test": False}): - self.assertDictEqual( - netyang.managed("test", "test", models=("model1",)), ret - ) - assert parse.called - assert temp_file.called - assert compliance_report.called - assert load_config.called - assert file_remove.called - - def test_configured(self): - ret = {"changes": {}, "comment": "Loaded.", "name": "test", "result": False} - load_config = MagicMock(return_value={"comment": "Loaded."}) - - with patch("salt.utils.files.fopen"): - with patch.dict(netyang.__salt__, {"napalm_yang.load_config": load_config}): - with 
patch.dict(netyang.__opts__, {"test": False}): - self.assertDictEqual( - netyang.configured("test", "test", models=("model1",)), ret - ) - - assert load_config.called diff --git a/tests/unit/states/test_netconfig.py b/tests/unit/states/test_netconfig.py deleted file mode 100644 index 3d2cf83df9ef..000000000000 --- a/tests/unit/states/test_netconfig.py +++ /dev/null @@ -1,110 +0,0 @@ -""" - :codeauthor: Gareth J. Greenaway -""" - -import salt.modules.napalm_network as net_mod -import salt.states.netconfig as netconfig -import salt.utils.files -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class NetconfigTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.netconfig - """ - - def setup_loader_modules(self): - state_loader_globals = { - "__env__": "base", - "__salt__": {"net.replace_pattern": net_mod.replace_pattern}, - } - module_loader_globals = { - "__env__": "base", - "__salt__": { - "net.replace_pattern": net_mod.replace_pattern, - "net.load_config": net_mod.load_config, - }, - } - return {netconfig: state_loader_globals, net_mod: module_loader_globals} - - def test_replace_pattern_test_is_true(self): - """ - Test to replace_pattern to ensure that test=True - is being passed correctly. 
- """ - name = "name" - pattern = "OLD-POLICY-NAME" - repl = "new-policy-name" - - mock = MagicMock() - mock_net_replace_pattern = MagicMock() - mock_loaded_ret = MagicMock() - - with patch.dict(netconfig.__salt__, {"config.merge": mock}): - with patch.dict( - netconfig.__salt__, {"net.replace_pattern": mock_net_replace_pattern} - ): - with patch.object(salt.utils.napalm, "loaded_ret", mock_loaded_ret): - # Test if test=True is passed as argument to state.apply - with patch.dict(netconfig.__opts__, {"test": True}): - netconfig.replace_pattern(name, pattern, repl) - - # Get the args and kwargs from the mocked call net.replace_pattern - args, kwargs = mock_net_replace_pattern.call_args_list[0] - - # Verify that the keyword argument is True - self.assertTrue(kwargs["test"]) - - # Get the args and kwargs from the mocked call to salt.utils.napalm.loaded_ret - args, kwargs = mock_loaded_ret.call_args_list[0] - - # Verify that the third positional argument is True - self.assertTrue(args[2]) - - # Test if test=True is passed as argument to state directly - netconfig.replace_pattern(name, pattern, repl, test=True) - - # Get the args and kwargs from the mocked call net.replace_pattern - args, kwargs = mock_net_replace_pattern.call_args_list[0] - - # Verify that the keyword argument is True - self.assertTrue(kwargs["test"]) - - # Get the args and kwargs from the mocked call to salt.utils.napalm.loaded_ret - args, kwargs = mock_loaded_ret.call_args_list[0] - - # Verify that the third positional argument is True - self.assertTrue(args[2]) - - def test_managed_test_is_true(self): - """ - Test to managed to ensure that test=True - is being passed correctly. 
- """ - name = "name" - - mock = MagicMock() - mock_update_config = MagicMock() - - with patch.dict(netconfig.__salt__, {"config.merge": mock}): - with patch.object(netconfig, "_update_config", mock_update_config): - # Test if test=True is passed as argument to state.apply - with patch.dict(netconfig.__opts__, {"test": True}): - netconfig.managed(name) - - # Get the args and kwargs from the mocked call net.replace_pattern - args, kwargs = mock_update_config.call_args_list[0] - - # Verify that the keyword argument is True - self.assertTrue(kwargs["test"]) - - # Test if test=True is passed as argument to state directly - netconfig.managed(name, test=True) - - # Get the args and kwargs from the mocked call net.replace_pattern - args, kwargs = mock_update_config.call_args_list[0] - - # Verify that the keyword argument is True - self.assertTrue(kwargs["test"]) diff --git a/tests/unit/states/test_network.py b/tests/unit/states/test_network.py deleted file mode 100644 index 9d5ac33ce4e1..000000000000 --- a/tests/unit/states/test_network.py +++ /dev/null @@ -1,284 +0,0 @@ -""" - :codeauthor: Rahul Handay -""" - -import logging - -import salt.states.network as network -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - -log = logging.getLogger(__name__) - - -class MockNetwork: - """ - Mock network class - """ - - def __init__(self): - pass - - @staticmethod - def interfaces(): - """ - Mock interface method - """ - ifaces = { - "salt": {"up": 1}, - "lo": {"up": 1, "inet": [{"label": "lo"}, {"label": "lo:alias1"}]}, - } - return ifaces - - -class MockGrains: - """ - Mock Grains class - """ - - def __init__(self): - pass - - @staticmethod - def grains(lis, bol): - """ - Mock grains method - """ - return {"A": "B"} - - -class NetworkTestCase(TestCase, LoaderModuleMockMixin): - """ - Validate the network state - """ - - def setup_loader_modules(self): - return {network: {}} - - def 
test_managed(self): - """ - Test to ensure that the named interface is configured properly - """ - with patch("salt.states.network.salt.utils.network", MockNetwork()), patch( - "salt.states.network.salt.loader", MockGrains() - ): - ret = {"name": "salt", "changes": {}, "result": False, "comment": ""} - - change = { - "interface": "--- \n+++ \n@@ -1 +1 @@\n-A\n+B", - "status": "Interface salt restart to validate", - } - - dunder_salt = { - "ip.get_interface": MagicMock( - side_effect=[AttributeError, "A", "A", "A", "A", "A"] - ), - "ip.build_interface": MagicMock(return_value="B"), - "saltutil.refresh_grains": MagicMock(return_value=True), - } - - with patch.dict(network.__salt__, dunder_salt): - with patch.dict( - network.__salt__, - {"ip.get_bond": MagicMock(side_effect=AttributeError)}, - ): - self.assertDictEqual( - network.managed("salt", type="bond", test=True), ret - ) - - ret.update( - { - "comment": ( - "Interface salt is set to be" - " updated:\n--- \n+++ \n@@ -1 +1 @@\n-A\n+B" - ), - "result": None, - } - ) - self.assertDictEqual( - network.managed("salt", type="stack", test=True), ret - ) - - ipupdown = MagicMock(return_value=True) - with patch.dict( - network.__salt__, {"ip.down": ipupdown, "ip.up": ipupdown} - ): - ret.update( - { - "comment": "Interface salt updated.", - "result": True, - "changes": change, - } - ) - self.assertDictEqual(network.managed("salt", type="stack"), ret) - - with patch.dict(network.__grains__, {"A": True}): - ret.update( - { - "result": True, - "changes": { - "interface": "--- \n+++ \n@@ -1 +1 @@\n-A\n+B", - "status": "Interface salt down", - }, - } - ) - self.assertDictEqual( - network.managed("salt", type="stack", enabled=False), ret - ) - - mock = MagicMock(return_value=True) - with patch.dict(network.__salt__, {"ip.down": mock}): - with patch.dict( - network.__salt__, {"saltutil.refresh_modules": mock} - ): - change = { - "interface": "--- \n+++ \n@@ -1 +1 @@\n-A\n+B", - "status": "Interface lo:alias1 down", - } - 
ret.update( - { - "name": "lo:alias1", - "comment": "Interface lo:alias1 updated.", - "result": True, - "changes": change, - } - ) - self.assertDictEqual( - network.managed("lo:alias1", type="eth", enabled=False), - ret, - ) - - def test_routes(self): - """ - Test to manage network interface static routes. - """ - ret = {"name": "salt", "changes": {}, "result": False, "comment": ""} - - mock = MagicMock( - side_effect=[AttributeError, False, False, "True", False, False] - ) - with patch.dict(network.__salt__, {"ip.get_routes": mock}): - self.assertDictEqual(network.routes("salt"), ret) - - mock = MagicMock(side_effect=[False, True, "", True, True]) - with patch.dict(network.__salt__, {"ip.build_routes": mock}): - ret.update( - {"result": True, "comment": "Interface salt routes are up to date."} - ) - self.assertDictEqual(network.routes("salt", test="a"), ret) - - ret.update( - { - "comment": "Interface salt routes are set to be added.", - "result": None, - } - ) - self.assertDictEqual(network.routes("salt", test="a"), ret) - - ret.update( - { - "comment": ( - "Interface salt routes are set to be" - " updated:\n--- \n+++ \n@@ -1,4 +0,0 @@\n-T\n-r" - "\n-u\n-e" - ) - } - ) - self.assertDictEqual(network.routes("salt", test="a"), ret) - - mock = MagicMock(side_effect=[AttributeError, True]) - with patch.dict(network.__salt__, {"ip.apply_network_settings": mock}): - ret.update( - { - "changes": { - "network_routes": "Added interface salt routes." - }, - "comment": "", - "result": False, - } - ) - self.assertDictEqual(network.routes("salt"), ret) - - ret.update( - { - "changes": { - "network_routes": "Added interface salt routes." 
- }, - "comment": "Interface salt routes added.", - "result": True, - } - ) - self.assertDictEqual(network.routes("salt"), ret) - - def test_system(self): - """ - Test to ensure that global network settings - are configured properly - """ - ret = {"name": "salt", "changes": {}, "result": False, "comment": ""} - - with patch.dict(network.__opts__, {"test": True}): - mock = MagicMock(side_effect=[AttributeError, False, False, "As"]) - with patch.dict(network.__salt__, {"ip.get_network_settings": mock}): - self.assertDictEqual(network.system("salt"), ret) - - mock = MagicMock(side_effect=[False, True, ""]) - with patch.dict(network.__salt__, {"ip.build_network_settings": mock}): - ret.update( - { - "comment": "Global network settings are up to date.", - "result": True, - } - ) - self.assertDictEqual(network.system("salt"), ret) - - ret.update( - { - "comment": "Global network settings are set to be added.", - "result": None, - } - ) - self.assertDictEqual(network.system("salt"), ret) - - ret.update( - { - "comment": ( - "Global network settings are set to" - " be updated:\n--- \n+++ \n@@ -1,2 +0,0" - " @@\n-A\n-s" - ) - } - ) - self.assertDictEqual(network.system("salt"), ret) - - with patch.dict(network.__opts__, {"test": False}): - mock = MagicMock(side_effect=[False, False]) - with patch.dict(network.__salt__, {"ip.get_network_settings": mock}): - mock = MagicMock(side_effect=[True, True]) - with patch.dict(network.__salt__, {"ip.build_network_settings": mock}): - mock = MagicMock(side_effect=[AttributeError, True]) - with patch.dict( - network.__salt__, {"ip.apply_network_settings": mock} - ): - ret.update( - { - "changes": { - "network_settings": "Added global network settings." - }, - "comment": "", - "result": False, - } - ) - self.assertDictEqual(network.system("salt"), ret) - - ret.update( - { - "changes": { - "network_settings": "Added global network settings." 
- }, - "comment": "Global network settings are up to date.", - "result": True, - } - ) - self.assertDictEqual(network.system("salt"), ret) diff --git a/tests/unit/states/test_pip_state.py b/tests/unit/states/test_pip_state.py index 5e4b6e0af141..dbac7b926e69 100644 --- a/tests/unit/states/test_pip_state.py +++ b/tests/unit/states/test_pip_state.py @@ -379,6 +379,24 @@ def test_install_in_editable_mode(self): self.assertSaltTrueReturn({"test": ret}) self.assertInSaltComment("successfully installed", {"test": ret}) + def test_install_with_specified_user(self): + """ + Check that if `user` parameter is set and the user does not exists + it will fail with an error, see #65458 + """ + user_info = MagicMock(return_value={}) + pip_version = MagicMock(return_value="10.0.1") + with patch.dict( + pip_state.__salt__, + { + "user.info": user_info, + "pip.version": pip_version, + }, + ): + ret = pip_state.installed("mypkg", user="fred") + self.assertSaltFalseReturn({"test": ret}) + self.assertInSaltComment("User fred does not exist", {"test": ret}) + class PipStateUtilsTest(TestCase): def test_has_internal_exceptions_mod_function(self): @@ -414,7 +432,7 @@ def test_importable_installation_error(self): extra_requirements = [] for name, version in salt.version.dependency_information(): if name in ["PyYAML", "packaging", "looseversion"]: - extra_requirements.append("{}=={}".format(name, version)) + extra_requirements.append(f"{name}=={version}") failures = {} pip_version_requirements = [ # Latest pip 18 @@ -453,7 +471,7 @@ def test_importable_installation_error(self): with VirtualEnv() as venv: venv.install(*extra_requirements) if requirement: - venv.install("pip{}".format(requirement)) + venv.install(f"pip{requirement}") try: subprocess.check_output([venv.venv_python, "-c", code]) except subprocess.CalledProcessError as exc: diff --git a/tests/unit/states/test_saltmod.py b/tests/unit/states/test_saltmod.py deleted file mode 100644 index 1dac93e344f9..000000000000 --- 
a/tests/unit/states/test_saltmod.py +++ /dev/null @@ -1,516 +0,0 @@ -""" - :codeauthor: Jayesh Kariya -""" - -import copy -import os -import tempfile - -import pytest - -import salt.config -import salt.loader -import salt.states.saltmod as saltmod -import salt.utils.event -import salt.utils.jid -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.mock import MagicMock, patch -from tests.support.runtests import RUNTIME_VARS -from tests.support.unit import TestCase - - -class SaltmodTestCase(TestCase, LoaderModuleMockMixin): - """ - Test cases for salt.states.saltmod - """ - - def setup_loader_modules(self): - utils = salt.loader.utils( - salt.config.DEFAULT_MINION_OPTS.copy(), whitelist=["state"] - ) - return { - saltmod: { - "__env__": "base", - "__opts__": { - "__role": "master", - "file_client": "remote", - "sock_dir": tempfile.mkdtemp(dir=RUNTIME_VARS.TMP), - "transport": "tcp", - }, - "__salt__": {"saltutil.cmd": MagicMock()}, - "__orchestration_jid__": salt.utils.jid.gen_jid({}), - "__utils__": utils, - } - } - - # 'state' function tests: 1 - - @pytest.mark.slow_test - def test_state(self): - """ - Test to invoke a state run on a given target - """ - name = "state" - tgt = "minion1" - - comt = "Passed invalid value for 'allow_fail', must be an int" - - ret = {"name": name, "changes": {}, "result": False, "comment": comt} - - test_ret = { - "name": name, - "changes": {}, - "result": True, - "comment": "States ran successfully.", - } - - test_batch_return = { - "minion1": { - "ret": { - "test_|-notify_me_|-this is a name_|-show_notification": { - "comment": "Notify me", - "name": "this is a name", - "start_time": "10:43:41.487565", - "result": True, - "duration": 0.35, - "__run_num__": 0, - "__sls__": "demo", - "changes": {}, - "__id__": "notify_me", - }, - "retcode": 0, - }, - "out": "highstate", - }, - "minion2": { - "ret": { - "test_|-notify_me_|-this is a name_|-show_notification": { - "comment": "Notify me", - "name": "this is a 
name", - "start_time": "10:43:41.487565", - "result": True, - "duration": 0.35, - "__run_num__": 0, - "__sls__": "demo", - "changes": {}, - "__id__": "notify_me", - }, - "retcode": 0, - }, - "out": "highstate", - }, - "minion3": { - "ret": { - "test_|-notify_me_|-this is a name_|-show_notification": { - "comment": "Notify me", - "name": "this is a name", - "start_time": "10:43:41.487565", - "result": True, - "duration": 0.35, - "__run_num__": 0, - "__sls__": "demo", - "changes": {}, - "__id__": "notify_me", - }, - "retcode": 0, - }, - "out": "highstate", - }, - } - - self.assertDictEqual(saltmod.state(name, tgt, allow_fail="a"), ret) - - comt = "No highstate or sls specified, no execution made" - ret.update({"comment": comt}) - self.assertDictEqual(saltmod.state(name, tgt), ret) - - comt = "Must pass in boolean for value of 'concurrent'" - ret.update({"comment": comt}) - self.assertDictEqual( - saltmod.state(name, tgt, highstate=True, concurrent="a"), ret - ) - - ret.update({"comment": comt, "result": None}) - with patch.dict(saltmod.__opts__, {"test": True}): - self.assertDictEqual(saltmod.state(name, tgt, highstate=True), test_ret) - - ret.update( - { - "comment": "States ran successfully. No changes made to silver.", - "result": True, - "__jid__": "20170406104341210934", - } - ) - with patch.dict(saltmod.__opts__, {"test": False}): - mock = MagicMock( - return_value={ - "silver": { - "jid": "20170406104341210934", - "retcode": 0, - "ret": { - "test_|-notify_me_|-this is a name_|-show_notification": { - "comment": "Notify me", - "name": "this is a name", - "start_time": "10:43:41.487565", - "result": True, - "duration": 0.35, - "__run_num__": 0, - "__sls__": "demo", - "changes": {}, - "__id__": "notify_me", - } - }, - "out": "highstate", - } - } - ) - with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock}): - self.assertDictEqual(saltmod.state(name, tgt, highstate=True), ret) - - ret.update( - { - "comment": ( - "States ran successfully. 
No changes made to minion1, minion3," - " minion2." - ) - } - ) - del ret["__jid__"] - with patch.dict(saltmod.__opts__, {"test": False}): - with patch.dict( - saltmod.__salt__, - {"saltutil.cmd": MagicMock(return_value=test_batch_return)}, - ): - state_run = saltmod.state(name, tgt, highstate=True) - - # Test return without checking the comment contents. Comments are tested later. - comment = state_run.pop("comment") - ret.pop("comment") - self.assertDictEqual(state_run, ret) - - # Check the comment contents in a non-order specific way (ordering fails sometimes on PY3) - self.assertIn("States ran successfully. No changes made to", comment) - for minion in ["minion1", "minion2", "minion3"]: - self.assertIn(minion, comment) - - # 'function' function tests: 1 - - @pytest.mark.slow_test - def test_function(self): - """ - Test to execute a single module function on a remote - minion via salt or salt-ssh - """ - name = "state" - tgt = "larry" - - ret = { - "name": name, - "changes": {}, - "result": None, - "comment": "Function state would be executed on target {}".format(tgt), - } - - with patch.dict(saltmod.__opts__, {"test": True}): - self.assertDictEqual(saltmod.function(name, tgt), ret) - - ret.update( - { - "result": True, - "changes": {"ret": {tgt: ""}}, - "comment": ( - "Function ran successfully. 
Function state ran on {}.".format(tgt) - ), - } - ) - with patch.dict(saltmod.__opts__, {"test": False}): - mock_ret = {"larry": {"ret": "", "retcode": 0, "failed": False}} - mock_cmd = MagicMock(return_value=mock_ret) - with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock_cmd}): - self.assertDictEqual(saltmod.function(name, tgt), ret) - - @pytest.mark.slow_test - def test_function_when_no_minions_match(self): - """ - Test to execute a single module function on a remote - minion via salt or salt-ssh - """ - name = "state" - tgt = "larry" - mock_ret = {} - mock_cmd = MagicMock(return_value=mock_ret) - - ret = { - "name": name, - "changes": {}, - "result": False, - "comment": "No minions responded", - } - - with patch.dict(saltmod.__opts__, {"test": False}): - with patch.dict(saltmod.__salt__, {"saltutil.cmd": mock_cmd}): - self.assertDictEqual(saltmod.function(name, tgt), ret) - - # 'wait_for_event' function tests: 1 - - def test_wait_for_event(self): - """ - Test to watch Salt's event bus and block until a condition is met - """ - name = "state" - tgt = "minion1" - - comt = "Timeout value reached." 
- - ret = {"name": name, "changes": {}, "result": False, "comment": comt} - - class Mockevent: - """ - Mock event class - """ - - flag = None - - def __init__(self): - self.full = None - - def get_event(self, full): - """ - Mock get_event method - """ - self.full = full - if self.flag: - return {"tag": name, "data": {}} - return None - - def __enter__(self): - return self - - def __exit__(self, *args): - pass - - with patch.object( - salt.utils.event, "get_event", MagicMock(return_value=Mockevent()) - ): - with patch.dict(saltmod.__opts__, {"sock_dir": True, "transport": True}): - with patch( - "salt.states.saltmod.time.time", MagicMock(return_value=1.0) - ): - self.assertDictEqual( - saltmod.wait_for_event(name, "salt", timeout=-1.0), ret - ) - - Mockevent.flag = True - ret.update( - {"comment": "All events seen in 0.0 seconds.", "result": True} - ) - self.assertDictEqual(saltmod.wait_for_event(name, ""), ret) - - ret.update({"comment": "Timeout value reached.", "result": False}) - self.assertDictEqual( - saltmod.wait_for_event(name, tgt, timeout=-1.0), ret - ) - - # 'wait_for_event' function tests: 2 - - def test_wait_for_event_list_single_event(self): - """ - Test to watch Salt's event bus and block until a condition is met - """ - name = "presence" - event_id = "lost" - tgt = ["minion_1", "minion_2", "minion_3"] - - comt = "Timeout value reached." 
- - ret = {"name": name, "changes": {}, "result": False, "comment": comt} - - class Mockevent: - """ - Mock event class - """ - - flag = None - - def __init__(self): - self.full = None - - def get_event(self, full): - """ - Mock get_event method - """ - self.full = full - if self.flag: - return {"tag": name, "data": {"lost": tgt}} - return None - - def __enter__(self): - return self - - def __exit__(self, *args): - pass - - with patch.object( - salt.utils.event, "get_event", MagicMock(return_value=Mockevent()) - ): - with patch.dict(saltmod.__opts__, {"sock_dir": True, "transport": True}): - with patch( - "salt.states.saltmod.time.time", MagicMock(return_value=1.0) - ): - ret.update({"comment": "Timeout value reached.", "result": False}) - self.assertDictEqual( - saltmod.wait_for_event( - name, tgt, event_id=event_id, timeout=-1.0 - ), - ret, - ) - - Mockevent.flag = True - ret.update( - { - "name": name, - "changes": {"minions_seen": tgt}, - "result": True, - "comment": "All events seen in 0.0 seconds.", - } - ) - self.assertDictEqual( - saltmod.wait_for_event( - name, copy.deepcopy(tgt), event_id="lost", timeout=1.0 - ), - ret, - ) - - # 'runner' function tests: 1 - - def test_runner(self): - """ - Test to execute a runner module on the master - """ - name = "state" - - ret = { - "changes": {"return": True}, - "name": "state", - "result": True, - "comment": "Runner function 'state' executed.", - } - runner_mock = MagicMock(return_value={"return": True}) - - with patch.dict(saltmod.__salt__, {"saltutil.runner": runner_mock}): - self.assertDictEqual(saltmod.runner(name), ret) - - # 'wheel' function tests: 1 - - def test_wheel(self): - """ - Test to execute a wheel module on the master - """ - name = "state" - - ret = { - "changes": {"return": True}, - "name": "state", - "result": True, - "comment": "Wheel function 'state' executed.", - } - wheel_mock = MagicMock(return_value={"return": True}) - - with patch.dict(saltmod.__salt__, {"saltutil.wheel": wheel_mock}): - 
self.assertDictEqual(saltmod.wheel(name), ret) - - @pytest.mark.slow_test - def test_state_ssh(self): - """ - Test saltmod state passes roster to saltutil.cmd - """ - origcmd = saltmod.__salt__["saltutil.cmd"] - cmd_kwargs = {} - cmd_args = [] - - def cmd_mock(*args, **kwargs): - cmd_args.extend(args) - cmd_kwargs.update(kwargs) - return origcmd(*args, **kwargs) - - with patch.dict(saltmod.__salt__, {"saltutil.cmd": cmd_mock}): - ret = saltmod.state( - "state.sls", tgt="*", ssh=True, highstate=True, roster="my_roster" - ) - assert "roster" in cmd_kwargs - assert cmd_kwargs["roster"] == "my_roster" - - @pytest.mark.slow_test - def test_function_ssh(self): - """ - Test saltmod function passes roster to saltutil.cmd - """ - origcmd = saltmod.__salt__["saltutil.cmd"] - cmd_kwargs = {} - cmd_args = [] - - def cmd_mock(*args, **kwargs): - cmd_args.extend(args) - cmd_kwargs.update(kwargs) - return origcmd(*args, **kwargs) - - with patch.dict(saltmod.__opts__, {"test": False}), patch.dict( - saltmod.__salt__, {"saltutil.cmd": cmd_mock} - ): - saltmod.function("state", tgt="*", ssh=True, roster="my_roster") - assert "roster" in cmd_kwargs - assert cmd_kwargs["roster"] == "my_roster" - - -class StatemodTests(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - self.tmp_cachedir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) - return { - saltmod: { - "__env__": "base", - "__opts__": { - "id": "webserver2", - "argv": [], - "__role": "master", - "cachedir": self.tmp_cachedir, - "extension_modules": os.path.join(self.tmp_cachedir, "extmods"), - }, - "__salt__": {"saltutil.cmd": MagicMock()}, - "__orchestration_jid__": salt.utils.jid.gen_jid({}), - } - } - - def test_statemod_state(self): - """Smoke test for for salt.states.statemod.state(). Ensures that we - don't take an exception if optional parameters are not specified in - __opts__ or __env__. 
- """ - args = ("webserver_setup", "webserver2") - kwargs = { - "tgt_type": "glob", - "fail_minions": None, - "pillar": None, - "top": None, - "batch": None, - "orchestration_jid": None, - "sls": "vroom", - "queue": False, - "concurrent": False, - "highstate": None, - "expr_form": None, - "ret": "", - "ssh": False, - "timeout": None, - "test": False, - "allow_fail": 0, - "saltenv": None, - "expect_minions": False, - } - ret = saltmod.state(*args, **kwargs) - expected = { - "comment": "States ran successfully.", - "changes": {}, - "name": "webserver_setup", - "result": True, - } - self.assertEqual(ret, expected) diff --git a/tests/unit/states/test_zcbuildout.py b/tests/unit/states/test_zcbuildout.py index db6013076d1f..7cafbba6a62f 100644 --- a/tests/unit/states/test_zcbuildout.py +++ b/tests/unit/states/test_zcbuildout.py @@ -10,6 +10,16 @@ from tests.support.runtests import RUNTIME_VARS from tests.unit.modules.test_zcbuildout import KNOWN_VIRTUALENV_BINARY_NAMES, Base +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, + pytest.mark.skip_on_windows( + reason=( + "Special steps are required for proper SSL validation because " + "`easy_install` is too old(and deprecated)." 
+ ) + ), +] + @pytest.mark.skip_if_binaries_missing(*KNOWN_VIRTUALENV_BINARY_NAMES, check_all=False) @pytest.mark.requires_network diff --git a/tests/unit/test_acl.py b/tests/unit/test_acl.py deleted file mode 100644 index ff7a0604080d..000000000000 --- a/tests/unit/test_acl.py +++ /dev/null @@ -1,87 +0,0 @@ -from salt import acl -from tests.support.unit import TestCase - - -class ClientACLTestCase(TestCase): - """ - Unit tests for salt.acl.ClientACL - """ - - def setUp(self): - self.blacklist = { - "users": ["joker", "penguin", "*bad_*", "blocked_.*", "^Homer$"], - "modules": ["cmd.run", "test.fib", "rm-rf.*"], - } - self.whitelist = { - "users": ["testuser", "saltuser"], - "modules": ["test.ping", "grains.items"], - } - - def tearDown(self): - del self.blacklist - del self.whitelist - - def test_user_is_blacklisted(self): - """ - test user_is_blacklisted - """ - client_acl = acl.PublisherACL(self.blacklist) - - self.assertTrue(client_acl.user_is_blacklisted("joker")) - self.assertTrue(client_acl.user_is_blacklisted("penguin")) - self.assertTrue(client_acl.user_is_blacklisted("bad_")) - self.assertTrue(client_acl.user_is_blacklisted("bad_user")) - self.assertTrue(client_acl.user_is_blacklisted("bad_*")) - self.assertTrue(client_acl.user_is_blacklisted("user_bad_")) - self.assertTrue(client_acl.user_is_blacklisted("blocked_")) - self.assertTrue(client_acl.user_is_blacklisted("blocked_user")) - self.assertTrue(client_acl.user_is_blacklisted("blocked_.*")) - self.assertTrue(client_acl.user_is_blacklisted("Homer")) - - self.assertFalse(client_acl.user_is_blacklisted("batman")) - self.assertFalse(client_acl.user_is_blacklisted("robin")) - self.assertFalse(client_acl.user_is_blacklisted("bad")) - self.assertFalse(client_acl.user_is_blacklisted("blocked")) - self.assertFalse(client_acl.user_is_blacklisted("NotHomer")) - self.assertFalse(client_acl.user_is_blacklisted("HomerSimpson")) - - def test_cmd_is_blacklisted(self): - """ - test cmd_is_blacklisted - """ - 
client_acl = acl.PublisherACL(self.blacklist) - - self.assertTrue(client_acl.cmd_is_blacklisted("cmd.run")) - self.assertTrue(client_acl.cmd_is_blacklisted("test.fib")) - self.assertTrue(client_acl.cmd_is_blacklisted("rm-rf.root")) - - self.assertFalse(client_acl.cmd_is_blacklisted("cmd.shell")) - self.assertFalse(client_acl.cmd_is_blacklisted("test.versions")) - self.assertFalse(client_acl.cmd_is_blacklisted("arm-rf.root")) - - self.assertTrue(client_acl.cmd_is_blacklisted(["cmd.run", "state.sls"])) - self.assertFalse( - client_acl.cmd_is_blacklisted(["state.highstate", "state.sls"]) - ) - - def test_user_is_whitelisted(self): - """ - test user_is_whitelisted - """ - client_acl = acl.PublisherACL(self.whitelist) - - self.assertTrue(client_acl.user_is_whitelisted("testuser")) - self.assertTrue(client_acl.user_is_whitelisted("saltuser")) - self.assertFalse(client_acl.user_is_whitelisted("three")) - self.assertFalse(client_acl.user_is_whitelisted("hans")) - - def test_cmd_is_whitelisted(self): - """ - test cmd_is_whitelisted - """ - client_acl = acl.PublisherACL(self.whitelist) - - self.assertTrue(client_acl.cmd_is_whitelisted("test.ping")) - self.assertTrue(client_acl.cmd_is_whitelisted("grains.items")) - self.assertFalse(client_acl.cmd_is_whitelisted("cmd.run")) - self.assertFalse(client_acl.cmd_is_whitelisted("test.version")) diff --git a/tests/unit/test_fileclient.py b/tests/unit/test_fileclient.py deleted file mode 100644 index 0b5400e44bc5..000000000000 --- a/tests/unit/test_fileclient.py +++ /dev/null @@ -1,476 +0,0 @@ -""" -Tests for the salt fileclient -""" - - -import errno -import logging -import os -import shutil - -import salt.utils.files -from salt import fileclient -from tests.support.mixins import ( - AdaptedConfigurationTestCaseMixin, - LoaderModuleMockMixin, -) -from tests.support.mock import MagicMock, Mock, patch -from tests.support.runtests import RUNTIME_VARS -from tests.support.unit import TestCase - -log = logging.getLogger(__name__) - - 
-class FileclientTestCase(TestCase): - """ - Fileclient test - """ - - opts = { - "extension_modules": "", - "cachedir": "/__test__", - } - - def _fake_makedir(self, num=errno.EEXIST): - def _side_effect(*args, **kwargs): - raise OSError(num, "Errno {}".format(num)) - - return Mock(side_effect=_side_effect) - - def test_cache_skips_makedirs_on_race_condition(self): - """ - If cache contains already a directory, do not raise an exception. - """ - with patch("os.path.isfile", lambda prm: False): - for exists in range(2): - with patch("os.makedirs", self._fake_makedir()): - with fileclient.Client(self.opts)._cache_loc( - "testfile" - ) as c_ref_itr: - assert c_ref_itr == os.sep + os.sep.join( - ["__test__", "files", "base", "testfile"] - ) - - def test_cache_raises_exception_on_non_eexist_ioerror(self): - """ - If makedirs raises other than EEXIST errno, an exception should be raised. - """ - with patch("os.path.isfile", lambda prm: False): - with patch("os.makedirs", self._fake_makedir(num=errno.EROFS)): - with self.assertRaises(OSError): - with fileclient.Client(self.opts)._cache_loc( - "testfile" - ) as c_ref_itr: - assert c_ref_itr == "/__test__/files/base/testfile" - - def test_cache_extrn_path_valid(self): - """ - Tests for extrn_filepath for a given url - """ - file_name = "http://localhost:8000/test/location/src/dev/usr/file" - - ret = fileclient.Client(self.opts)._extrn_path(file_name, "base") - assert ret == os.path.join("__test__", "extrn_files", "base", ret) - - def test_cache_extrn_path_invalid(self): - """ - Tests for extrn_filepath for a given url - """ - file_name = "http://localhost:8000/../../../../../usr/bin/bad" - - ret = fileclient.Client(self.opts)._extrn_path(file_name, "base") - assert ret == "Invalid path" - - def test_extrn_path_with_long_filename(self): - safe_file_name = os.path.split( - fileclient.Client(self.opts)._extrn_path( - "https://test.com/" + ("A" * 254), "base" - ) - )[-1] - assert safe_file_name == "A" * 254 - - 
oversized_file_name = os.path.split( - fileclient.Client(self.opts)._extrn_path( - "https://test.com/" + ("A" * 255), "base" - ) - )[-1] - assert len(oversized_file_name) < 256 - assert oversized_file_name != "A" * 255 - - oversized_file_with_query_params = os.path.split( - fileclient.Client(self.opts)._extrn_path( - "https://test.com/file?" + ("A" * 255), "base" - ) - )[-1] - assert len(oversized_file_with_query_params) < 256 - - -SALTENVS = ("base", "dev") -SUBDIR = "subdir" -SUBDIR_FILES = ("foo.txt", "bar.txt", "baz.txt") - - -def _get_file_roots(fs_root): - return {x: [os.path.join(fs_root, x)] for x in SALTENVS} - - -class FileClientTest( - TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMixin -): - def setup_loader_modules(self): - FS_ROOT = os.path.join(RUNTIME_VARS.TMP, "fileclient_fs_root") - CACHE_ROOT = os.path.join(RUNTIME_VARS.TMP, "fileclient_cache_root") - MOCKED_OPTS = { - "file_roots": _get_file_roots(FS_ROOT), - "fileserver_backend": ["roots"], - "cachedir": CACHE_ROOT, - "file_client": "local", - } - self.addCleanup(shutil.rmtree, FS_ROOT, ignore_errors=True) - self.addCleanup(shutil.rmtree, CACHE_ROOT, ignore_errors=True) - return {fileclient: {"__opts__": MOCKED_OPTS}} - - def setUp(self): - self.file_client = fileclient.Client(self.master_opts) - - def tearDown(self): - del self.file_client - - def test_file_list_emptydirs(self): - """ - Ensure that the fileclient class won't allow a direct call to file_list_emptydirs() - """ - with self.assertRaises(NotImplementedError): - self.file_client.file_list_emptydirs() - - def test_get_file(self): - """ - Ensure that the fileclient class won't allow a direct call to get_file() - """ - with self.assertRaises(NotImplementedError): - self.file_client.get_file(None) - - def test_get_file_client(self): - minion_opts = self.get_temp_config("minion") - minion_opts["file_client"] = "remote" - with patch( - "salt.fileclient.RemoteClient", MagicMock(return_value="remote_client") - ): - ret = 
fileclient.get_file_client(minion_opts) - self.assertEqual("remote_client", ret) - - -class FileclientCacheTest( - TestCase, AdaptedConfigurationTestCaseMixin, LoaderModuleMockMixin -): - """ - Tests for the fileclient caching. The LocalClient is the only thing we can - test as it is the only way we can mock the fileclient (the tests run from - the minion process, so the master cannot be mocked from test code). - """ - - def setup_loader_modules(self): - self.FS_ROOT = os.path.join(RUNTIME_VARS.TMP, "fileclient_fs_root") - self.CACHE_ROOT = os.path.join(RUNTIME_VARS.TMP, "fileclient_cache_root") - self.MOCKED_OPTS = { - "file_roots": _get_file_roots(self.FS_ROOT), - "fileserver_backend": ["roots"], - "cachedir": self.CACHE_ROOT, - "file_client": "local", - } - self.addCleanup(shutil.rmtree, self.FS_ROOT, ignore_errors=True) - self.addCleanup(shutil.rmtree, self.CACHE_ROOT, ignore_errors=True) - return {fileclient: {"__opts__": self.MOCKED_OPTS}} - - def setUp(self): - """ - No need to add a dummy foo.txt to muddy up the github repo, just make - our own fileserver root on-the-fly. - """ - - def _new_dir(path): - """ - Add a new dir at ``path`` using os.makedirs. If the directory - already exists, remove it recursively and then try to create it - again. - """ - try: - os.makedirs(path) - except OSError as exc: - if exc.errno == errno.EEXIST: - # Just in case a previous test was interrupted, remove the - # directory and try adding it again. 
- shutil.rmtree(path) - os.makedirs(path) - else: - raise - - # Crete the FS_ROOT - for saltenv in SALTENVS: - saltenv_root = os.path.join(self.FS_ROOT, saltenv) - # Make sure we have a fresh root dir for this saltenv - _new_dir(saltenv_root) - - path = os.path.join(saltenv_root, "foo.txt") - with salt.utils.files.fopen(path, "w") as fp_: - fp_.write("This is a test file in the '{}' saltenv.\n".format(saltenv)) - - subdir_abspath = os.path.join(saltenv_root, SUBDIR) - os.makedirs(subdir_abspath) - for subdir_file in SUBDIR_FILES: - path = os.path.join(subdir_abspath, subdir_file) - with salt.utils.files.fopen(path, "w") as fp_: - fp_.write( - "This is file '{}' in subdir '{} from saltenv '{}'".format( - subdir_file, SUBDIR, saltenv - ) - ) - - # Create the CACHE_ROOT - _new_dir(self.CACHE_ROOT) - - def test_cache_dir(self): - """ - Ensure entire directory is cached to correct location - """ - patched_opts = {x: y for x, y in self.minion_opts.items()} - patched_opts.update(self.MOCKED_OPTS) - - with patch.dict(fileclient.__opts__, patched_opts): - client = fileclient.get_file_client(fileclient.__opts__, pillar=False) - for saltenv in SALTENVS: - self.assertTrue( - client.cache_dir("salt://{}".format(SUBDIR), saltenv, cachedir=None) - ) - for subdir_file in SUBDIR_FILES: - cache_loc = os.path.join( - fileclient.__opts__["cachedir"], - "files", - saltenv, - SUBDIR, - subdir_file, - ) - # Double check that the content of the cached file - # identifies it as being from the correct saltenv. The - # setUp function creates the file with the name of the - # saltenv mentioned in the file, so a simple 'in' check is - # sufficient here. If opening the file raises an exception, - # this is a problem, so we are not catching the exception - # and letting it be raised so that the test fails. 
- with salt.utils.files.fopen(cache_loc) as fp_: - content = fp_.read() - log.debug("cache_loc = %s", cache_loc) - log.debug("content = %s", content) - self.assertTrue(subdir_file in content) - self.assertTrue(SUBDIR in content) - self.assertTrue(saltenv in content) - - def test_cache_dir_with_alternate_cachedir_and_absolute_path(self): - """ - Ensure entire directory is cached to correct location when an alternate - cachedir is specified and that cachedir is an absolute path - """ - patched_opts = {x: y for x, y in self.minion_opts.items()} - patched_opts.update(self.MOCKED_OPTS) - alt_cachedir = os.path.join(RUNTIME_VARS.TMP, "abs_cachedir") - - with patch.dict(fileclient.__opts__, patched_opts): - client = fileclient.get_file_client(fileclient.__opts__, pillar=False) - for saltenv in SALTENVS: - self.assertTrue( - client.cache_dir( - "salt://{}".format(SUBDIR), saltenv, cachedir=alt_cachedir - ) - ) - for subdir_file in SUBDIR_FILES: - cache_loc = os.path.join( - alt_cachedir, "files", saltenv, SUBDIR, subdir_file - ) - # Double check that the content of the cached file - # identifies it as being from the correct saltenv. The - # setUp function creates the file with the name of the - # saltenv mentioned in the file, so a simple 'in' check is - # sufficient here. If opening the file raises an exception, - # this is a problem, so we are not catching the exception - # and letting it be raised so that the test fails. 
- with salt.utils.files.fopen(cache_loc) as fp_: - content = fp_.read() - log.debug("cache_loc = %s", cache_loc) - log.debug("content = %s", content) - self.assertTrue(subdir_file in content) - self.assertTrue(SUBDIR in content) - self.assertTrue(saltenv in content) - - def test_cache_dir_with_alternate_cachedir_and_relative_path(self): - """ - Ensure entire directory is cached to correct location when an alternate - cachedir is specified and that cachedir is a relative path - """ - patched_opts = {x: y for x, y in self.minion_opts.items()} - patched_opts.update(self.MOCKED_OPTS) - alt_cachedir = "foo" - - with patch.dict(fileclient.__opts__, patched_opts): - client = fileclient.get_file_client(fileclient.__opts__, pillar=False) - for saltenv in SALTENVS: - self.assertTrue( - client.cache_dir( - "salt://{}".format(SUBDIR), saltenv, cachedir=alt_cachedir - ) - ) - for subdir_file in SUBDIR_FILES: - cache_loc = os.path.join( - fileclient.__opts__["cachedir"], - alt_cachedir, - "files", - saltenv, - SUBDIR, - subdir_file, - ) - # Double check that the content of the cached file - # identifies it as being from the correct saltenv. The - # setUp function creates the file with the name of the - # saltenv mentioned in the file, so a simple 'in' check is - # sufficient here. If opening the file raises an exception, - # this is a problem, so we are not catching the exception - # and letting it be raised so that the test fails. 
- with salt.utils.files.fopen(cache_loc) as fp_: - content = fp_.read() - log.debug("cache_loc = %s", cache_loc) - log.debug("content = %s", content) - self.assertTrue(subdir_file in content) - self.assertTrue(SUBDIR in content) - self.assertTrue(saltenv in content) - - def test_cache_file(self): - """ - Ensure file is cached to correct location - """ - patched_opts = {x: y for x, y in self.minion_opts.items()} - patched_opts.update(self.MOCKED_OPTS) - - with patch.dict(fileclient.__opts__, patched_opts): - client = fileclient.get_file_client(fileclient.__opts__, pillar=False) - for saltenv in SALTENVS: - self.assertTrue( - client.cache_file("salt://foo.txt", saltenv, cachedir=None) - ) - cache_loc = os.path.join( - fileclient.__opts__["cachedir"], "files", saltenv, "foo.txt" - ) - # Double check that the content of the cached file identifies - # it as being from the correct saltenv. The setUp function - # creates the file with the name of the saltenv mentioned in - # the file, so a simple 'in' check is sufficient here. If - # opening the file raises an exception, this is a problem, so - # we are not catching the exception and letting it be raised so - # that the test fails. 
- with salt.utils.files.fopen(cache_loc) as fp_: - content = fp_.read() - log.debug("cache_loc = %s", cache_loc) - log.debug("content = %s", content) - self.assertTrue(saltenv in content) - - def test_cache_file_with_alternate_cachedir_and_absolute_path(self): - """ - Ensure file is cached to correct location when an alternate cachedir is - specified and that cachedir is an absolute path - """ - patched_opts = {x: y for x, y in self.minion_opts.items()} - patched_opts.update(self.MOCKED_OPTS) - alt_cachedir = os.path.join(RUNTIME_VARS.TMP, "abs_cachedir") - - with patch.dict(fileclient.__opts__, patched_opts): - client = fileclient.get_file_client(fileclient.__opts__, pillar=False) - for saltenv in SALTENVS: - self.assertTrue( - client.cache_file("salt://foo.txt", saltenv, cachedir=alt_cachedir) - ) - cache_loc = os.path.join(alt_cachedir, "files", saltenv, "foo.txt") - # Double check that the content of the cached file identifies - # it as being from the correct saltenv. The setUp function - # creates the file with the name of the saltenv mentioned in - # the file, so a simple 'in' check is sufficient here. If - # opening the file raises an exception, this is a problem, so - # we are not catching the exception and letting it be raised so - # that the test fails. 
- with salt.utils.files.fopen(cache_loc) as fp_: - content = fp_.read() - log.debug("cache_loc = %s", cache_loc) - log.debug("content = %s", content) - self.assertTrue(saltenv in content) - - def test_cache_file_with_alternate_cachedir_and_relative_path(self): - """ - Ensure file is cached to correct location when an alternate cachedir is - specified and that cachedir is a relative path - """ - patched_opts = {x: y for x, y in self.minion_opts.items()} - patched_opts.update(self.MOCKED_OPTS) - alt_cachedir = "foo" - - with patch.dict(fileclient.__opts__, patched_opts): - client = fileclient.get_file_client(fileclient.__opts__, pillar=False) - for saltenv in SALTENVS: - self.assertTrue( - client.cache_file("salt://foo.txt", saltenv, cachedir=alt_cachedir) - ) - cache_loc = os.path.join( - fileclient.__opts__["cachedir"], - alt_cachedir, - "files", - saltenv, - "foo.txt", - ) - # Double check that the content of the cached file identifies - # it as being from the correct saltenv. The setUp function - # creates the file with the name of the saltenv mentioned in - # the file, so a simple 'in' check is sufficient here. If - # opening the file raises an exception, this is a problem, so - # we are not catching the exception and letting it be raised so - # that the test fails. 
- with salt.utils.files.fopen(cache_loc) as fp_: - content = fp_.read() - log.debug("cache_loc = %s", cache_loc) - log.debug("content = %s", content) - self.assertTrue(saltenv in content) - - def test_cache_dest(self): - """ - Tests functionality for cache_dest - """ - patched_opts = {x: y for x, y in self.minion_opts.items()} - patched_opts.update(self.MOCKED_OPTS) - - relpath = "foo.com/bar.txt" - cachedir = self.minion_opts["cachedir"] - - def _external(saltenv="base"): - return salt.utils.path.join( - patched_opts["cachedir"], "extrn_files", saltenv, relpath - ) - - def _salt(saltenv="base"): - return salt.utils.path.join( - patched_opts["cachedir"], "files", saltenv, relpath - ) - - def _check(ret, expected): - assert ret == expected, "{} != {}".format(ret, expected) - - with patch.dict(fileclient.__opts__, patched_opts): - client = fileclient.get_file_client(fileclient.__opts__, pillar=False) - - _check(client.cache_dest("https://" + relpath), _external()) - - _check(client.cache_dest("https://" + relpath, "dev"), _external("dev")) - - _check(client.cache_dest("salt://" + relpath), _salt()) - - _check(client.cache_dest("salt://" + relpath, "dev"), _salt("dev")) - - _check( - client.cache_dest("salt://" + relpath + "?saltenv=dev"), _salt("dev") - ) - - _check("/foo/bar", "/foo/bar") diff --git a/tests/unit/test_loader.py b/tests/unit/test_loader.py index cf339033200b..067130620b3b 100644 --- a/tests/unit/test_loader.py +++ b/tests/unit/test_loader.py @@ -8,7 +8,6 @@ import collections import compileall import copy -import imp import inspect import logging import os @@ -35,15 +34,15 @@ def remove_bytecode(module_path): paths = [module_path + "c"] - if hasattr(imp, "get_tag"): - modname, ext = os.path.splitext(module_path.split(os.sep)[-1]) - paths.append( - os.path.join( - os.path.dirname(module_path), - "__pycache__", - "{}.{}.pyc".format(modname, imp.get_tag()), - ) + cache_tag = sys.implementation.cache_tag + modname, ext = 
os.path.splitext(module_path.split(os.sep)[-1]) + paths.append( + os.path.join( + os.path.dirname(module_path), + "__pycache__", + f"{modname}.{cache_tag}.pyc", ) + ) for path in paths: if os.path.exists(path): os.unlink(path) @@ -84,9 +83,7 @@ def setUp(self): # Setup the module self.module_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) self.addCleanup(shutil.rmtree, self.module_dir, ignore_errors=True) - self.module_file = os.path.join( - self.module_dir, "{}.py".format(self.module_name) - ) + self.module_file = os.path.join(self.module_dir, f"{self.module_name}.py") with salt.utils.files.fopen(self.module_file, "w") as fh: fh.write(salt.utils.stringutils.to_str(loader_template)) fh.flush() @@ -163,16 +160,14 @@ def setUpClass(cls): def setUp(self): # Setup the module self.module_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) - self.module_file = os.path.join( - self.module_dir, "{}.py".format(self.module_name) - ) + self.module_file = os.path.join(self.module_dir, f"{self.module_name}.py") with salt.utils.files.fopen(self.module_file, "w") as fh: fh.write(salt.utils.stringutils.to_str(loader_template_module)) fh.flush() os.fsync(fh.fileno()) self.utils_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) - self.utils_file = os.path.join(self.utils_dir, "{}.py".format(self.utils_name)) + self.utils_file = os.path.join(self.utils_dir, f"{self.utils_name}.py") with salt.utils.files.fopen(self.utils_file, "w") as fh: fh.write(salt.utils.stringutils.to_str(loader_template_utils)) fh.flush() @@ -516,7 +511,7 @@ def test_single_item_no_dot(self): Checks that a KeyError is raised when the function key does not contain a '.' 
""" key = "testing_no_dot" - expected = "The key '{}' should contain a '.'".format(key) + expected = f"The key '{key}' should contain a '.'" with self.assertRaises(KeyError) as err: inspect.isfunction(self.loader["testing_no_dot"]) @@ -619,7 +614,7 @@ def rm_module(self): @property def module_path(self): - return os.path.join(self.tmp_dir, "{}.py".format(self.module_name)) + return os.path.join(self.tmp_dir, f"{self.module_name}.py") @pytest.mark.slow_test def test_alias(self): @@ -630,17 +625,15 @@ def test_alias(self): self.assertNotIn(self.module_key, self.loader) self.update_module() - self.assertNotIn("{}.test_alias".format(self.module_name), self.loader) + self.assertNotIn(f"{self.module_name}.test_alias", self.loader) self.assertTrue( isinstance( - self.loader["{}.working_alias".format(self.module_name)], + self.loader[f"{self.module_name}.working_alias"], salt.loader.lazy.LoadedFunc, ) ) self.assertTrue( - inspect.isfunction( - self.loader["{}.working_alias".format(self.module_name)].func - ) + inspect.isfunction(self.loader[f"{self.module_name}.working_alias"].func) ) @pytest.mark.slow_test @@ -802,7 +795,7 @@ def update_module(self): @property def module_path(self): - return os.path.join(self.tmp_dir, "{}.py".format(self.module_name)) + return os.path.join(self.tmp_dir, f"{self.module_name}.py") @pytest.mark.slow_test def test_virtual_alias(self): @@ -1199,7 +1192,7 @@ def setUp(self): "__salt__": self.minion_mods, }, ) - self.assertIn("{}.top".format(self.module_name), self.loader) + self.assertIn(f"{self.module_name}.top", self.loader) def tearDown(self): del self.tmp_dir @@ -1241,7 +1234,7 @@ def update_lib(self, lib_name): @pytest.mark.slow_test def test_basic(self): - self.assertIn("{}.top".format(self.module_name), self.loader) + self.assertIn(f"{self.module_name}.top", self.loader) def _verify_libs(self): for lib in self.libs: @@ -1549,9 +1542,7 @@ def setUp(self): # Setup the module self.module_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) 
self.addCleanup(shutil.rmtree, self.module_dir, ignore_errors=True) - self.module_file = os.path.join( - self.module_dir, "{}.py".format(self.module_name) - ) + self.module_file = os.path.join(self.module_dir, f"{self.module_name}.py") def tearDown(self): try: @@ -1585,7 +1576,7 @@ def _expected(self, optimize=0): return "lazyloadertest.cpython-{}{}{}.pyc".format( sys.version_info[0], sys.version_info[1], - "" if not optimize else ".opt-{}".format(optimize), + "" if not optimize else f".opt-{optimize}", ) def _write_module_file(self): diff --git a/tests/unit/test_log.py b/tests/unit/test_log.py deleted file mode 100644 index 5ce3f697afc4..000000000000 --- a/tests/unit/test_log.py +++ /dev/null @@ -1,178 +0,0 @@ -""" - :codeauthor: Pedro Algarvio (pedro@algarvio.me) - - - tests.unit.log_test - ~~~~~~~~~~~~~~~~~~~ - - Test salt's "hacked" logging -""" - - -import io -import logging - -from salt._logging.handlers import StreamHandler -from salt._logging.impl import SaltLoggingClass -from tests.support.case import TestCase -from tests.support.helpers import TstSuiteLoggingHandler - - -class TestLog(TestCase): - """ - Test several logging settings - """ - - def test_issue_2853_regex_TypeError(self): - # Now, python's logging logger class is ours. - # Let's make sure we have at least one instance - log = SaltLoggingClass(__name__) - - # Test for a format which includes digits in name formatting. - log_format = "[%(name)-15s] %(message)s" - handler = TstSuiteLoggingHandler(format=log_format) - log.addHandler(handler) - - # Trigger TstSuiteLoggingHandler.__enter__ - with handler: - # Let's create another log instance to trigger salt's logging class - # calculations. 
- try: - SaltLoggingClass("{}.with_digits".format(__name__)) - except Exception as err: # pylint: disable=broad-except - raise AssertionError( - "No exception should have been raised: {}".format(err) - ) - - # Remove the testing handler - log.removeHandler(handler) - - # Test for a format which does not include digits in name formatting. - log_format = "[%(name)s] %(message)s" - handler = TstSuiteLoggingHandler(format=log_format) - log.addHandler(handler) - - # Trigger TstSuiteLoggingHandler.__enter__ - with handler: - # Let's create another log instance to trigger salt's logging class - # calculations. - try: - SaltLoggingClass("{}.without_digits".format(__name__)) - except Exception as err: # pylint: disable=broad-except - raise AssertionError( - "No exception should have been raised: {}".format(err) - ) - - # Remove the testing handler - log.removeHandler(handler) - - def test_exc_info_on_loglevel(self): - def raise_exception_on_purpose(): - 1 / 0 # pylint: disable=pointless-statement - - log = SaltLoggingClass(__name__) - - # Only stream2 should contain the traceback - stream1 = io.StringIO() - stream2 = io.StringIO() - handler1 = StreamHandler(stream1) - handler2 = StreamHandler(stream2) - - handler1.setLevel(logging.INFO) - handler2.setLevel(logging.DEBUG) - - log.addHandler(handler1) - log.addHandler(handler2) - - try: - raise_exception_on_purpose() - except ZeroDivisionError as exc: - log.error( - "Exception raised on purpose caught: ZeroDivisionError", - exc_info_on_loglevel=logging.DEBUG, - ) - - try: - self.assertIn( - "Exception raised on purpose caught: ZeroDivisionError", - stream1.getvalue(), - ) - self.assertNotIn("Traceback (most recent call last)", stream1.getvalue()) - self.assertIn( - "Exception raised on purpose caught: ZeroDivisionError", - stream2.getvalue(), - ) - self.assertIn("Traceback (most recent call last)", stream2.getvalue()) - finally: - log.removeHandler(handler1) - log.removeHandler(handler2) - - # Both streams should contain the 
traceback - stream1 = io.StringIO() - stream2 = io.StringIO() - handler1 = StreamHandler(stream1) - handler2 = StreamHandler(stream2) - - handler1.setLevel(logging.INFO) - handler2.setLevel(logging.DEBUG) - - log.addHandler(handler1) - log.addHandler(handler2) - - try: - raise_exception_on_purpose() - except ZeroDivisionError as exc: - log.error( - "Exception raised on purpose caught: ZeroDivisionError", - exc_info_on_loglevel=logging.INFO, - ) - - try: - self.assertIn( - "Exception raised on purpose caught: ZeroDivisionError", - stream1.getvalue(), - ) - self.assertIn("Traceback (most recent call last)", stream1.getvalue()) - self.assertIn( - "Exception raised on purpose caught: ZeroDivisionError", - stream2.getvalue(), - ) - self.assertIn("Traceback (most recent call last)", stream2.getvalue()) - finally: - log.removeHandler(handler1) - log.removeHandler(handler2) - - # No streams should contain the traceback - stream1 = io.StringIO() - stream2 = io.StringIO() - handler1 = StreamHandler(stream1) - handler2 = StreamHandler(stream2) - - handler1.setLevel(logging.ERROR) - handler2.setLevel(logging.INFO) - - log.addHandler(handler1) - log.addHandler(handler2) - - try: - raise_exception_on_purpose() - except ZeroDivisionError as exc: - log.error( - "Exception raised on purpose caught: ZeroDivisionError", - exc_info_on_loglevel=logging.DEBUG, - ) - - try: - self.assertIn( - "Exception raised on purpose caught: ZeroDivisionError", - stream1.getvalue(), - ) - self.assertNotIn("Traceback (most recent call last)", stream1.getvalue()) - self.assertIn( - "Exception raised on purpose caught: ZeroDivisionError", - stream2.getvalue(), - ) - self.assertNotIn("Traceback (most recent call last)", stream2.getvalue()) - finally: - log.removeHandler(handler1) - log.removeHandler(handler2) diff --git a/tests/unit/test_master.py b/tests/unit/test_master.py index 26c0bdb19ae0..b454882f06c5 100644 --- a/tests/unit/test_master.py +++ b/tests/unit/test_master.py @@ -681,7 +681,9 @@ class 
MaintenanceTestCase(TestCase, AdaptedConfigurationTestCaseMixin): """ def setUp(self): - opts = self.get_temp_config("master", git_pillar_update_interval=180) + opts = self.get_temp_config( + "master", git_pillar_update_interval=180, maintenance_interval=181 + ) self.main_class = salt.master.Maintenance(opts) self.main_class._after_fork_methods = self.main_class._finalize_methods = [] @@ -758,10 +760,10 @@ def __call__(self, *args, **kwargs): self.assertEqual(str(exc), "Time passes") self.assertEqual(mocked_time._calls, [60] * 4) self.assertEqual(mocked__post_fork_init.call_times, [0]) - self.assertEqual(mocked_clean_old_jobs.call_times, [60, 120, 180]) - self.assertEqual(mocked_clean_expired_tokens.call_times, [60, 120, 180]) - self.assertEqual(mocked_clean_pub_auth.call_times, [60, 120, 180]) - self.assertEqual(mocked_handle_git_pillar.call_times, [0, 180]) + self.assertEqual(mocked_clean_old_jobs.call_times, [0, 120, 180]) + self.assertEqual(mocked_clean_expired_tokens.call_times, [0, 120, 180]) + self.assertEqual(mocked_clean_pub_auth.call_times, [0, 120, 180]) + self.assertEqual(mocked_handle_git_pillar.call_times, [0]) self.assertEqual(mocked_handle_schedule.call_times, [0, 60, 120, 180]) self.assertEqual(mocked_handle_key_cache.call_times, [0, 60, 120, 180]) self.assertEqual(mocked_handle_presence.call_times, [0, 60, 120, 180]) diff --git a/tests/unit/test_template.py b/tests/unit/test_template.py deleted file mode 100644 index 5462b1454725..000000000000 --- a/tests/unit/test_template.py +++ /dev/null @@ -1,110 +0,0 @@ -""" - :codeauthor: :email: `Mike Place ` -""" - - -import io - -from salt import template -from tests.support.mock import MagicMock -from tests.support.unit import TestCase - - -class TemplateTestCase(TestCase): - - render_dict = { - "jinja": "fake_jinja_func", - "json": "fake_json_func", - "mako": "fake_make_func", - } - - def test_compile_template_bad_type(self): - """ - Test to ensure that unsupported types cannot be passed to the template 
compiler - """ - ret = template.compile_template(["1", "2", "3"], None, None, None, None) - self.assertDictEqual(ret, {}) - - def test_compile_template_preserves_windows_newlines(self): - """ - Test to ensure that a file with Windows newlines, when rendered by a - template renderer, does not eat the CR character. - """ - - def _get_rend(renderer, value): - """ - We need a new MagicMock each time since we're dealing with StringIO - objects which are read like files. - """ - return {renderer: MagicMock(return_value=io.StringIO(value))} - - input_data_windows = "foo\r\nbar\r\nbaz\r\n" - input_data_non_windows = input_data_windows.replace("\r\n", "\n") - renderer = "test" - blacklist = whitelist = [] - - ret = template.compile_template( - ":string:", - _get_rend(renderer, input_data_non_windows), - renderer, - blacklist, - whitelist, - input_data=input_data_windows, - ).read() - # Even though the mocked renderer returned a string without the windows - # newlines, the compiled template should still have them. - self.assertEqual(ret, input_data_windows) - - # Now test that we aren't adding them in unnecessarily. - ret = template.compile_template( - ":string:", - _get_rend(renderer, input_data_non_windows), - renderer, - blacklist, - whitelist, - input_data=input_data_non_windows, - ).read() - self.assertEqual(ret, input_data_non_windows) - - # Finally, ensure that we're not unnecessarily replacing the \n with - # \r\n in the event that the renderer returned a string with the - # windows newlines intact. - ret = template.compile_template( - ":string:", - _get_rend(renderer, input_data_windows), - renderer, - blacklist, - whitelist, - input_data=input_data_windows, - ).read() - self.assertEqual(ret, input_data_windows) - - def test_check_render_pipe_str(self): - """ - Check that all renderers specified in the pipe string are available. 
- """ - ret = template.check_render_pipe_str("jinja|json", self.render_dict, None, None) - self.assertIn(("fake_jinja_func", ""), ret) - self.assertIn(("fake_json_func", ""), ret) - self.assertNotIn(("OBVIOUSLY_NOT_HERE", ""), ret) - - def test_check_renderer_blacklisting(self): - """ - Check that all renderers specified in the pipe string are available. - """ - ret = template.check_render_pipe_str( - "jinja|json", self.render_dict, ["jinja"], None - ) - self.assertListEqual([("fake_json_func", "")], ret) - ret = template.check_render_pipe_str( - "jinja|json", self.render_dict, None, ["jinja"] - ) - self.assertListEqual([("fake_jinja_func", "")], ret) - ret = template.check_render_pipe_str( - "jinja|json", self.render_dict, ["jinja"], ["jinja"] - ) - self.assertListEqual([], ret) - ret = template.check_render_pipe_str( - "jinja|json", self.render_dict, ["jinja"], ["jinja", "json"] - ) - self.assertListEqual([("fake_json_func", "")], ret) diff --git a/tests/unit/utils/test_boto3mod.py b/tests/unit/utils/test_boto3mod.py index 74f6478e2726..0a9509ab5987 100644 --- a/tests/unit/utils/test_boto3mod.py +++ b/tests/unit/utils/test_boto3mod.py @@ -24,6 +24,10 @@ REQUIRED_BOTO3_VERSION = "1.2.1" +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + @pytest.mark.skipif(HAS_BOTO3 is False, reason="The boto module must be installed.") @pytest.mark.skipif( diff --git a/tests/unit/utils/test_botomod.py b/tests/unit/utils/test_botomod.py index bf3ca37a837d..3e67cbec6985 100644 --- a/tests/unit/utils/test_botomod.py +++ b/tests/unit/utils/test_botomod.py @@ -53,6 +53,11 @@ def stub_function(self): return stub_function +pytestmark = [ + pytest.mark.skip_on_fips_enabled_platform, +] + + required_boto_version = "2.0.0" required_boto3_version = "1.2.1" region = "us-east-1" diff --git a/tests/unit/utils/test_data.py b/tests/unit/utils/test_data.py deleted file mode 100644 index 6d16e1989993..000000000000 --- a/tests/unit/utils/test_data.py +++ /dev/null @@ -1,1506 +0,0 @@ 
-""" -Tests for salt.utils.data -""" - -import builtins -import logging - -import salt.utils.data -import salt.utils.stringutils -from salt.utils.odict import OrderedDict -from tests.support.mock import patch -from tests.support.unit import LOREM_IPSUM, TestCase - -log = logging.getLogger(__name__) -_b = lambda x: x.encode("utf-8") -_s = lambda x: salt.utils.stringutils.to_str(x, normalize=True) -# Some randomized data that will not decode -BYTES = b"1\x814\x10" - -# This is an example of a unicode string with й constructed using two separate -# code points. Do not modify it. -EGGS = "\u044f\u0438\u0306\u0446\u0430" - - -class DataTestCase(TestCase): - test_data = [ - "unicode_str", - _b("питон"), - 123, - 456.789, - True, - False, - None, - EGGS, - BYTES, - [123, 456.789, _b("спам"), True, False, None, EGGS, BYTES], - (987, 654.321, _b("яйца"), EGGS, None, (True, EGGS, BYTES)), - { - _b("str_key"): _b("str_val"), - None: True, - 123: 456.789, - EGGS: BYTES, - _b("subdict"): { - "unicode_key": EGGS, - _b("tuple"): (123, "hello", _b("world"), True, EGGS, BYTES), - _b("list"): [456, _b("спам"), False, EGGS, BYTES], - }, - }, - OrderedDict([(_b("foo"), "bar"), (123, 456), (EGGS, BYTES)]), - ] - - def test_sorted_ignorecase(self): - test_list = ["foo", "Foo", "bar", "Bar"] - expected_list = ["bar", "Bar", "foo", "Foo"] - self.assertEqual(salt.utils.data.sorted_ignorecase(test_list), expected_list) - - def test_mysql_to_dict(self): - test_mysql_output = [ - "+----+------+-----------+------+---------+------+-------+------------------+", - "| Id | User | Host | db | Command | Time | State | Info " - " |", - "+----+------+-----------+------+---------+------+-------+------------------+", - "| 7 | root | localhost | NULL | Query | 0 | init | show" - " processlist |", - "+----+------+-----------+------+---------+------+-------+------------------+", - ] - - ret = salt.utils.data.mysql_to_dict(test_mysql_output, "Info") - expected_dict = { - "show processlist": { - "Info": 
"show processlist", - "db": "NULL", - "State": "init", - "Host": "localhost", - "Command": "Query", - "User": "root", - "Time": 0, - "Id": 7, - } - } - - self.assertDictEqual(ret, expected_dict) - - def test_subdict_match(self): - test_two_level_dict = {"foo": {"bar": "baz"}} - test_two_level_comb_dict = {"foo": {"bar": "baz:woz"}} - test_two_level_dict_and_list = { - "abc": ["def", "ghi", {"lorem": {"ipsum": [{"dolor": "sit"}]}}], - } - test_three_level_dict = {"a": {"b": {"c": "v"}}} - - self.assertTrue( - salt.utils.data.subdict_match(test_two_level_dict, "foo:bar:baz") - ) - # In test_two_level_comb_dict, 'foo:bar' corresponds to 'baz:woz', not - # 'baz'. This match should return False. - self.assertFalse( - salt.utils.data.subdict_match(test_two_level_comb_dict, "foo:bar:baz") - ) - # This tests matching with the delimiter in the value part (in other - # words, that the path 'foo:bar' corresponds to the string 'baz:woz'). - self.assertTrue( - salt.utils.data.subdict_match(test_two_level_comb_dict, "foo:bar:baz:woz") - ) - # This would match if test_two_level_comb_dict['foo']['bar'] was equal - # to 'baz:woz:wiz', or if there was more deep nesting. But it does not, - # so this should return False. - self.assertFalse( - salt.utils.data.subdict_match( - test_two_level_comb_dict, "foo:bar:baz:woz:wiz" - ) - ) - # This tests for cases when a key path corresponds to a list. The - # value part 'ghi' should be successfully matched as it is a member of - # the list corresponding to key path 'abc'. It is somewhat a - # duplication of a test within test_traverse_dict_and_list, but - # salt.utils.data.subdict_match() does more than just invoke - # salt.utils.traverse_list_and_dict() so this particular assertion is a - # sanity check. - self.assertTrue( - salt.utils.data.subdict_match(test_two_level_dict_and_list, "abc:ghi") - ) - # This tests the use case of a dict embedded in a list, embedded in a - # list, embedded in a dict. 
This is a rather absurd case, but it - # confirms that match recursion works properly. - self.assertTrue( - salt.utils.data.subdict_match( - test_two_level_dict_and_list, "abc:lorem:ipsum:dolor:sit" - ) - ) - # Test four level dict match for reference - self.assertTrue(salt.utils.data.subdict_match(test_three_level_dict, "a:b:c:v")) - # Test regression in 2015.8 where 'a:c:v' would match 'a:b:c:v' - self.assertFalse(salt.utils.data.subdict_match(test_three_level_dict, "a:c:v")) - # Test wildcard match - self.assertTrue(salt.utils.data.subdict_match(test_three_level_dict, "a:*:c:v")) - - def test_subdict_match_with_wildcards(self): - """ - Tests subdict matching when wildcards are used in the expression - """ - data = {"a": {"b": {"ç": "d", "é": ["eff", "gee", "8ch"], "ĩ": {"j": "k"}}}} - assert salt.utils.data.subdict_match(data, "*:*:*:*") - assert salt.utils.data.subdict_match(data, "a:*:*:*") - assert salt.utils.data.subdict_match(data, "a:b:*:*") - assert salt.utils.data.subdict_match(data, "a:b:ç:*") - assert salt.utils.data.subdict_match(data, "a:b:*:d") - assert salt.utils.data.subdict_match(data, "a:*:ç:d") - assert salt.utils.data.subdict_match(data, "*:b:ç:d") - assert salt.utils.data.subdict_match(data, "*:*:ç:d") - assert salt.utils.data.subdict_match(data, "*:*:*:d") - assert salt.utils.data.subdict_match(data, "a:*:*:d") - assert salt.utils.data.subdict_match(data, "a:b:*:ef*") - assert salt.utils.data.subdict_match(data, "a:b:*:g*") - assert salt.utils.data.subdict_match(data, "a:b:*:j:*") - assert salt.utils.data.subdict_match(data, "a:b:*:j:k") - assert salt.utils.data.subdict_match(data, "a:b:*:*:k") - assert salt.utils.data.subdict_match(data, "a:b:*:*:*") - - def test_traverse_dict(self): - test_two_level_dict = {"foo": {"bar": "baz"}} - - self.assertDictEqual( - {"not_found": "nope"}, - salt.utils.data.traverse_dict( - test_two_level_dict, "foo:bar:baz", {"not_found": "nope"} - ), - ) - self.assertEqual( - "baz", - 
salt.utils.data.traverse_dict( - test_two_level_dict, "foo:bar", {"not_found": "not_found"} - ), - ) - - def test_traverse_dict_and_list(self): - test_two_level_dict = {"foo": {"bar": "baz"}} - test_two_level_dict_and_list = { - "foo": ["bar", "baz", {"lorem": {"ipsum": [{"dolor": "sit"}]}}] - } - - # Check traversing too far: salt.utils.data.traverse_dict_and_list() returns - # the value corresponding to a given key path, and baz is a value - # corresponding to the key path foo:bar. - self.assertDictEqual( - {"not_found": "nope"}, - salt.utils.data.traverse_dict_and_list( - test_two_level_dict, "foo:bar:baz", {"not_found": "nope"} - ), - ) - # Now check to ensure that foo:bar corresponds to baz - self.assertEqual( - "baz", - salt.utils.data.traverse_dict_and_list( - test_two_level_dict, "foo:bar", {"not_found": "not_found"} - ), - ) - # Check traversing too far - self.assertDictEqual( - {"not_found": "nope"}, - salt.utils.data.traverse_dict_and_list( - test_two_level_dict_and_list, "foo:bar", {"not_found": "nope"} - ), - ) - # Check index 1 (2nd element) of list corresponding to path 'foo' - self.assertEqual( - "baz", - salt.utils.data.traverse_dict_and_list( - test_two_level_dict_and_list, "foo:1", {"not_found": "not_found"} - ), - ) - # Traverse a couple times into dicts embedded in lists - self.assertEqual( - "sit", - salt.utils.data.traverse_dict_and_list( - test_two_level_dict_and_list, - "foo:lorem:ipsum:dolor", - {"not_found": "not_found"}, - ), - ) - - # Traverse and match integer key in a nested dict - # https://github.com/saltstack/salt/issues/56444 - self.assertEqual( - "it worked", - salt.utils.data.traverse_dict_and_list( - {"foo": {1234: "it worked"}}, - "foo:1234", - "it didn't work", - ), - ) - # Make sure that we properly return the default value when the initial - # attempt fails and YAML-loading the target key doesn't change its - # value. 
- self.assertEqual( - "default", - salt.utils.data.traverse_dict_and_list( - {"foo": {"baz": "didn't work"}}, - "foo:bar", - "default", - ), - ) - - def test_issue_39709(self): - test_two_level_dict_and_list = { - "foo": ["bar", "baz", {"lorem": {"ipsum": [{"dolor": "sit"}]}}] - } - - self.assertEqual( - "sit", - salt.utils.data.traverse_dict_and_list( - test_two_level_dict_and_list, - ["foo", "lorem", "ipsum", "dolor"], - {"not_found": "not_found"}, - ), - ) - - def test_compare_dicts(self): - ret = salt.utils.data.compare_dicts(old={"foo": "bar"}, new={"foo": "bar"}) - self.assertEqual(ret, {}) - - ret = salt.utils.data.compare_dicts(old={"foo": "bar"}, new={"foo": "woz"}) - expected_ret = {"foo": {"new": "woz", "old": "bar"}} - self.assertDictEqual(ret, expected_ret) - - def test_compare_lists_no_change(self): - ret = salt.utils.data.compare_lists( - old=[1, 2, 3, "a", "b", "c"], new=[1, 2, 3, "a", "b", "c"] - ) - expected = {} - self.assertDictEqual(ret, expected) - - def test_compare_lists_changes(self): - ret = salt.utils.data.compare_lists( - old=[1, 2, 3, "a", "b", "c"], new=[1, 2, 4, "x", "y", "z"] - ) - expected = {"new": [4, "x", "y", "z"], "old": [3, "a", "b", "c"]} - self.assertDictEqual(ret, expected) - - def test_compare_lists_changes_new(self): - ret = salt.utils.data.compare_lists(old=[1, 2, 3], new=[1, 2, 3, "x", "y", "z"]) - expected = {"new": ["x", "y", "z"]} - self.assertDictEqual(ret, expected) - - def test_compare_lists_changes_old(self): - ret = salt.utils.data.compare_lists(old=[1, 2, 3, "a", "b", "c"], new=[1, 2, 3]) - expected = {"old": ["a", "b", "c"]} - self.assertDictEqual(ret, expected) - - def test_decode(self): - """ - Companion to test_decode_to_str, they should both be kept up-to-date - with one another. - - NOTE: This uses the lambda "_b" defined above in the global scope, - which encodes a string to a bytestring, assuming utf-8. 
- """ - expected = [ - "unicode_str", - "питон", - 123, - 456.789, - True, - False, - None, - "яйца", - BYTES, - [123, 456.789, "спам", True, False, None, "яйца", BYTES], - (987, 654.321, "яйца", "яйца", None, (True, "яйца", BYTES)), - { - "str_key": "str_val", - None: True, - 123: 456.789, - "яйца": BYTES, - "subdict": { - "unicode_key": "яйца", - "tuple": (123, "hello", "world", True, "яйца", BYTES), - "list": [456, "спам", False, "яйца", BYTES], - }, - }, - OrderedDict([("foo", "bar"), (123, 456), ("яйца", BYTES)]), - ] - - ret = salt.utils.data.decode( - self.test_data, - keep=True, - normalize=True, - preserve_dict_class=True, - preserve_tuples=True, - ) - self.assertEqual(ret, expected) - - # The binary data in the data structure should fail to decode, even - # using the fallback, and raise an exception. - self.assertRaises( - UnicodeDecodeError, - salt.utils.data.decode, - self.test_data, - keep=False, - normalize=True, - preserve_dict_class=True, - preserve_tuples=True, - ) - - # Now munge the expected data so that we get what we would expect if we - # disable preservation of dict class and tuples - expected[10] = [987, 654.321, "яйца", "яйца", None, [True, "яйца", BYTES]] - expected[11]["subdict"]["tuple"] = [123, "hello", "world", True, "яйца", BYTES] - expected[12] = {"foo": "bar", 123: 456, "яйца": BYTES} - - ret = salt.utils.data.decode( - self.test_data, - keep=True, - normalize=True, - preserve_dict_class=False, - preserve_tuples=False, - ) - self.assertEqual(ret, expected) - - # Now test single non-string, non-data-structure items, these should - # return the same value when passed to this function - for item in (123, 4.56, True, False, None): - log.debug("Testing decode of %s", item) - self.assertEqual(salt.utils.data.decode(item), item) - - # Test single strings (not in a data structure) - self.assertEqual(salt.utils.data.decode("foo"), "foo") - self.assertEqual(salt.utils.data.decode(_b("bar")), "bar") - 
self.assertEqual(salt.utils.data.decode(EGGS, normalize=True), "яйца") - self.assertEqual(salt.utils.data.decode(EGGS, normalize=False), EGGS) - - # Test binary blob - self.assertEqual(salt.utils.data.decode(BYTES, keep=True), BYTES) - self.assertRaises(UnicodeDecodeError, salt.utils.data.decode, BYTES, keep=False) - - def test_circular_refs_dicts(self): - test_dict = {"key": "value", "type": "test1"} - test_dict["self"] = test_dict - ret = salt.utils.data._remove_circular_refs(ob=test_dict) - self.assertDictEqual(ret, {"key": "value", "type": "test1", "self": None}) - - def test_circular_refs_lists(self): - test_list = { - "foo": [], - } - test_list["foo"].append((test_list,)) - ret = salt.utils.data._remove_circular_refs(ob=test_list) - self.assertDictEqual(ret, {"foo": [(None,)]}) - - def test_circular_refs_tuple(self): - test_dup = {"foo": "string 1", "bar": "string 1", "ham": 1, "spam": 1} - ret = salt.utils.data._remove_circular_refs(ob=test_dup) - self.assertDictEqual( - ret, {"foo": "string 1", "bar": "string 1", "ham": 1, "spam": 1} - ) - - def test_decode_to_str(self): - """ - Companion to test_decode, they should both be kept up-to-date with one - another. - - NOTE: This uses the lambda "_s" defined above in the global scope, - which converts the string/bytestring to a str type. 
- """ - expected = [ - _s("unicode_str"), - _s("питон"), - 123, - 456.789, - True, - False, - None, - _s("яйца"), - BYTES, - [123, 456.789, _s("спам"), True, False, None, _s("яйца"), BYTES], - (987, 654.321, _s("яйца"), _s("яйца"), None, (True, _s("яйца"), BYTES)), - { - _s("str_key"): _s("str_val"), - None: True, - 123: 456.789, - _s("яйца"): BYTES, - _s("subdict"): { - _s("unicode_key"): _s("яйца"), - _s("tuple"): ( - 123, - _s("hello"), - _s("world"), - True, - _s("яйца"), - BYTES, - ), - _s("list"): [456, _s("спам"), False, _s("яйца"), BYTES], - }, - }, - OrderedDict([(_s("foo"), _s("bar")), (123, 456), (_s("яйца"), BYTES)]), - ] - - ret = salt.utils.data.decode( - self.test_data, - keep=True, - normalize=True, - preserve_dict_class=True, - preserve_tuples=True, - to_str=True, - ) - self.assertEqual(ret, expected) - - # The binary data in the data structure should fail to decode, even - # using the fallback, and raise an exception. - self.assertRaises( - UnicodeDecodeError, - salt.utils.data.decode, - self.test_data, - keep=False, - normalize=True, - preserve_dict_class=True, - preserve_tuples=True, - to_str=True, - ) - - # Now munge the expected data so that we get what we would expect if we - # disable preservation of dict class and tuples - expected[10] = [ - 987, - 654.321, - _s("яйца"), - _s("яйца"), - None, - [True, _s("яйца"), BYTES], - ] - expected[11][_s("subdict")][_s("tuple")] = [ - 123, - _s("hello"), - _s("world"), - True, - _s("яйца"), - BYTES, - ] - expected[12] = {_s("foo"): _s("bar"), 123: 456, _s("яйца"): BYTES} - - ret = salt.utils.data.decode( - self.test_data, - keep=True, - normalize=True, - preserve_dict_class=False, - preserve_tuples=False, - to_str=True, - ) - self.assertEqual(ret, expected) - - # Now test single non-string, non-data-structure items, these should - # return the same value when passed to this function - for item in (123, 4.56, True, False, None): - log.debug("Testing decode of %s", item) - 
self.assertEqual(salt.utils.data.decode(item, to_str=True), item) - - # Test single strings (not in a data structure) - self.assertEqual(salt.utils.data.decode("foo", to_str=True), _s("foo")) - self.assertEqual(salt.utils.data.decode(_b("bar"), to_str=True), _s("bar")) - - # Test binary blob - self.assertEqual(salt.utils.data.decode(BYTES, keep=True, to_str=True), BYTES) - self.assertRaises( - UnicodeDecodeError, - salt.utils.data.decode, - BYTES, - keep=False, - to_str=True, - ) - - def test_decode_fallback(self): - """ - Test fallback to utf-8 - """ - with patch.object(builtins, "__salt_system_encoding__", "ascii"): - self.assertEqual(salt.utils.data.decode(_b("яйца")), "яйца") - - def test_encode(self): - """ - NOTE: This uses the lambda "_b" defined above in the global scope, - which encodes a string to a bytestring, assuming utf-8. - """ - expected = [ - _b("unicode_str"), - _b("питон"), - 123, - 456.789, - True, - False, - None, - _b(EGGS), - BYTES, - [123, 456.789, _b("спам"), True, False, None, _b(EGGS), BYTES], - (987, 654.321, _b("яйца"), _b(EGGS), None, (True, _b(EGGS), BYTES)), - { - _b("str_key"): _b("str_val"), - None: True, - 123: 456.789, - _b(EGGS): BYTES, - _b("subdict"): { - _b("unicode_key"): _b(EGGS), - _b("tuple"): (123, _b("hello"), _b("world"), True, _b(EGGS), BYTES), - _b("list"): [456, _b("спам"), False, _b(EGGS), BYTES], - }, - }, - OrderedDict([(_b("foo"), _b("bar")), (123, 456), (_b(EGGS), BYTES)]), - ] - - # Both keep=True and keep=False should work because the BYTES data is - # already bytes. 
- ret = salt.utils.data.encode( - self.test_data, keep=True, preserve_dict_class=True, preserve_tuples=True - ) - self.assertEqual(ret, expected) - ret = salt.utils.data.encode( - self.test_data, keep=False, preserve_dict_class=True, preserve_tuples=True - ) - self.assertEqual(ret, expected) - - # Now munge the expected data so that we get what we would expect if we - # disable preservation of dict class and tuples - expected[10] = [ - 987, - 654.321, - _b("яйца"), - _b(EGGS), - None, - [True, _b(EGGS), BYTES], - ] - expected[11][_b("subdict")][_b("tuple")] = [ - 123, - _b("hello"), - _b("world"), - True, - _b(EGGS), - BYTES, - ] - expected[12] = {_b("foo"): _b("bar"), 123: 456, _b(EGGS): BYTES} - - ret = salt.utils.data.encode( - self.test_data, keep=True, preserve_dict_class=False, preserve_tuples=False - ) - self.assertEqual(ret, expected) - ret = salt.utils.data.encode( - self.test_data, keep=False, preserve_dict_class=False, preserve_tuples=False - ) - self.assertEqual(ret, expected) - - # Now test single non-string, non-data-structure items, these should - # return the same value when passed to this function - for item in (123, 4.56, True, False, None): - log.debug("Testing encode of %s", item) - self.assertEqual(salt.utils.data.encode(item), item) - - # Test single strings (not in a data structure) - self.assertEqual(salt.utils.data.encode("foo"), _b("foo")) - self.assertEqual(salt.utils.data.encode(_b("bar")), _b("bar")) - - # Test binary blob, nothing should happen even when keep=False since - # the data is already bytes - self.assertEqual(salt.utils.data.encode(BYTES, keep=True), BYTES) - self.assertEqual(salt.utils.data.encode(BYTES, keep=False), BYTES) - - def test_encode_keep(self): - """ - Whereas we tested the keep argument in test_decode, it is much easier - to do a more comprehensive test of keep in its own function where we - can force the encoding. 
- """ - unicode_str = "питон" - encoding = "ascii" - - # Test single string - self.assertEqual( - salt.utils.data.encode(unicode_str, encoding, keep=True), unicode_str - ) - self.assertRaises( - UnicodeEncodeError, - salt.utils.data.encode, - unicode_str, - encoding, - keep=False, - ) - - data = [ - unicode_str, - [b"foo", [unicode_str], {b"key": unicode_str}, (unicode_str,)], - { - b"list": [b"foo", unicode_str], - b"dict": {b"key": unicode_str}, - b"tuple": (b"foo", unicode_str), - }, - ([b"foo", unicode_str], {b"key": unicode_str}, (unicode_str,)), - ] - - # Since everything was a bytestring aside from the bogus data, the - # return data should be identical. We don't need to test recursive - # decoding, that has already been tested in test_encode. - self.assertEqual( - salt.utils.data.encode(data, encoding, keep=True, preserve_tuples=True), - data, - ) - self.assertRaises( - UnicodeEncodeError, - salt.utils.data.encode, - data, - encoding, - keep=False, - preserve_tuples=True, - ) - - for index, _ in enumerate(data): - self.assertEqual( - salt.utils.data.encode( - data[index], encoding, keep=True, preserve_tuples=True - ), - data[index], - ) - self.assertRaises( - UnicodeEncodeError, - salt.utils.data.encode, - data[index], - encoding, - keep=False, - preserve_tuples=True, - ) - - def test_encode_fallback(self): - """ - Test fallback to utf-8 - """ - with patch.object(builtins, "__salt_system_encoding__", "ascii"): - self.assertEqual(salt.utils.data.encode("яйца"), _b("яйца")) - with patch.object(builtins, "__salt_system_encoding__", "CP1252"): - self.assertEqual(salt.utils.data.encode("Ψ"), _b("Ψ")) - - def test_repack_dict(self): - list_of_one_element_dicts = [ - {"dict_key_1": "dict_val_1"}, - {"dict_key_2": "dict_val_2"}, - {"dict_key_3": "dict_val_3"}, - ] - expected_ret = { - "dict_key_1": "dict_val_1", - "dict_key_2": "dict_val_2", - "dict_key_3": "dict_val_3", - } - ret = salt.utils.data.repack_dictlist(list_of_one_element_dicts) - 
self.assertDictEqual(ret, expected_ret) - - # Try with yaml - yaml_key_val_pair = "- key1: val1" - ret = salt.utils.data.repack_dictlist(yaml_key_val_pair) - self.assertDictEqual(ret, {"key1": "val1"}) - - # Make sure we handle non-yaml junk data - ret = salt.utils.data.repack_dictlist(LOREM_IPSUM) - self.assertDictEqual(ret, {}) - - def test_stringify(self): - self.assertRaises(TypeError, salt.utils.data.stringify, 9) - self.assertEqual( - salt.utils.data.stringify(["one", "two", "three", 4, 5]), - ["one", "two", "three", "4", "5"], - ) - - def test_json_query(self): - # Raises exception if jmespath module is not found - with patch("salt.utils.data.jmespath", None): - self.assertRaisesRegex( - RuntimeError, "requires jmespath", salt.utils.data.json_query, {}, "@" - ) - - # Test search - user_groups = { - "user1": {"groups": ["group1", "group2", "group3"]}, - "user2": {"groups": ["group1", "group2"]}, - "user3": {"groups": ["group3"]}, - } - expression = "*.groups[0]" - primary_groups = ["group1", "group1", "group3"] - self.assertEqual( - sorted(salt.utils.data.json_query(user_groups, expression)), primary_groups - ) - - -class FilterFalseyTestCase(TestCase): - """ - Test suite for salt.utils.data.filter_falsey - """ - - def test_nop(self): - """ - Test cases where nothing will be done. 
- """ - # Test with dictionary without recursion - old_dict = { - "foo": "bar", - "bar": {"baz": {"qux": "quux"}}, - "baz": ["qux", {"foo": "bar"}], - } - new_dict = salt.utils.data.filter_falsey(old_dict) - self.assertEqual(old_dict, new_dict) - # Check returned type equality - self.assertIs(type(old_dict), type(new_dict)) - # Test dictionary with recursion - new_dict = salt.utils.data.filter_falsey(old_dict, recurse_depth=3) - self.assertEqual(old_dict, new_dict) - # Test with list - old_list = ["foo", "bar"] - new_list = salt.utils.data.filter_falsey(old_list) - self.assertEqual(old_list, new_list) - # Check returned type equality - self.assertIs(type(old_list), type(new_list)) - # Test with set - old_set = {"foo", "bar"} - new_set = salt.utils.data.filter_falsey(old_set) - self.assertEqual(old_set, new_set) - # Check returned type equality - self.assertIs(type(old_set), type(new_set)) - # Test with OrderedDict - old_dict = OrderedDict( - [ - ("foo", "bar"), - ("bar", OrderedDict([("qux", "quux")])), - ("baz", ["qux", OrderedDict([("foo", "bar")])]), - ] - ) - new_dict = salt.utils.data.filter_falsey(old_dict) - self.assertEqual(old_dict, new_dict) - self.assertIs(type(old_dict), type(new_dict)) - # Test excluding int - old_list = [0] - new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[int]) - self.assertEqual(old_list, new_list) - # Test excluding str (or unicode) (or both) - old_list = [""] - new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[str]) - self.assertEqual(old_list, new_list) - # Test excluding list - old_list = [[]] - new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[type([])]) - self.assertEqual(old_list, new_list) - # Test excluding dict - old_list = [{}] - new_list = salt.utils.data.filter_falsey(old_list, ignore_types=[type({})]) - self.assertEqual(old_list, new_list) - - def test_filter_dict_no_recurse(self): - """ - Test filtering a dictionary without recursing. 
- This will only filter out key-values where the values are falsey. - """ - old_dict = { - "foo": None, - "bar": {"baz": {"qux": None, "quux": "", "foo": []}}, - "baz": ["qux"], - "qux": {}, - "quux": [], - } - new_dict = salt.utils.data.filter_falsey(old_dict) - expect_dict = { - "bar": {"baz": {"qux": None, "quux": "", "foo": []}}, - "baz": ["qux"], - } - self.assertEqual(expect_dict, new_dict) - self.assertIs(type(expect_dict), type(new_dict)) - - def test_filter_dict_recurse(self): - """ - Test filtering a dictionary with recursing. - This will filter out any key-values where the values are falsey or when - the values *become* falsey after filtering their contents (in case they - are lists or dicts). - """ - old_dict = { - "foo": None, - "bar": {"baz": {"qux": None, "quux": "", "foo": []}}, - "baz": ["qux"], - "qux": {}, - "quux": [], - } - new_dict = salt.utils.data.filter_falsey(old_dict, recurse_depth=3) - expect_dict = {"baz": ["qux"]} - self.assertEqual(expect_dict, new_dict) - self.assertIs(type(expect_dict), type(new_dict)) - - def test_filter_list_no_recurse(self): - """ - Test filtering a list without recursing. - This will only filter out items which are falsey. - """ - old_list = ["foo", None, [], {}, 0, ""] - new_list = salt.utils.data.filter_falsey(old_list) - expect_list = ["foo"] - self.assertEqual(expect_list, new_list) - self.assertIs(type(expect_list), type(new_list)) - # Ensure nested values are *not* filtered out. - old_list = [ - "foo", - ["foo"], - ["foo", None], - {"foo": 0}, - {"foo": "bar", "baz": []}, - [{"foo": ""}], - ] - new_list = salt.utils.data.filter_falsey(old_list) - self.assertEqual(old_list, new_list) - self.assertIs(type(old_list), type(new_list)) - - def test_filter_list_recurse(self): - """ - Test filtering a list with recursing. - This will filter out any items which are falsey, or which become falsey - after filtering their contents (in case they are lists or dicts). 
- """ - old_list = [ - "foo", - ["foo"], - ["foo", None], - {"foo": 0}, - {"foo": "bar", "baz": []}, - [{"foo": ""}], - ] - new_list = salt.utils.data.filter_falsey(old_list, recurse_depth=3) - expect_list = ["foo", ["foo"], ["foo"], {"foo": "bar"}] - self.assertEqual(expect_list, new_list) - self.assertIs(type(expect_list), type(new_list)) - - def test_filter_set_no_recurse(self): - """ - Test filtering a set without recursing. - Note that a set cannot contain unhashable types, so recursion is not possible. - """ - old_set = {"foo", None, 0, ""} - new_set = salt.utils.data.filter_falsey(old_set) - expect_set = {"foo"} - self.assertEqual(expect_set, new_set) - self.assertIs(type(expect_set), type(new_set)) - - def test_filter_ordereddict_no_recurse(self): - """ - Test filtering an OrderedDict without recursing. - """ - old_dict = OrderedDict( - [ - ("foo", None), - ( - "bar", - OrderedDict( - [ - ( - "baz", - OrderedDict([("qux", None), ("quux", ""), ("foo", [])]), - ) - ] - ), - ), - ("baz", ["qux"]), - ("qux", {}), - ("quux", []), - ] - ) - new_dict = salt.utils.data.filter_falsey(old_dict) - expect_dict = OrderedDict( - [ - ( - "bar", - OrderedDict( - [ - ( - "baz", - OrderedDict([("qux", None), ("quux", ""), ("foo", [])]), - ) - ] - ), - ), - ("baz", ["qux"]), - ] - ) - self.assertEqual(expect_dict, new_dict) - self.assertIs(type(expect_dict), type(new_dict)) - - def test_filter_ordereddict_recurse(self): - """ - Test filtering an OrderedDict with recursing. 
- """ - old_dict = OrderedDict( - [ - ("foo", None), - ( - "bar", - OrderedDict( - [ - ( - "baz", - OrderedDict([("qux", None), ("quux", ""), ("foo", [])]), - ) - ] - ), - ), - ("baz", ["qux"]), - ("qux", {}), - ("quux", []), - ] - ) - new_dict = salt.utils.data.filter_falsey(old_dict, recurse_depth=3) - expect_dict = OrderedDict([("baz", ["qux"])]) - self.assertEqual(expect_dict, new_dict) - self.assertIs(type(expect_dict), type(new_dict)) - - def test_filter_list_recurse_limit(self): - """ - Test filtering a list with recursing, but with a limited depth. - Note that the top-level is always processed, so a recursion depth of 2 - means that two *additional* levels are processed. - """ - old_list = [None, [None, [None, [None]]]] - new_list = salt.utils.data.filter_falsey(old_list, recurse_depth=2) - self.assertEqual([[[[None]]]], new_list) - - def test_filter_dict_recurse_limit(self): - """ - Test filtering a dict with recursing, but with a limited depth. - Note that the top-level is always processed, so a recursion depth of 2 - means that two *additional* levels are processed. - """ - old_dict = { - "one": None, - "foo": {"two": None, "bar": {"three": None, "baz": {"four": None}}}, - } - new_dict = salt.utils.data.filter_falsey(old_dict, recurse_depth=2) - self.assertEqual({"foo": {"bar": {"baz": {"four": None}}}}, new_dict) - - def test_filter_exclude_types(self): - """ - Test filtering a list recursively, but also ignoring (i.e. not filtering) - out certain types that can be falsey. 
- """ - # Ignore int, unicode - old_list = [ - "foo", - ["foo"], - ["foo", None], - {"foo": 0}, - {"foo": "bar", "baz": []}, - [{"foo": ""}], - ] - new_list = salt.utils.data.filter_falsey( - old_list, recurse_depth=3, ignore_types=[int, str] - ) - self.assertEqual( - ["foo", ["foo"], ["foo"], {"foo": 0}, {"foo": "bar"}, [{"foo": ""}]], - new_list, - ) - # Ignore list - old_list = [ - "foo", - ["foo"], - ["foo", None], - {"foo": 0}, - {"foo": "bar", "baz": []}, - [{"foo": ""}], - ] - new_list = salt.utils.data.filter_falsey( - old_list, recurse_depth=3, ignore_types=[type([])] - ) - self.assertEqual( - ["foo", ["foo"], ["foo"], {"foo": "bar", "baz": []}, []], new_list - ) - # Ignore dict - old_list = [ - "foo", - ["foo"], - ["foo", None], - {"foo": 0}, - {"foo": "bar", "baz": []}, - [{"foo": ""}], - ] - new_list = salt.utils.data.filter_falsey( - old_list, recurse_depth=3, ignore_types=[type({})] - ) - self.assertEqual(["foo", ["foo"], ["foo"], {}, {"foo": "bar"}, [{}]], new_list) - # Ignore NoneType - old_list = [ - "foo", - ["foo"], - ["foo", None], - {"foo": 0}, - {"foo": "bar", "baz": []}, - [{"foo": ""}], - ] - new_list = salt.utils.data.filter_falsey( - old_list, recurse_depth=3, ignore_types=[type(None)] - ) - self.assertEqual(["foo", ["foo"], ["foo", None], {"foo": "bar"}], new_list) - - -class FilterRecursiveDiff(TestCase): - """ - Test suite for salt.utils.data.recursive_diff - """ - - def test_list_equality(self): - """ - Test cases where equal lists are compared. - """ - test_list = [0, 1, 2] - self.assertEqual({}, salt.utils.data.recursive_diff(test_list, test_list)) - - test_list = [[0], [1], [0, 1, 2]] - self.assertEqual({}, salt.utils.data.recursive_diff(test_list, test_list)) - - def test_dict_equality(self): - """ - Test cases where equal dicts are compared. 
- """ - test_dict = {"foo": "bar", "bar": {"baz": {"qux": "quux"}}, "frop": 0} - self.assertEqual({}, salt.utils.data.recursive_diff(test_dict, test_dict)) - - def test_ordereddict_equality(self): - """ - Test cases where equal OrderedDicts are compared. - """ - test_dict = OrderedDict( - [ - ("foo", "bar"), - ("bar", OrderedDict([("baz", OrderedDict([("qux", "quux")]))])), - ("frop", 0), - ] - ) - self.assertEqual({}, salt.utils.data.recursive_diff(test_dict, test_dict)) - - def test_mixed_equality(self): - """ - Test cases where mixed nested lists and dicts are compared. - """ - test_data = { - "foo": "bar", - "baz": [0, 1, 2], - "bar": {"baz": [{"qux": "quux"}, {"froop", 0}]}, - } - self.assertEqual({}, salt.utils.data.recursive_diff(test_data, test_data)) - - def test_set_equality(self): - """ - Test cases where equal sets are compared. - """ - test_set = {0, 1, 2, 3, "foo"} - self.assertEqual({}, salt.utils.data.recursive_diff(test_set, test_set)) - - # This is a bit of an oddity, as python seems to sort the sets in memory - # so both sets end up with the same ordering (0..3). - set_one = {0, 1, 2, 3} - set_two = {3, 2, 1, 0} - self.assertEqual({}, salt.utils.data.recursive_diff(set_one, set_two)) - - def test_tuple_equality(self): - """ - Test cases where equal tuples are compared. - """ - test_tuple = (0, 1, 2, 3, "foo") - self.assertEqual({}, salt.utils.data.recursive_diff(test_tuple, test_tuple)) - - def test_list_inequality(self): - """ - Test cases where two inequal lists are compared. 
- """ - list_one = [0, 1, 2] - list_two = ["foo", "bar", "baz"] - expected_result = {"old": list_one, "new": list_two} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(list_one, list_two) - ) - expected_result = {"new": list_one, "old": list_two} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(list_two, list_one) - ) - - list_one = [0, "foo", 1, "bar"] - list_two = [1, "foo", 1, "qux"] - expected_result = {"old": [0, "bar"], "new": [1, "qux"]} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(list_one, list_two) - ) - expected_result = {"new": [0, "bar"], "old": [1, "qux"]} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(list_two, list_one) - ) - - list_one = [0, 1, [2, 3]] - list_two = [0, 1, ["foo", "bar"]] - expected_result = {"old": [[2, 3]], "new": [["foo", "bar"]]} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(list_one, list_two) - ) - expected_result = {"new": [[2, 3]], "old": [["foo", "bar"]]} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(list_two, list_one) - ) - - def test_dict_inequality(self): - """ - Test cases where two inequal dicts are compared. 
- """ - dict_one = {"foo": 1, "bar": 2, "baz": 3} - dict_two = {"foo": 2, 1: "bar", "baz": 3} - expected_result = {"old": {"foo": 1, "bar": 2}, "new": {"foo": 2, 1: "bar"}} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(dict_one, dict_two) - ) - expected_result = {"new": {"foo": 1, "bar": 2}, "old": {"foo": 2, 1: "bar"}} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(dict_two, dict_one) - ) - - dict_one = {"foo": {"bar": {"baz": 1}}} - dict_two = {"foo": {"qux": {"baz": 1}}} - expected_result = {"old": dict_one, "new": dict_two} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(dict_one, dict_two) - ) - expected_result = {"new": dict_one, "old": dict_two} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(dict_two, dict_one) - ) - - def test_ordereddict_inequality(self): - """ - Test cases where two inequal OrderedDicts are compared. - """ - odict_one = OrderedDict([("foo", "bar"), ("bar", "baz")]) - odict_two = OrderedDict([("bar", "baz"), ("foo", "bar")]) - expected_result = {"old": odict_one, "new": odict_two} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(odict_one, odict_two) - ) - - def test_set_inequality(self): - """ - Test cases where two inequal sets are compared. - Tricky as the sets are compared zipped, so shuffled sets of equal values - are considered different. - """ - set_one = {0, 1, 2, 4} - set_two = {0, 1, 3, 4} - expected_result = {"old": {2}, "new": {3}} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(set_one, set_two) - ) - expected_result = {"new": {2}, "old": {3}} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(set_two, set_one) - ) - - # It is unknown how different python versions will store sets in memory. - # Python 2.7 seems to sort it (i.e. set_one below becomes {0, 1, 'foo', 'bar'} - # However Python 3.6.8 stores it differently each run. - # So just test for "not equal" here. 
- set_one = {0, "foo", 1, "bar"} - set_two = {"foo", 1, "bar", 2} - expected_result = {} - self.assertNotEqual( - expected_result, salt.utils.data.recursive_diff(set_one, set_two) - ) - - def test_mixed_inequality(self): - """ - Test cases where two mixed dicts/iterables that are different are compared. - """ - dict_one = {"foo": [1, 2, 3]} - dict_two = {"foo": [3, 2, 1]} - expected_result = {"old": {"foo": [1, 3]}, "new": {"foo": [3, 1]}} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(dict_one, dict_two) - ) - expected_result = {"new": {"foo": [1, 3]}, "old": {"foo": [3, 1]}} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(dict_two, dict_one) - ) - - list_one = [1, 2, {"foo": ["bar", {"foo": 1, "bar": 2}]}] - list_two = [3, 4, {"foo": ["qux", {"foo": 1, "bar": 2}]}] - expected_result = { - "old": [1, 2, {"foo": ["bar"]}], - "new": [3, 4, {"foo": ["qux"]}], - } - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(list_one, list_two) - ) - expected_result = { - "new": [1, 2, {"foo": ["bar"]}], - "old": [3, 4, {"foo": ["qux"]}], - } - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(list_two, list_one) - ) - - mixed_one = {"foo": {0, 1, 2}, "bar": [0, 1, 2]} - mixed_two = {"foo": {1, 2, 3}, "bar": [1, 2, 3]} - expected_result = { - "old": {"foo": {0}, "bar": [0, 1, 2]}, - "new": {"foo": {3}, "bar": [1, 2, 3]}, - } - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(mixed_one, mixed_two) - ) - expected_result = { - "new": {"foo": {0}, "bar": [0, 1, 2]}, - "old": {"foo": {3}, "bar": [1, 2, 3]}, - } - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(mixed_two, mixed_one) - ) - - def test_tuple_inequality(self): - """ - Test cases where two tuples that are different are compared. 
- """ - tuple_one = (1, 2, 3) - tuple_two = (3, 2, 1) - expected_result = {"old": (1, 3), "new": (3, 1)} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(tuple_one, tuple_two) - ) - - def test_list_vs_set(self): - """ - Test case comparing a list with a set, will be compared unordered. - """ - mixed_one = [1, 2, 3] - mixed_two = {3, 2, 1} - expected_result = {} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(mixed_one, mixed_two) - ) - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(mixed_two, mixed_one) - ) - - def test_dict_vs_ordereddict(self): - """ - Test case comparing a dict with an ordereddict, will be compared unordered. - """ - test_dict = {"foo": "bar", "bar": "baz"} - test_odict = OrderedDict([("foo", "bar"), ("bar", "baz")]) - self.assertEqual({}, salt.utils.data.recursive_diff(test_dict, test_odict)) - self.assertEqual({}, salt.utils.data.recursive_diff(test_odict, test_dict)) - - test_odict2 = OrderedDict([("bar", "baz"), ("foo", "bar")]) - self.assertEqual({}, salt.utils.data.recursive_diff(test_dict, test_odict2)) - self.assertEqual({}, salt.utils.data.recursive_diff(test_odict2, test_dict)) - - def test_list_ignore_ignored(self): - """ - Test case comparing two lists with ignore-list supplied (which is not used - when comparing lists). - """ - list_one = [1, 2, 3] - list_two = [3, 2, 1] - expected_result = {"old": [1, 3], "new": [3, 1]} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff(list_one, list_two, ignore_keys=[1, 3]), - ) - - def test_dict_ignore(self): - """ - Test case comparing two dicts with ignore-list supplied. 
- """ - dict_one = {"foo": 1, "bar": 2, "baz": 3} - dict_two = {"foo": 3, "bar": 2, "baz": 1} - expected_result = {"old": {"baz": 3}, "new": {"baz": 1}} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff(dict_one, dict_two, ignore_keys=["foo"]), - ) - - def test_ordereddict_ignore(self): - """ - Test case comparing two OrderedDicts with ignore-list supplied. - """ - odict_one = OrderedDict([("foo", 1), ("bar", 2), ("baz", 3)]) - odict_two = OrderedDict([("baz", 1), ("bar", 2), ("foo", 3)]) - # The key 'foo' will be ignored, which means the key from the other OrderedDict - # will always be considered "different" since OrderedDicts are compared ordered. - expected_result = { - "old": OrderedDict([("baz", 3)]), - "new": OrderedDict([("baz", 1)]), - } - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff(odict_one, odict_two, ignore_keys=["foo"]), - ) - - def test_dict_vs_ordereddict_ignore(self): - """ - Test case comparing a dict with an OrderedDict with ignore-list supplied. - """ - dict_one = {"foo": 1, "bar": 2, "baz": 3} - odict_two = OrderedDict([("foo", 3), ("bar", 2), ("baz", 1)]) - expected_result = {"old": {"baz": 3}, "new": OrderedDict([("baz", 1)])} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff(dict_one, odict_two, ignore_keys=["foo"]), - ) - - def test_mixed_nested_ignore(self): - """ - Test case comparing mixed, nested items with ignore-list supplied. - """ - dict_one = {"foo": [1], "bar": {"foo": 1, "bar": 2}, "baz": 3} - dict_two = {"foo": [2], "bar": {"foo": 3, "bar": 2}, "baz": 1} - expected_result = {"old": {"baz": 3}, "new": {"baz": 1}} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff(dict_one, dict_two, ignore_keys=["foo"]), - ) - - def test_ordered_dict_unequal_length(self): - """ - Test case comparing two OrderedDicts of unequal length. 
- """ - odict_one = OrderedDict([("foo", 1), ("bar", 2), ("baz", 3)]) - odict_two = OrderedDict([("foo", 1), ("bar", 2)]) - expected_result = {"old": OrderedDict([("baz", 3)]), "new": {}} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(odict_one, odict_two) - ) - - def test_list_unequal_length(self): - """ - Test case comparing two lists of unequal length. - """ - list_one = [1, 2, 3] - list_two = [1, 2, 3, 4] - expected_result = {"old": [], "new": [4]} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(list_one, list_two) - ) - - def test_set_unequal_length(self): - """ - Test case comparing two sets of unequal length. - This does not do anything special, as it is unordered. - """ - set_one = {1, 2, 3} - set_two = {4, 3, 2, 1} - expected_result = {"old": set(), "new": {4}} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(set_one, set_two) - ) - - def test_tuple_unequal_length(self): - """ - Test case comparing two tuples of unequal length. - This should be the same as comparing two ordered lists. - """ - tuple_one = (1, 2, 3) - tuple_two = (1, 2, 3, 4) - expected_result = {"old": (), "new": (4,)} - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(tuple_one, tuple_two) - ) - - def test_list_unordered(self): - """ - Test case comparing two lists unordered. - """ - list_one = [1, 2, 3, 4] - list_two = [4, 3, 2] - expected_result = {"old": [1], "new": []} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff(list_one, list_two, ignore_order=True), - ) - - def test_mixed_nested_unordered(self): - """ - Test case comparing nested dicts/lists unordered. 
- """ - dict_one = {"foo": {"bar": [1, 2, 3]}, "bar": [{"foo": 4}, 0]} - dict_two = {"foo": {"bar": [3, 2, 1]}, "bar": [0, {"foo": 4}]} - expected_result = {} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff(dict_one, dict_two, ignore_order=True), - ) - expected_result = { - "old": {"foo": {"bar": [1, 3]}, "bar": [{"foo": 4}, 0]}, - "new": {"foo": {"bar": [3, 1]}, "bar": [0, {"foo": 4}]}, - } - self.assertEqual( - expected_result, salt.utils.data.recursive_diff(dict_one, dict_two) - ) - - def test_ordered_dict_unordered(self): - """ - Test case comparing OrderedDicts unordered. - """ - odict_one = OrderedDict([("foo", 1), ("bar", 2), ("baz", 3)]) - odict_two = OrderedDict([("baz", 3), ("bar", 2), ("foo", 1)]) - expected_result = {} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff(odict_one, odict_two, ignore_order=True), - ) - - def test_ignore_missing_keys_dict(self): - """ - Test case ignoring missing keys on a comparison of dicts. - """ - dict_one = {"foo": 1, "bar": 2, "baz": 3} - dict_two = {"bar": 3} - expected_result = {"old": {"bar": 2}, "new": {"bar": 3}} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff( - dict_one, dict_two, ignore_missing_keys=True - ), - ) - - def test_ignore_missing_keys_ordered_dict(self): - """ - Test case not ignoring missing keys on a comparison of OrderedDicts. - """ - odict_one = OrderedDict([("foo", 1), ("bar", 2), ("baz", 3)]) - odict_two = OrderedDict([("bar", 3)]) - expected_result = {"old": odict_one, "new": odict_two} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff( - odict_one, odict_two, ignore_missing_keys=True - ), - ) - - def test_ignore_missing_keys_recursive(self): - """ - Test case ignoring missing keys on a comparison of nested dicts. 
- """ - dict_one = {"foo": {"bar": 2, "baz": 3}} - dict_two = {"foo": {"baz": 3}} - expected_result = {} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff( - dict_one, dict_two, ignore_missing_keys=True - ), - ) - # Compare from dict-in-dict - dict_two = {} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff( - dict_one, dict_two, ignore_missing_keys=True - ), - ) - # Compare from dict-in-list - dict_one = {"foo": ["bar", {"baz": 3}]} - dict_two = {"foo": ["bar", {}]} - self.assertEqual( - expected_result, - salt.utils.data.recursive_diff( - dict_one, dict_two, ignore_missing_keys=True - ), - ) diff --git a/tests/unit/utils/test_find.py b/tests/unit/utils/test_find.py index bc81c48554d8..1960d4a35103 100644 --- a/tests/unit/utils/test_find.py +++ b/tests/unit/utils/test_find.py @@ -332,6 +332,7 @@ def test_print_option_requires(self): option = salt.utils.find.PrintOption("print", "path user") self.assertEqual(option.requires(), salt.utils.find._REQUIRES_STAT) + @pytest.mark.skip_on_fips_enabled_platform def test_print_option_execute(self): hello_file = os.path.join(self.tmpdir, "hello.txt") with salt.utils.files.fopen(hello_file, "w") as fp_: diff --git a/tests/unit/utils/test_gitfs.py b/tests/unit/utils/test_gitfs.py index b99da3ef9160..259ea056fcd4 100644 --- a/tests/unit/utils/test_gitfs.py +++ b/tests/unit/utils/test_gitfs.py @@ -2,36 +2,20 @@ These only test the provider selection and verification logic, they do not init any remotes. 
""" -import os -import shutil -from time import time + +import tempfile import pytest +import salt.ext.tornado.ioloop import salt.fileserver.gitfs import salt.utils.files import salt.utils.gitfs +import salt.utils.path import salt.utils.platform -import tests.support.paths -from salt.exceptions import FileserverConfigError from tests.support.mixins import AdaptedConfigurationTestCaseMixin -from tests.support.mock import MagicMock, patch from tests.support.unit import TestCase -try: - HAS_PYGIT2 = ( - salt.utils.gitfs.PYGIT2_VERSION - and salt.utils.gitfs.PYGIT2_VERSION >= salt.utils.gitfs.PYGIT2_MINVER - and salt.utils.gitfs.LIBGIT2_VERSION - and salt.utils.gitfs.LIBGIT2_VERSION >= salt.utils.gitfs.LIBGIT2_MINVER - ) -except AttributeError: - HAS_PYGIT2 = False - - -if HAS_PYGIT2: - import pygit2 - def _clear_instance_map(): try: @@ -44,6 +28,9 @@ def _clear_instance_map(): class TestGitBase(TestCase, AdaptedConfigurationTestCaseMixin): def setUp(self): + self._tmp_dir = tempfile.TemporaryDirectory() + tmp_name = self._tmp_dir.name + class MockedProvider( salt.utils.gitfs.GitProvider ): # pylint: disable=abstract-method @@ -70,6 +57,7 @@ def __init__( ) def init_remote(self): + self.gitdir = salt.utils.path.join(tmp_name, ".git") self.repo = True new = False return new @@ -106,6 +94,7 @@ def tearDown(self): for remote in self.main_class.remotes: remote.fetched = False del self.main_class + self._tmp_dir.cleanup() def test_update_all(self): self.main_class.update() @@ -125,213 +114,60 @@ def test_update_by_id_and_name(self): self.assertTrue(self.main_class.remotes[0].fetched) self.assertFalse(self.main_class.remotes[1].fetched) + def test_get_cachedir_basename(self): + self.assertEqual( + self.main_class.remotes[0].get_cache_basename(), + "_", + ) + self.assertEqual( + self.main_class.remotes[1].get_cache_basename(), + "_", + ) -class TestGitFSProvider(TestCase): - def setUp(self): - self.opts = {"cachedir": "/tmp/gitfs-test-cache"} - - def tearDown(self): - 
self.opts = None - - def test_provider_case_insensitive(self): + def test_git_provider_mp_lock(self): """ - Ensure that both lowercase and non-lowercase values are supported + Check that lock is released after provider.lock() """ - provider = "GitPython" - for role_name, role_class in ( - ("gitfs", salt.utils.gitfs.GitFS), - ("git_pillar", salt.utils.gitfs.GitPillar), - ("winrepo", salt.utils.gitfs.WinRepo), - ): - - key = "{}_provider".format(role_name) - with patch.object( - role_class, "verify_gitpython", MagicMock(return_value=True) - ): - with patch.object( - role_class, "verify_pygit2", MagicMock(return_value=False) - ): - args = [self.opts, {}] - kwargs = {"init_remotes": False} - if role_name == "winrepo": - kwargs["cache_root"] = "/tmp/winrepo-dir" - with patch.dict(self.opts, {key: provider}): - # Try to create an instance with uppercase letters in - # provider name. If it fails then a - # FileserverConfigError will be raised, so no assert is - # necessary. - role_class(*args, **kwargs) - # Now try to instantiate an instance with all lowercase - # letters. Again, no need for an assert here. - role_class(*args, **kwargs) + provider = self.main_class.remotes[0] + provider.lock() + # check that lock has been released + self.assertTrue(provider._master_lock.acquire(timeout=5)) + provider._master_lock.release() - def test_valid_provider(self): + def test_git_provider_mp_clear_lock(self): """ - Ensure that an invalid provider is not accepted, raising a - FileserverConfigError. 
+ Check that lock is released after provider.clear_lock() """ - - def _get_mock(verify, provider): - """ - Return a MagicMock with the desired return value - """ - return MagicMock(return_value=verify.endswith(provider)) - - for role_name, role_class in ( - ("gitfs", salt.utils.gitfs.GitFS), - ("git_pillar", salt.utils.gitfs.GitPillar), - ("winrepo", salt.utils.gitfs.WinRepo), - ): - key = "{}_provider".format(role_name) - for provider in salt.utils.gitfs.GIT_PROVIDERS: - verify = "verify_gitpython" - mock1 = _get_mock(verify, provider) - with patch.object(role_class, verify, mock1): - verify = "verify_pygit2" - mock2 = _get_mock(verify, provider) - with patch.object(role_class, verify, mock2): - args = [self.opts, {}] - kwargs = {"init_remotes": False} - if role_name == "winrepo": - kwargs["cache_root"] = "/tmp/winrepo-dir" - - with patch.dict(self.opts, {key: provider}): - role_class(*args, **kwargs) - - with patch.dict(self.opts, {key: "foo"}): - # Set the provider name to a known invalid provider - # and make sure it raises an exception. 
- self.assertRaises( - FileserverConfigError, role_class, *args, **kwargs - ) - - -@pytest.mark.skipif(not HAS_PYGIT2, reason="This host lacks proper pygit2 support") -@pytest.mark.skip_on_windows( - reason="Skip Pygit2 on windows, due to pygit2 access error on windows" -) -class TestPygit2(TestCase): - def _prepare_remote_repository(self, path): - shutil.rmtree(path, ignore_errors=True) - - filecontent = "This is an empty README file" - filename = "README" - - signature = pygit2.Signature( - "Dummy Commiter", "dummy@dummy.com", int(time()), 0 - ) - - repository = pygit2.init_repository(path, False) - builder = repository.TreeBuilder() - tree = builder.write() - commit = repository.create_commit( - "HEAD", signature, signature, "Create master branch", tree, [] - ) - repository.create_reference("refs/tags/simple_tag", commit) - - with salt.utils.files.fopen( - os.path.join(repository.workdir, filename), "w" - ) as file: - file.write(filecontent) - - blob = repository.create_blob_fromworkdir(filename) - builder = repository.TreeBuilder() - builder.insert(filename, blob, pygit2.GIT_FILEMODE_BLOB) - tree = builder.write() - - repository.index.read() - repository.index.add(filename) - repository.index.write() - - commit = repository.create_commit( - "HEAD", - signature, - signature, - "Added a README", - tree, - [repository.head.target], - ) - repository.create_tag( - "annotated_tag", commit, pygit2.GIT_OBJ_COMMIT, signature, "some message" - ) - - def _prepare_cache_repository(self, remote, cache): - opts = { - "cachedir": cache, - "__role": "minion", - "gitfs_disable_saltenv_mapping": False, - "gitfs_base": "master", - "gitfs_insecure_auth": False, - "gitfs_mountpoint": "", - "gitfs_passphrase": "", - "gitfs_password": "", - "gitfs_privkey": "", - "gitfs_provider": "pygit2", - "gitfs_pubkey": "", - "gitfs_ref_types": ["branch", "tag", "sha"], - "gitfs_refspecs": [ - "+refs/heads/*:refs/remotes/origin/*", - "+refs/tags/*:refs/tags/*", - ], - "gitfs_root": "", - 
"gitfs_saltenv_blacklist": [], - "gitfs_saltenv_whitelist": [], - "gitfs_ssl_verify": True, - "gitfs_update_interval": 3, - "gitfs_user": "", - "verified_gitfs_provider": "pygit2", - } - per_remote_defaults = { - "base": "master", - "disable_saltenv_mapping": False, - "insecure_auth": False, - "ref_types": ["branch", "tag", "sha"], - "passphrase": "", - "mountpoint": "", - "password": "", - "privkey": "", - "pubkey": "", - "refspecs": [ - "+refs/heads/*:refs/remotes/origin/*", - "+refs/tags/*:refs/tags/*", - ], - "root": "", - "saltenv_blacklist": [], - "saltenv_whitelist": [], - "ssl_verify": True, - "update_interval": 60, - "user": "", - } - per_remote_only = ("all_saltenvs", "name", "saltenv") - override_params = tuple(per_remote_defaults.keys()) - cache_root = os.path.join(cache, "gitfs") - role = "gitfs" - shutil.rmtree(cache_root, ignore_errors=True) - provider = salt.utils.gitfs.Pygit2( - opts, - remote, - per_remote_defaults, - per_remote_only, - override_params, - cache_root, - role, - ) - return provider - - def test_checkout(self): - remote = os.path.join(tests.support.paths.TMP, "pygit2-repo") - cache = os.path.join(tests.support.paths.TMP, "pygit2-repo-cache") - self._prepare_remote_repository(remote) - provider = self._prepare_cache_repository(remote, cache) - provider.remotecallbacks = None - provider.credentials = None - provider.init_remote() - provider.fetch() - provider.branch = "master" - self.assertIn(provider.cachedir, provider.checkout()) - provider.branch = "simple_tag" - self.assertIn(provider.cachedir, provider.checkout()) - provider.branch = "annotated_tag" - self.assertIn(provider.cachedir, provider.checkout()) - provider.branch = "does_not_exist" - self.assertIsNone(provider.checkout()) + provider = self.main_class.remotes[0] + provider.clear_lock() + # check that lock has been released + self.assertTrue(provider._master_lock.acquire(timeout=5)) + provider._master_lock.release() + + @pytest.mark.slow_test + def 
test_git_provider_mp_lock_timeout(self): + """ + Check that lock will time out if master lock is locked. + """ + provider = self.main_class.remotes[0] + # Hijack the lock so git provider is fooled into thinking another instance is doing somthing. + self.assertTrue(provider._master_lock.acquire(timeout=5)) + try: + # git provider should raise timeout error to avoid lock race conditions + self.assertRaises(TimeoutError, provider.lock) + finally: + provider._master_lock.release() + + @pytest.mark.slow_test + def test_git_provider_mp_clear_lock_timeout(self): + """ + Check that clear lock will time out if master lock is locked. + """ + provider = self.main_class.remotes[0] + # Hijack the lock so git provider is fooled into thinking another instance is doing somthing. + self.assertTrue(provider._master_lock.acquire(timeout=5)) + try: + # git provider should raise timeout error to avoid lock race conditions + self.assertRaises(TimeoutError, provider.clear_lock) + finally: + provider._master_lock.release() diff --git a/tests/unit/utils/test_hashutils.py b/tests/unit/utils/test_hashutils.py index 5cf11c114efc..b9a685957a53 100644 --- a/tests/unit/utils/test_hashutils.py +++ b/tests/unit/utils/test_hashutils.py @@ -1,3 +1,5 @@ +import pytest + import salt.utils.hashutils from tests.support.unit import TestCase @@ -87,6 +89,7 @@ def test_base64_decodestring(self): self.bytes, ) + @pytest.mark.skip_on_fips_enabled_platform def test_md5_digest(self): """ Ensure that this function converts the value passed to bytes before diff --git a/tests/unit/utils/test_http.py b/tests/unit/utils/test_http.py deleted file mode 100644 index d9a84f9582a5..000000000000 --- a/tests/unit/utils/test_http.py +++ /dev/null @@ -1,299 +0,0 @@ -""" - :codeauthor: Nicole Thomas -""" - -import socket -from contextlib import closing - -import pytest -from saltfactories.utils.tempfiles import temp_file - -import salt.utils.http as http -from tests.support.helpers import MirrorPostHandler, Webserver -from 
tests.support.mock import MagicMock, patch -from tests.support.runtests import RUNTIME_VARS -from tests.support.unit import TestCase - -try: - import salt.ext.tornado.curl_httpclient # pylint: disable=unused-import - - HAS_CURL = True -except ImportError: - HAS_CURL = False - - -class HTTPTestCase(TestCase): - """ - Unit TestCase for the salt.utils.http module. - """ - - @classmethod - def setUpClass(cls): - cls.post_webserver = Webserver(handler=MirrorPostHandler) - cls.post_webserver.start() - cls.post_web_root = cls.post_webserver.web_root - - @classmethod - def tearDownClass(cls): - cls.post_webserver.stop() - del cls.post_webserver - - # sanitize_url tests - - def test_sanitize_url_hide_fields_none(self): - """ - Tests sanitizing a url when the hide_fields kwarg is None. - """ - mock_url = "https://api.testing.com/?&foo=bar&test=testing" - ret = http.sanitize_url(mock_url, hide_fields=None) - self.assertEqual(ret, mock_url) - - def test_sanitize_url_no_elements(self): - """ - Tests sanitizing a url when no elements should be sanitized. - """ - mock_url = "https://api.testing.com/?&foo=bar&test=testing" - ret = http.sanitize_url(mock_url, [""]) - self.assertEqual(ret, mock_url) - - def test_sanitize_url_single_element(self): - """ - Tests sanitizing a url with only a single element to be sanitized. - """ - mock_url = ( - "https://api.testing.com/?&keep_it_secret=abcdefghijklmn" - "&api_action=module.function" - ) - mock_ret = ( - "https://api.testing.com/?&keep_it_secret=XXXXXXXXXX&" - "api_action=module.function" - ) - ret = http.sanitize_url(mock_url, ["keep_it_secret"]) - self.assertEqual(ret, mock_ret) - - def test_sanitize_url_multiple_elements(self): - """ - Tests sanitizing a url with multiple elements to be sanitized. 
- """ - mock_url = ( - "https://api.testing.com/?rootPass=badpassword%21" - "&skipChecks=True&api_key=abcdefghijklmn" - "&NodeID=12345&api_action=module.function" - ) - mock_ret = ( - "https://api.testing.com/?rootPass=XXXXXXXXXX" - "&skipChecks=True&api_key=XXXXXXXXXX" - "&NodeID=12345&api_action=module.function" - ) - ret = http.sanitize_url(mock_url, ["api_key", "rootPass"]) - self.assertEqual(ret, mock_ret) - - # _sanitize_components tests - - def test_sanitize_components_no_elements(self): - """ - Tests when zero elements need to be sanitized. - """ - mock_component_list = ["foo=bar", "bar=baz", "hello=world"] - mock_ret = "foo=bar&bar=baz&hello=world&" - ret = http._sanitize_url_components(mock_component_list, "api_key") - self.assertEqual(ret, mock_ret) - - def test_sanitize_components_one_element(self): - """ - Tests a single component to be sanitized. - """ - mock_component_list = ["foo=bar", "api_key=abcdefghijklmnop"] - mock_ret = "foo=bar&api_key=XXXXXXXXXX&" - ret = http._sanitize_url_components(mock_component_list, "api_key") - self.assertEqual(ret, mock_ret) - - def test_sanitize_components_multiple_elements(self): - """ - Tests two componenets to be sanitized. - """ - mock_component_list = ["foo=bar", "foo=baz", "api_key=testing"] - mock_ret = "foo=XXXXXXXXXX&foo=XXXXXXXXXX&api_key=testing&" - ret = http._sanitize_url_components(mock_component_list, "foo") - self.assertEqual(ret, mock_ret) - - @pytest.mark.slow_test - def test_query_null_response(self): - """ - This tests that we get a null response when raise_error=False and the - host/port cannot be reached. 
- """ - host = "127.0.0.1" - - # Find unused port - with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as sock: - sock.bind((host, 0)) - port = sock.getsockname()[1] - - url = "http://{host}:{port}/".format(host=host, port=port) - result = http.query(url, raise_error=False) - assert result == {"body": None}, result - - def test_query_error_handling(self): - ret = http.query("http://127.0.0.1:0") - self.assertTrue(isinstance(ret, dict)) - self.assertTrue(isinstance(ret.get("error", None), str)) - ret = http.query("http://myfoobardomainthatnotexist") - self.assertTrue(isinstance(ret, dict)) - self.assertTrue(isinstance(ret.get("error", None), str)) - - def test_parse_cookie_header(self): - header = "; ".join( - [ - "foo=bar", - "expires=Mon, 03-Aug-20 14:26:27 GMT", - "path=/", - "domain=.mydomain.tld", - "HttpOnly", - "SameSite=Lax", - "Secure", - ] - ) - ret = http.parse_cookie_header(header) - cookie = ret.pop(0) - assert cookie.name == "foo", cookie.name - assert cookie.value == "bar", cookie.value - assert cookie.expires == 1596464787, cookie.expires - assert cookie.path == "/", cookie.path - assert cookie.domain == ".mydomain.tld", cookie.domain - assert cookie.secure - # Only one cookie should have been returned, if anything is left in the - # parse_cookie_header return then something went wrong. 
- assert not ret - - -class HTTPPostTestCase(TestCase): - """ - Unit TestCase for the salt.utils.http module when - using POST method - """ - - @classmethod - def setUpClass(cls): - cls.post_webserver = Webserver(handler=MirrorPostHandler) - cls.post_webserver.start() - cls.post_web_root = cls.post_webserver.web_root - - @classmethod - def tearDownClass(cls): - cls.post_webserver.stop() - del cls.post_webserver - - def test_requests_multipart_formdata_post(self): - """ - Test handling of a multipart/form-data POST using the requests backend - """ - match_this = ( - "{0}\r\nContent-Disposition: form-data;" - ' name="fieldname_here"\r\n\r\nmydatahere\r\n{0}--\r\n' - ) - ret = http.query( - self.post_web_root, - method="POST", - data="mydatahere", - formdata=True, - formdata_fieldname="fieldname_here", - backend="requests", - ) - body = ret.get("body", "") - boundary = body[: body.find("\r")] - self.assertEqual(body, match_this.format(boundary)) - - @pytest.mark.skipif( - HAS_CURL is False, - reason="Missing prerequisites for tornado.curl_httpclient library", - ) - def test_query_proxy(self): - """ - Test http.query with tornado and with proxy opts set - and then test with no_proxy set to ensure we dont - run into issue #55192 again. 
- """ - data = "mydatahere" - opts = { - "proxy_host": "127.0.0.1", - "proxy_port": 88, - "proxy_username": "salt_test", - "proxy_password": "super_secret", - } - - mock_curl = MagicMock() - - with patch("tornado.httpclient.HTTPClient.fetch", mock_curl): - ret = http.query( - self.post_web_root, - method="POST", - data=data, - backend="tornado", - opts=opts, - ) - - for opt in opts: - assert opt in mock_curl.call_args_list[0][1].keys() - - opts["no_proxy"] = ["127.0.0.1"] - - ret = http.query( - self.post_web_root, method="POST", data=data, backend="tornado", opts=opts - ) - body = ret.get("body", "") - assert body == data - - -class HTTPGetTestCase(TestCase): - """ - Unit TestCase for the salt.utils.http module when - using Get method - """ - - @classmethod - def setUpClass(cls): - cls.get_webserver = Webserver() - cls.get_webserver.start() - - @classmethod - def tearDownClass(cls): - cls.get_webserver.stop() - del cls.get_webserver - - def test_backends_decode_body_false(self): - """ - test all backends when using - decode_body=False that it returns - bytes and does not try to decode - """ - for backend in ["tornado", "requests", "urllib2"]: - ret = http.query( - self.get_webserver.url("custom.tar.gz"), - backend=backend, - decode_body=False, - ) - body = ret.get("body", "") - assert isinstance(body, bytes) - - def test_backends_decode_body_true(self): - """ - test all backends when using - decode_body=True that it returns - string and decodes it. 
- """ - core_state = """ - {}: - file: - - managed - - source: salt://testfile - - makedirs: true - """.format( - RUNTIME_VARS.TMP - ) - - with temp_file("core.sls", core_state, self.get_webserver.root): - for backend in ["tornado", "requests", "urllib2"]: - ret = http.query(self.get_webserver.url("core.sls"), backend=backend) - body = ret.get("body", "") - assert isinstance(body, str) diff --git a/tests/unit/utils/test_listdiffer.py b/tests/unit/utils/test_listdiffer.py deleted file mode 100644 index cd1922a5f6e0..000000000000 --- a/tests/unit/utils/test_listdiffer.py +++ /dev/null @@ -1,109 +0,0 @@ -from salt.utils import dictdiffer -from salt.utils.listdiffer import list_diff -from tests.support.unit import TestCase - -NONE = dictdiffer.RecursiveDictDiffer.NONE_VALUE - - -class ListDictDifferTestCase(TestCase): - def setUp(self): - old_list = [ - {"key": 1, "value": "foo1", "int_value": 101}, - {"key": 2, "value": "foo2", "int_value": 102}, - {"key": 3, "value": "foo3", "int_value": 103}, - ] - new_list = [ - {"key": 1, "value": "foo1", "int_value": 101}, - {"key": 2, "value": "foo2", "int_value": 112}, - {"key": 5, "value": "foo5", "int_value": 105}, - ] - self.list_diff = list_diff(old_list, new_list, key="key") - - def tearDown(self): - for attrname in ("list_diff",): - try: - delattr(self, attrname) - except AttributeError: - continue - - def test_added(self): - self.assertEqual(len(self.list_diff.added), 1) - self.assertDictEqual( - self.list_diff.added[0], {"key": 5, "value": "foo5", "int_value": 105} - ) - - def test_removed(self): - self.assertEqual(len(self.list_diff.removed), 1) - self.assertDictEqual( - self.list_diff.removed[0], {"key": 3, "value": "foo3", "int_value": 103} - ) - - def test_diffs(self): - self.assertEqual(len(self.list_diff.diffs), 3) - self.assertDictEqual( - self.list_diff.diffs[0], {2: {"int_value": {"new": 112, "old": 102}}} - ) - self.assertDictEqual( - self.list_diff.diffs[1], - # Added items - { - 5: { - "int_value": {"new": 
105, "old": NONE}, - "key": {"new": 5, "old": NONE}, - "value": {"new": "foo5", "old": NONE}, - } - }, - ) - self.assertDictEqual( - self.list_diff.diffs[2], - # Removed items - { - 3: { - "int_value": {"new": NONE, "old": 103}, - "key": {"new": NONE, "old": 3}, - "value": {"new": NONE, "old": "foo3"}, - } - }, - ) - - def test_new_values(self): - self.assertEqual(len(self.list_diff.new_values), 2) - self.assertDictEqual(self.list_diff.new_values[0], {"key": 2, "int_value": 112}) - self.assertDictEqual( - self.list_diff.new_values[1], {"key": 5, "value": "foo5", "int_value": 105} - ) - - def test_old_values(self): - self.assertEqual(len(self.list_diff.old_values), 2) - self.assertDictEqual(self.list_diff.old_values[0], {"key": 2, "int_value": 102}) - self.assertDictEqual( - self.list_diff.old_values[1], {"key": 3, "value": "foo3", "int_value": 103} - ) - - def test_changed_all(self): - self.assertEqual( - self.list_diff.changed(selection="all"), - [ - "key.2.int_value", - "key.5.int_value", - "key.5.value", - "key.3.int_value", - "key.3.value", - ], - ) - - def test_changed_intersect(self): - self.assertEqual( - self.list_diff.changed(selection="intersect"), ["key.2.int_value"] - ) - - def test_changes_str(self): - self.assertEqual( - self.list_diff.changes_str, - "\tidentified by key 2:\n" - "\tint_value from 102 to 112\n" - "\tidentified by key 3:\n" - "\twill be removed\n" - "\tidentified by key 5:\n" - "\twill be added\n", - ) diff --git a/tests/unit/utils/test_nacl.py b/tests/unit/utils/test_nacl.py deleted file mode 100644 index 0501c1fa4f5a..000000000000 --- a/tests/unit/utils/test_nacl.py +++ /dev/null @@ -1,149 +0,0 @@ -import os -import sys - -import pytest - -import salt.modules.config as config -import salt.utils.files -from tests.support.helpers import with_tempfile -from tests.support.mixins import LoaderModuleMockMixin -from tests.support.unit import TestCase - -try: - import libnacl.sealed # pylint: disable=unused-import - import libnacl.secret # 
pylint: disable=unused-import - - import salt.utils.nacl as nacl - - HAS_LIBNACL = True -except (ImportError, OSError, AttributeError): - HAS_LIBNACL = False - - -@pytest.mark.skipif(sys.version_info >= (3, 10), reason="Segfaults with python 3.10") -@pytest.mark.skipif( - not HAS_LIBNACL, reason="skipping test_nacl, reason=libnacl is unavailable" -) -class NaclUtilsTests(TestCase, LoaderModuleMockMixin): - def setup_loader_modules(self): - return { - nacl: {"__salt__": {"config.get": config.get}}, - config: {"__opts__": {}}, - } - - def setUp(self): - self.key = "C16NxgBhw8cqbhvPCDAn2pirwW1A1WEVLUexCsoUD2Y=" - self.pub = "+XWFfZXnfItS++a4gQf8Adu1aUlTgHWyTfsglbTdXyg=" - - def test_keygen(self): - """ - test nacl.keygen function - """ - ret = nacl.keygen() - assert all(key in ret for key in ret.keys()) - - @with_tempfile() - def test_keygen_sk_file(self, fpath): - """ - test nacl.keygen function - with sk_file set - """ - with salt.utils.files.fopen(fpath, "w") as wfh: - wfh.write(self.key) - # test sk_file - ret = nacl.keygen(sk_file=fpath) - assert "saved pk_file: {}.pub".format(fpath) == ret - - @with_tempfile() - def test_keygen_keyfile(self, fpath): - """ - test nacl.keygen function - with keyfile set - """ - with salt.utils.files.fopen(fpath, "w") as wfh: - wfh.write(self.key) - - ret = nacl.keygen(keyfile=fpath) - assert "saved pk_file: {}.pub".format(fpath) == ret - - @with_tempfile() - def test_enc_keyfile(self, fpath): - """ - test nacl.enc function - with keyfile and pk_file set - """ - with salt.utils.files.fopen(fpath, "w") as wfh: - wfh.write(self.key) - with salt.utils.files.fopen(fpath + ".pub", "w") as wfh: - wfh.write(self.pub) - - kwargs = { - "opts": {"pki_dir": os.path.dirname(fpath)}, - "keyfile": fpath, - "pk_file": fpath + ".pub", - } - ret = nacl.enc("blah", **kwargs) - assert isinstance(ret, bytes) - - @with_tempfile() - def test_enc_sk_file(self, fpath): - """ - test nacl.enc function - with sk_file and pk_file set - """ - with 
salt.utils.files.fopen(fpath, "w") as wfh: - wfh.write(self.key) - with salt.utils.files.fopen(fpath + ".pub", "w") as wfh: - wfh.write(self.pub) - - kwargs = { - "opts": {"pki_dir": os.path.dirname(fpath)}, - "sk_file": fpath, - "pk_file": fpath + ".pub", - } - ret = nacl.enc("blah", **kwargs) - assert isinstance(ret, bytes) - - @with_tempfile() - def test_dec_keyfile(self, fpath): - """ - test nacl.dec function - with keyfile and pk_file set - """ - with salt.utils.files.fopen(fpath, "w") as wfh: - wfh.write(self.key) - with salt.utils.files.fopen(fpath + ".pub", "w") as wfh: - wfh.write(self.pub) - - kwargs = { - "opts": {"pki_dir": os.path.dirname(fpath)}, - "keyfile": fpath, - "pk_file": fpath + ".pub", - } - - enc_data = nacl.enc("blah", **kwargs) - ret = nacl.dec(enc_data, **kwargs) - assert isinstance(ret, bytes) - assert ret == b"blah" - - @with_tempfile() - def test_dec_sk_file(self, fpath): - """ - test nacl.dec function - with sk_file and pk_file set - """ - with salt.utils.files.fopen(fpath, "w") as wfh: - wfh.write(self.key) - with salt.utils.files.fopen(fpath + ".pub", "w") as wfh: - wfh.write(self.pub) - - kwargs = { - "opts": {"pki_dir": os.path.dirname(fpath)}, - "sk_file": fpath, - "pk_file": fpath + ".pub", - } - - enc_data = nacl.enc("blah", **kwargs) - ret = nacl.dec(enc_data, **kwargs) - assert isinstance(ret, bytes) - assert ret == b"blah" diff --git a/tests/unit/utils/test_network.py b/tests/unit/utils/test_network.py deleted file mode 100644 index f7d39729300f..000000000000 --- a/tests/unit/utils/test_network.py +++ /dev/null @@ -1,1313 +0,0 @@ -import logging -import socket -import textwrap -import time - -import pytest - -import salt.exceptions -import salt.utils.network as network -from salt._compat import ipaddress -from tests.support.mock import MagicMock, create_autospec, mock_open, patch -from tests.support.unit import TestCase - -log = logging.getLogger(__name__) - -LINUX = """\ -eth0 Link encap:Ethernet HWaddr e0:3f:49:85:6a:af - 
inet addr:10.10.10.56 Bcast:10.10.10.255 Mask:255.255.252.0 - inet6 addr: fe80::e23f:49ff:fe85:6aaf/64 Scope:Link - UP BROADCAST RUNNING MULTICAST MTU:1500 Metric:1 - RX packets:643363 errors:0 dropped:0 overruns:0 frame:0 - TX packets:196539 errors:0 dropped:0 overruns:0 carrier:0 - collisions:0 txqueuelen:1000 - RX bytes:386388355 (368.4 MiB) TX bytes:25600939 (24.4 MiB) - -lo Link encap:Local Loopback - inet addr:127.0.0.1 Mask:255.0.0.0 - inet6 addr: ::1/128 Scope:Host - UP LOOPBACK RUNNING MTU:65536 Metric:1 - RX packets:548901 errors:0 dropped:0 overruns:0 frame:0 - TX packets:548901 errors:0 dropped:0 overruns:0 carrier:0 - collisions:0 txqueuelen:0 - RX bytes:613479895 (585.0 MiB) TX bytes:613479895 (585.0 MiB) -""" - -FREEBSD = """ -em0: flags=8843 metric 0 mtu 1500 - options=4219b - ether 00:30:48:ff:ff:ff - inet 10.10.10.250 netmask 0xffffffe0 broadcast 10.10.10.255 - inet 10.10.10.56 netmask 0xffffffc0 broadcast 10.10.10.63 - media: Ethernet autoselect (1000baseT ) - status: active -em1: flags=8c02 metric 0 mtu 1500 - options=4219b - ether 00:30:48:aa:aa:aa - media: Ethernet autoselect - status: no carrier -plip0: flags=8810 metric 0 mtu 1500 -lo0: flags=8049 metric 0 mtu 16384 - options=3 - inet6 fe80::1%lo0 prefixlen 64 scopeid 0x8 - inet6 ::1 prefixlen 128 - inet 127.0.0.1 netmask 0xff000000 - nd6 options=3 -tun0: flags=8051 metric 0 mtu 1500 - options=80000 - inet 10.12.0.1 --> 10.12.0.2 netmask 0xffffffff - Opened by PID 1964 -""" - -SOLARIS = """\ -lo0: flags=2001000849 mtu 8232 index 1 - inet 127.0.0.1 netmask ff000000 -net0: flags=100001100943 mtu 1500 index 2 - inet 10.10.10.38 netmask ffffffe0 broadcast 10.10.10.63 -ilbint0: flags=110001100843 mtu 1500 index 3 - inet 10.6.0.11 netmask ffffff00 broadcast 10.6.0.255 -ilbext0: flags=110001100843 mtu 1500 index 4 - inet 10.10.11.11 netmask ffffffe0 broadcast 10.10.11.31 -ilbext0:1: flags=110001100843 mtu 1500 index 4 - inet 10.10.11.12 netmask ffffffe0 broadcast 10.10.11.31 -vpn0: 
flags=1000011008d1 mtu 1480 index 5 - inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 - tunnel hop limit 64 - inet 10.6.0.14 --> 10.6.0.15 netmask ff000000 -lo0: flags=2002000849 mtu 8252 index 1 - inet6 ::1/128 -net0: flags=120002004941 mtu 1500 index 2 - inet6 fe80::221:9bff:fefd:2a22/10 -ilbint0: flags=120002000840 mtu 1500 index 3 - inet6 ::/0 -ilbext0: flags=120002000840 mtu 1500 index 4 - inet6 ::/0 -vpn0: flags=120002200850 mtu 1480 index 5 - inet tunnel src 10.10.11.12 tunnel dst 10.10.5.5 - tunnel hop limit 64 - inet6 ::/0 --> fe80::b2d6:7c10 -""" - -NETBSD = """\ -vioif0: flags=0x8943 mtu 1500 - ec_capabilities=1 - ec_enabled=0 - address: 00:a0:98:e6:83:18 - inet 192.168.1.80/24 broadcast 192.168.1.255 flags 0x0 - inet6 fe80::2a0:98ff:fee6:8318%vioif0/64 flags 0x0 scopeid 0x1 -lo0: flags=0x8049 mtu 33624 - inet 127.0.0.1/8 flags 0x0 - inet6 ::1/128 flags 0x20 - inet6 fe80::1%lo0/64 flags 0x0 scopeid 0x2 -""" - -FREEBSD_SOCKSTAT = """\ -USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS -root python2.7 1294 41 tcp4 127.0.0.1:61115 127.0.0.1:4506 -""" - -FREEBSD_SOCKSTAT_WITH_FAT_PID = """\ -USER COMMAND PID FD PROTO LOCAL ADDRESS FOREIGN ADDRESS -salt-master python2.781106 35 tcp4 127.0.0.1:61115 127.0.0.1:4506 -""" - -OPENBSD_NETSTAT = """\ -Active Internet connections -Proto Recv-Q Send-Q Local Address Foreign Address (state) -tcp 0 0 127.0.0.1.61115 127.0.0.1.4506 ESTABLISHED -""" - -LINUX_NETLINK_SS_OUTPUT = """\ -State Recv-Q Send-Q Local Address:Port Peer Address:Port -TIME-WAIT 0 0 [::1]:8009 [::1]:40368 -LISTEN 0 128 127.0.0.1:5903 0.0.0.0:* -ESTAB 0 0 [::ffff:127.0.0.1]:4506 [::ffff:127.0.0.1]:32315 -ESTAB 0 0 192.168.122.1:4506 192.168.122.177:24545 -ESTAB 0 0 127.0.0.1:56726 127.0.0.1:4505 -ESTAB 0 0 ::ffff:1.2.3.4:5678 ::ffff:1.2.3.4:4505 -""" - -IPV4_SUBNETS = { - True: ("10.10.0.0/24",), - False: ("10.10.0.0", "10.10.0.0/33", "FOO", 9, "0.9.800.1000/24"), -} -IPV6_SUBNETS = { - True: ("::1/128",), - False: ("::1", "::1/129", "FOO", 9, 
"aj01::feac/64"), -} - - -class NetworkTestCase(TestCase): - def test_sanitize_host_ip(self): - ret = network.sanitize_host("10.1./2.$3") - self.assertEqual(ret, "10.1.2.3") - - def test_sanitize_host_name(self): - """ - Should not remove the underscore - """ - ret = network.sanitize_host("foo_bar") - self.assertEqual(ret, "foo_bar") - - def test_host_to_ips(self): - """ - NOTE: When this test fails it's usually because the IP address has - changed. In these cases, we just need to update the IP address in the - assertion. - """ - - def _side_effect(host, *args): - try: - return { - "github.com": [ - (2, 1, 6, "", ("192.30.255.112", 0)), - (2, 1, 6, "", ("192.30.255.113", 0)), - ], - "ipv6host.foo": [ - (socket.AF_INET6, 1, 6, "", ("2001:a71::1", 0, 0, 0)), - ], - }[host] - except KeyError: - raise socket.gaierror(-2, "Name or service not known") - - getaddrinfo_mock = MagicMock(side_effect=_side_effect) - with patch.object(socket, "getaddrinfo", getaddrinfo_mock): - # Test host that can be resolved - ret = network.host_to_ips("github.com") - self.assertEqual(ret, ["192.30.255.112", "192.30.255.113"]) - # Test ipv6 - ret = network.host_to_ips("ipv6host.foo") - self.assertEqual(ret, ["2001:a71::1"]) - # Test host that can't be resolved - ret = network.host_to_ips("someothersite.com") - self.assertEqual(ret, None) - - def test_generate_minion_id(self): - self.assertTrue(network.generate_minion_id()) - - def test__generate_minion_id_with_unicode_in_etc_hosts(self): - """ - Test that unicode in /etc/hosts doesn't raise an error when - _generate_minion_id() helper is called to gather the hosts. 
- """ - content = textwrap.dedent( - """\ - # 以下为主机名解析 - ## ccc - 127.0.0.1 localhost thisismyhostname # 本机 - """ - ) - fopen_mock = mock_open(read_data={"/etc/hosts": content}) - with patch("salt.utils.files.fopen", fopen_mock): - assert "thisismyhostname" in network._generate_minion_id() - - def test_is_ip(self): - self.assertTrue(network.is_ip("10.10.0.3")) - self.assertFalse(network.is_ip("0.9.800.1000")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv6("sixteen-char-str")) - - def test_is_ipv4(self): - self.assertTrue(network.is_ipv4("10.10.0.3")) - self.assertFalse(network.is_ipv4("10.100.1")) - self.assertFalse(network.is_ipv4("2001:db8:0:1:1:1:1:1")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv4("sixteen-char-str")) - - def test_is_ipv6(self): - self.assertTrue(network.is_ipv6("2001:db8:0:1:1:1:1:1")) - self.assertTrue(network.is_ipv6("0:0:0:0:0:0:0:1")) - self.assertTrue(network.is_ipv6("::1")) - self.assertTrue(network.is_ipv6("::")) - self.assertTrue(network.is_ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334")) - self.assertTrue(network.is_ipv6("2001:0db8:85a3::8a2e:0370:7334")) - self.assertFalse(network.is_ipv6("2001:0db8:0370:7334")) - self.assertFalse(network.is_ipv6("2001:0db8:::0370:7334")) - self.assertFalse(network.is_ipv6("10.0.1.2")) - self.assertFalse(network.is_ipv6("2001.0db8.85a3.0000.0000.8a2e.0370.7334")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv6("sixteen-char-str")) - - def test_ipv6(self): - self.assertTrue(network.ipv6("2001:db8:0:1:1:1:1:1")) - self.assertTrue(network.ipv6("0:0:0:0:0:0:0:1")) - self.assertTrue(network.ipv6("::1")) - self.assertTrue(network.ipv6("::")) - self.assertTrue(network.ipv6("2001:0db8:85a3:0000:0000:8a2e:0370:7334")) - self.assertTrue(network.ipv6("2001:0db8:85a3::8a2e:0370:7334")) 
- self.assertTrue(network.ipv6("2001:67c:2e8::/48")) - - def test_is_loopback(self): - self.assertTrue(network.is_loopback("127.0.1.1")) - self.assertTrue(network.is_loopback("::1")) - self.assertFalse(network.is_loopback("10.0.1.2")) - self.assertFalse(network.is_loopback("2001:db8:0:1:1:1:1:1")) - # Check 16-char-long unicode string - # https://github.com/saltstack/salt/issues/51258 - self.assertFalse(network.is_ipv6("sixteen-char-str")) - - def test_parse_host_port(self): - _ip = ipaddress.ip_address - good_host_ports = { - "10.10.0.3": (_ip("10.10.0.3").compressed, None), - "10.10.0.3:1234": (_ip("10.10.0.3").compressed, 1234), - "2001:0db8:85a3::8a2e:0370:7334": ( - _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, - None, - ), - "[2001:0db8:85a3::8a2e:0370:7334]:1234": ( - _ip("2001:0db8:85a3::8a2e:0370:7334").compressed, - 1234, - ), - "2001:0db8:85a3::7334": (_ip("2001:0db8:85a3::7334").compressed, None), - "[2001:0db8:85a3::7334]:1234": ( - _ip("2001:0db8:85a3::7334").compressed, - 1234, - ), - } - bad_host_ports = [ - "10.10.0.3/24", - "10.10.0.3::1234", - "2001:0db8:0370:7334", - "2001:0db8:0370::7334]:1234", - "2001:0db8:0370:0:a:b:c:d:1234", - "host name", - "host name:1234", - "10.10.0.3:abcd", - ] - for host_port, assertion_value in good_host_ports.items(): - host = port = None - host, port = network.parse_host_port(host_port) - self.assertEqual((host, port), assertion_value) - - for host_port in bad_host_ports: - try: - self.assertRaises(ValueError, network.parse_host_port, host_port) - except AssertionError as _e_: - log.error( - 'bad host_port value: "%s" failed to trigger ValueError exception', - host_port, - ) - raise _e_ - - def test_dns_check(self): - hosts = [ - { - "host": "10.10.0.3", - "port": "", - "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], - "ret": "10.10.0.3", - }, - { - "host": "10.10.0.3", - "port": "1234", - "mocked": [(2, 1, 6, "", ("10.10.0.3", 0))], - "ret": "10.10.0.3", - }, - { - "host": "2001:0db8:85a3::8a2e:0370:7334", - 
"port": "", - "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], - "ret": "[2001:db8:85a3::8a2e:370:7334]", - }, - { - "host": "2001:0db8:85a3::8a2e:370:7334", - "port": "1234", - "mocked": [(10, 1, 6, "", ("2001:db8:85a3::8a2e:370:7334", 0, 0, 0))], - "ret": "[2001:db8:85a3::8a2e:370:7334]", - }, - { - "host": "salt-master", - "port": "1234", - "mocked": [(2, 1, 6, "", ("127.0.0.1", 0))], - "ret": "127.0.0.1", - }, - ] - for host in hosts: - with patch.object( - socket, - "getaddrinfo", - create_autospec(socket.getaddrinfo, return_value=host["mocked"]), - ): - with patch("socket.socket", create_autospec(socket.socket)): - ret = network.dns_check(host["host"], host["port"]) - self.assertEqual(ret, host["ret"]) - - def test_dns_check_ipv6_filter(self): - # raise exception to skip everything after the getaddrinfo call - with patch.object( - socket, - "getaddrinfo", - create_autospec(socket.getaddrinfo, side_effect=Exception), - ) as getaddrinfo: - for ipv6, param in [ - (None, socket.AF_UNSPEC), - (True, socket.AF_INET6), - (False, socket.AF_INET), - ]: - with self.assertRaises(Exception): - network.dns_check("foo", "1", ipv6=ipv6) - getaddrinfo.assert_called_with("foo", "1", param, socket.SOCK_STREAM) - - def test_dns_check_errors(self): - with patch.object( - socket, "getaddrinfo", create_autospec(socket.getaddrinfo, return_value=[]) - ): - with self.assertRaisesRegex( - salt.exceptions.SaltSystemExit, - "DNS lookup or connection check of 'foo' failed", - ): - network.dns_check("foo", "1") - - with patch.object( - socket, - "getaddrinfo", - create_autospec(socket.getaddrinfo, side_effect=TypeError), - ): - with self.assertRaisesRegex( - salt.exceptions.SaltSystemExit, "Invalid or unresolveable address" - ): - network.dns_check("foo", "1") - - def test_test_addrs(self): - # subset of real data from getaddrinfo against saltstack.com - addrinfo = [ - (30, 2, 17, "", ("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), - (30, 1, 6, "", 
("2600:9000:21eb:a800:8:1031:abc0:93a1", 0, 0, 0)), - (30, 2, 17, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), - (30, 1, 6, "", ("2600:9000:21eb:b400:8:1031:abc0:93a1", 0, 0, 0)), - (2, 1, 6, "", ("13.35.99.52", 0)), - (2, 2, 17, "", ("13.35.99.85", 0)), - (2, 1, 6, "", ("13.35.99.85", 0)), - (2, 2, 17, "", ("13.35.99.122", 0)), - ] - with patch("socket.socket", create_autospec(socket.socket)) as s: - # we connect to the first address - addrs = network._test_addrs(addrinfo, 80) - self.assertTrue(len(addrs) == 1) - self.assertTrue(addrs[0] == addrinfo[0][4][0]) - - # the first lookup fails, succeeds on next check - s.side_effect = [socket.error, MagicMock()] - addrs = network._test_addrs(addrinfo, 80) - self.assertTrue(len(addrs) == 1) - self.assertTrue(addrs[0] == addrinfo[2][4][0]) - - # attempt to connect to resolved address with default timeout - s.side_effect = socket.error - addrs = network._test_addrs(addrinfo, 80) - time.sleep(2) - self.assertFalse(len(addrs) == 0) - - # nothing can connect, but we've eliminated duplicates - s.side_effect = socket.error - addrs = network._test_addrs(addrinfo, 80) - self.assertTrue(len(addrs) == 5) - - def test_is_subnet(self): - for subnet_data in (IPV4_SUBNETS, IPV6_SUBNETS): - for item in subnet_data[True]: - log.debug("Testing that %s is a valid subnet", item) - self.assertTrue(network.is_subnet(item)) - for item in subnet_data[False]: - log.debug("Testing that %s is not a valid subnet", item) - self.assertFalse(network.is_subnet(item)) - - def test_is_ipv4_subnet(self): - for item in IPV4_SUBNETS[True]: - log.debug("Testing that %s is a valid subnet", item) - self.assertTrue(network.is_ipv4_subnet(item)) - for item in IPV4_SUBNETS[False]: - log.debug("Testing that %s is not a valid subnet", item) - self.assertFalse(network.is_ipv4_subnet(item)) - - def test_is_ipv6_subnet(self): - for item in IPV6_SUBNETS[True]: - log.debug("Testing that %s is a valid subnet", item) - 
self.assertTrue(network.is_ipv6_subnet(item)) - for item in IPV6_SUBNETS[False]: - log.debug("Testing that %s is not a valid subnet", item) - self.assertFalse(network.is_ipv6_subnet(item)) - - def test_cidr_to_ipv4_netmask(self): - self.assertEqual(network.cidr_to_ipv4_netmask(24), "255.255.255.0") - self.assertEqual(network.cidr_to_ipv4_netmask(21), "255.255.248.0") - self.assertEqual(network.cidr_to_ipv4_netmask(17), "255.255.128.0") - self.assertEqual(network.cidr_to_ipv4_netmask(9), "255.128.0.0") - self.assertEqual(network.cidr_to_ipv4_netmask(36), "") - self.assertEqual(network.cidr_to_ipv4_netmask("lol"), "") - - def test_number_of_set_bits_to_ipv4_netmask(self): - set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFFFF00) - self.assertEqual(set_bits_to_netmask, "255.255.255.0") - set_bits_to_netmask = network._number_of_set_bits_to_ipv4_netmask(0xFFFF6400) - - def test_hex2ip(self): - self.assertEqual(network.hex2ip("0x4A7D2B63"), "74.125.43.99") - self.assertEqual(network.hex2ip("0x4A7D2B63", invert=True), "99.43.125.74") - self.assertEqual( - network.hex2ip("00000000000000000000FFFF7F000001"), "127.0.0.1" - ) - self.assertEqual( - network.hex2ip("0000000000000000FFFF00000100007F", invert=True), "127.0.0.1" - ) - self.assertEqual( - network.hex2ip("20010DB8000000000000000000000000"), "2001:db8::" - ) - self.assertEqual( - network.hex2ip("B80D0120000000000000000000000000", invert=True), - "2001:db8::", - ) - - def test_interfaces_ifconfig_linux(self): - interfaces = network._interfaces_ifconfig(LINUX) - self.assertEqual( - interfaces, - { - "eth0": { - "hwaddr": "e0:3f:49:85:6a:af", - "inet": [ - { - "address": "10.10.10.56", - "broadcast": "10.10.10.255", - "netmask": "255.255.252.0", - } - ], - "inet6": [ - { - "address": "fe80::e23f:49ff:fe85:6aaf", - "prefixlen": "64", - "scope": "link", - } - ], - "up": True, - }, - "lo": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [{"address": "::1", "prefixlen": 
"128", "scope": "host"}], - "up": True, - }, - }, - ) - - def test_interfaces_ifconfig_freebsd(self): - interfaces = network._interfaces_ifconfig(FREEBSD) - self.assertEqual( - interfaces, - { - "": {"up": False}, - "em0": { - "hwaddr": "00:30:48:ff:ff:ff", - "inet": [ - { - "address": "10.10.10.250", - "broadcast": "10.10.10.255", - "netmask": "255.255.255.224", - }, - { - "address": "10.10.10.56", - "broadcast": "10.10.10.63", - "netmask": "255.255.255.192", - }, - ], - "up": True, - }, - "em1": {"hwaddr": "00:30:48:aa:aa:aa", "up": False}, - "lo0": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [ - {"address": "fe80::1", "prefixlen": "64", "scope": "0x8"}, - {"address": "::1", "prefixlen": "128", "scope": None}, - ], - "up": True, - }, - "plip0": {"up": False}, - "tun0": { - "inet": [{"address": "10.12.0.1", "netmask": "255.255.255.255"}], - "up": True, - }, - }, - ) - - def test_interfaces_ifconfig_solaris(self): - with patch("salt.utils.platform.is_sunos", lambda: True): - interfaces = network._interfaces_ifconfig(SOLARIS) - expected_interfaces = { - "ilbint0": { - "inet6": [], - "inet": [ - { - "broadcast": "10.6.0.255", - "netmask": "255.255.255.0", - "address": "10.6.0.11", - } - ], - "up": True, - }, - "lo0": { - "inet6": [{"prefixlen": "128", "address": "::1"}], - "inet": [{"netmask": "255.0.0.0", "address": "127.0.0.1"}], - "up": True, - }, - "ilbext0": { - "inet6": [], - "inet": [ - { - "broadcast": "10.10.11.31", - "netmask": "255.255.255.224", - "address": "10.10.11.11", - }, - { - "broadcast": "10.10.11.31", - "netmask": "255.255.255.224", - "address": "10.10.11.12", - }, - ], - "up": True, - }, - "vpn0": { - "inet6": [], - "inet": [{"netmask": "255.0.0.0", "address": "10.6.0.14"}], - "up": True, - }, - "net0": { - "inet6": [ - {"prefixlen": "10", "address": "fe80::221:9bff:fefd:2a22"} - ], - "inet": [ - { - "broadcast": "10.10.10.63", - "netmask": "255.255.255.224", - "address": "10.10.10.38", - } - ], - "up": True, - }, 
- } - self.assertEqual(interfaces, expected_interfaces) - - def test_interfaces_ifconfig_netbsd(self): - interfaces = network._netbsd_interfaces_ifconfig(NETBSD) - self.assertEqual( - interfaces, - { - "lo0": { - "inet": [{"address": "127.0.0.1", "netmask": "255.0.0.0"}], - "inet6": [ - {"address": "fe80::1", "prefixlen": "64", "scope": "lo0"} - ], - "up": True, - }, - "vioif0": { - "hwaddr": "00:a0:98:e6:83:18", - "inet": [ - { - "address": "192.168.1.80", - "broadcast": "192.168.1.255", - "netmask": "255.255.255.0", - } - ], - "inet6": [ - { - "address": "fe80::2a0:98ff:fee6:8318", - "prefixlen": "64", - "scope": "vioif0", - } - ], - "up": True, - }, - }, - ) - - def test_freebsd_remotes_on(self): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_freebsd", lambda: True): - with patch("subprocess.check_output", return_value=FREEBSD_SOCKSTAT): - remotes = network._freebsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_freebsd_remotes_on_with_fat_pid(self): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_freebsd", lambda: True): - with patch( - "subprocess.check_output", - return_value=FREEBSD_SOCKSTAT_WITH_FAT_PID, - ): - remotes = network._freebsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_netlink_tool_remote_on_a(self): - with patch("salt.utils.platform.is_sunos", lambda: False): - with patch("salt.utils.platform.is_linux", lambda: True): - with patch( - "subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT - ): - remotes = network._netlink_tool_remote_on("4506", "local_port") - self.assertEqual(remotes, {"192.168.122.177", "::ffff:127.0.0.1"}) - - def test_netlink_tool_remote_on_b(self): - with patch("subprocess.check_output", return_value=LINUX_NETLINK_SS_OUTPUT): - remotes = network._netlink_tool_remote_on("4505", "remote_port") - self.assertEqual(remotes, {"127.0.0.1", 
"::ffff:1.2.3.4"}) - - def test_openbsd_remotes_on(self): - with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT): - remotes = network._openbsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_openbsd_remotes_on_issue_61966(self): - """ - Test that the command output is correctly converted to string before - treating it as such - """ - with patch("subprocess.check_output", return_value=OPENBSD_NETSTAT.encode()): - remotes = network._openbsd_remotes_on("4506", "remote") - self.assertEqual(remotes, {"127.0.0.1"}) - - def test_generate_minion_id_distinct(self): - """ - Test if minion IDs are distinct in the pool. - - :return: - """ - with patch("platform.node", MagicMock(return_value="nodename")), patch( - "socket.gethostname", MagicMock(return_value="hostname") - ), patch( - "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), - ): - self.assertEqual( - network._generate_minion_id(), - [ - "hostname.domainname.blank", - "nodename", - "hostname", - "1.2.3.4", - "5.6.7.8", - ], - ) - - def test_generate_minion_id_127_name(self): - """ - Test if minion IDs can be named 127.foo - - :return: - """ - with patch("platform.node", MagicMock(return_value="127")), patch( - "socket.gethostname", MagicMock(return_value="127") - ), patch( - "socket.getfqdn", MagicMock(return_value="127.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), - ): - self.assertEqual( - network._generate_minion_id(), - ["127.domainname.blank", "127", "1.2.3.4", "5.6.7.8"], - ) 
- - def test_generate_minion_id_127_name_startswith(self): - """ - Test if minion IDs can be named starting from "127" - - :return: - """ - with patch("platform.node", MagicMock(return_value="127890")), patch( - "socket.gethostname", MagicMock(return_value="127890") - ), patch( - "socket.getfqdn", MagicMock(return_value="127890.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "attrname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "5.6.7.8"]), - ): - self.assertEqual( - network._generate_minion_id(), - ["127890.domainname.blank", "127890", "1.2.3.4", "5.6.7.8"], - ) - - def test_generate_minion_id_duplicate(self): - """ - Test if IP addresses in the minion IDs are distinct in the pool - - :return: - """ - with patch("platform.node", MagicMock(return_value="hostname")), patch( - "socket.gethostname", MagicMock(return_value="hostname") - ), patch("socket.getfqdn", MagicMock(return_value="hostname")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), - ): - self.assertEqual(network._generate_minion_id(), ["hostname", "1.2.3.4"]) - - def test_generate_minion_id_platform_used(self): - """ - Test if platform.node is used for the first occurrence. - The platform.node is most common hostname resolver before anything else. 
- - :return: - """ - with patch( - "platform.node", MagicMock(return_value="very.long.and.complex.domain.name") - ), patch("socket.gethostname", MagicMock(return_value="hostname")), patch( - "socket.getfqdn", MagicMock(return_value="") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), - ): - self.assertEqual( - network.generate_minion_id(), "very.long.and.complex.domain.name" - ) - - def test_generate_minion_id_platform_localhost_filtered(self): - """ - Test if localhost is filtered from the first occurrence. - - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="pick.me") - ), patch( - "socket.getfqdn", MagicMock(return_value="hostname.domainname.blank") - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["1.2.3.4", "1.2.3.4", "1.2.3.4"]), - ): - self.assertEqual(network.generate_minion_id(), "hostname.domainname.blank") - - def test_generate_minion_id_platform_localhost_filtered_all(self): - """ - Test if any of the localhost is filtered from everywhere. 
- - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock( - return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"] - ), - ): - self.assertEqual(network.generate_minion_id(), "1.2.3.4") - - def test_generate_minion_id_platform_localhost_only(self): - """ - Test if there is no other choice but localhost. - - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), - ): - self.assertEqual(network.generate_minion_id(), "localhost") - - def test_generate_minion_id_platform_fqdn(self): - """ - Test if fqdn is picked up. 
- - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="pick.me")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), - ): - self.assertEqual(network.generate_minion_id(), "pick.me") - - def test_generate_minion_id_platform_localhost_addrinfo(self): - """ - Test if addinfo is picked up. - - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "pick.me", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock(return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1"]), - ): - self.assertEqual(network.generate_minion_id(), "pick.me") - - def test_generate_minion_id_platform_ip_addr_only(self): - """ - Test if IP address is the only what is used as a Minion ID in case no DNS name. 
- - :return: - """ - with patch("platform.node", MagicMock(return_value="localhost")), patch( - "socket.gethostname", MagicMock(return_value="ip6-loopback") - ), patch("socket.getfqdn", MagicMock(return_value="ip6-localhost")), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "localhost", ("127.0.1.1", 0))]), - ), patch( - "salt.utils.files.fopen", mock_open() - ), patch( - "salt.utils.network.ip_addrs", - MagicMock( - return_value=["127.0.0.1", "::1", "fe00::0", "fe02::1", "1.2.3.4"] - ), - ): - self.assertEqual(network.generate_minion_id(), "1.2.3.4") - - def test_gen_mac(self): - with patch("random.randint", return_value=1) as random_mock: - self.assertEqual(random_mock.return_value, 1) - ret = network.gen_mac("00:16:3E") - expected_mac = "00:16:3E:01:01:01" - self.assertEqual(ret, expected_mac) - - def test_mac_str_to_bytes(self): - self.assertRaises(ValueError, network.mac_str_to_bytes, "31337") - self.assertRaises(ValueError, network.mac_str_to_bytes, "0001020304056") - self.assertRaises(ValueError, network.mac_str_to_bytes, "00:01:02:03:04:056") - self.assertRaises(ValueError, network.mac_str_to_bytes, "a0:b0:c0:d0:e0:fg") - self.assertEqual( - b"\x10\x08\x06\x04\x02\x00", network.mac_str_to_bytes("100806040200") - ) - self.assertEqual( - b"\xf8\xe7\xd6\xc5\xb4\xa3", network.mac_str_to_bytes("f8e7d6c5b4a3") - ) - - @pytest.mark.slow_test - def test_generate_minion_id_with_long_hostname(self): - """ - Validate the fix for: - - https://github.com/saltstack/salt/issues/51160 - """ - long_name = "localhost-abcdefghijklmnopqrstuvwxyz-abcdefghijklmnopqrstuvwxyz" - with patch("socket.gethostname", MagicMock(return_value=long_name)): - # An exception is raised if unicode is passed to socket.getfqdn - minion_id = network.generate_minion_id() - assert minion_id != "", minion_id - - def test_filter_by_networks_with_no_filter(self): - ips = ["10.0.123.200", "10.10.10.10"] - with pytest.raises(TypeError): - network.filter_by_networks(ips) # pylint: 
disable=no-value-for-parameter - - def test_filter_by_networks_empty_filter(self): - ips = ["10.0.123.200", "10.10.10.10"] - assert network.filter_by_networks(ips, []) == [] - - def test_filter_by_networks_ips_list(self): - ips = [ - "10.0.123.200", - "10.10.10.10", - "193.124.233.5", - "fe80::d210:cf3f:64e7:5423", - ] - networks = ["10.0.0.0/8", "fe80::/64"] - assert network.filter_by_networks(ips, networks) == [ - "10.0.123.200", - "10.10.10.10", - "fe80::d210:cf3f:64e7:5423", - ] - - def test_filter_by_networks_interfaces_dict(self): - interfaces = { - "wlan0": ["192.168.1.100", "217.5.140.67", "2001:db8::ff00:42:8329"], - "eth0": [ - "2001:0DB8:0:CD30:123:4567:89AB:CDEF", - "192.168.1.101", - "10.0.123.201", - ], - } - assert network.filter_by_networks( - interfaces, ["192.168.1.0/24", "2001:db8::/48"] - ) == { - "wlan0": ["192.168.1.100", "2001:db8::ff00:42:8329"], - "eth0": ["2001:0DB8:0:CD30:123:4567:89AB:CDEF", "192.168.1.101"], - } - - def test_filter_by_networks_catch_all(self): - ips = [ - "10.0.123.200", - "10.10.10.10", - "193.124.233.5", - "fe80::d210:cf3f:64e7:5423", - ] - assert ips == network.filter_by_networks(ips, ["0.0.0.0/0", "::/0"]) - - def test_ip_networks(self): - # We don't need to test with each platform's ifconfig/iproute2 output, - # since this test isn't testing getting the interfaces. We already have - # tests for that. 
- interface_data = network._interfaces_ifconfig(LINUX) - - # Without loopback - ret = network.ip_networks(interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret - # Without loopback, specific interface - ret = network.ip_networks(interface="eth0", interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret - # Without loopback, multiple specific interfaces - ret = network.ip_networks(interface="eth0,lo", interface_data=interface_data) - assert ret == ["10.10.8.0/22"], ret - # Without loopback, specific interface (not present) - ret = network.ip_networks(interface="eth1", interface_data=interface_data) - assert ret == [], ret - # With loopback - ret = network.ip_networks(include_loopback=True, interface_data=interface_data) - assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret - # With loopback, specific interface - ret = network.ip_networks( - interface="eth0", include_loopback=True, interface_data=interface_data - ) - assert ret == ["10.10.8.0/22"], ret - # With loopback, multiple specific interfaces - ret = network.ip_networks( - interface="eth0,lo", include_loopback=True, interface_data=interface_data - ) - assert ret == ["10.10.8.0/22", "127.0.0.0/8"], ret - # With loopback, specific interface (not present) - ret = network.ip_networks( - interface="eth1", include_loopback=True, interface_data=interface_data - ) - assert ret == [], ret - - # Verbose, without loopback - ret = network.ip_networks(verbose=True, interface_data=interface_data) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, without loopback, specific interface - ret = network.ip_networks( - interface="eth0", verbose=True, interface_data=interface_data - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, without loopback, multiple specific interfaces 
- ret = network.ip_networks( - interface="eth0,lo", verbose=True, interface_data=interface_data - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, without loopback, specific interface (not present) - ret = network.ip_networks( - interface="eth1", verbose=True, interface_data=interface_data - ) - assert ret == {}, ret - # Verbose, with loopback - ret = network.ip_networks( - include_loopback=True, verbose=True, interface_data=interface_data - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - "127.0.0.0/8": { - "prefixlen": 8, - "netmask": "255.0.0.0", - "num_addresses": 16777216, - "address": "127.0.0.0", - }, - }, ret - # Verbose, with loopback, specific interface - ret = network.ip_networks( - interface="eth0", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - }, ret - # Verbose, with loopback, multiple specific interfaces - ret = network.ip_networks( - interface="eth0,lo", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "10.10.8.0/22": { - "prefixlen": 22, - "netmask": "255.255.252.0", - "num_addresses": 1024, - "address": "10.10.8.0", - }, - "127.0.0.0/8": { - "prefixlen": 8, - "netmask": "255.0.0.0", - "num_addresses": 16777216, - "address": "127.0.0.0", - }, - }, ret - # Verbose, with loopback, specific interface (not present) - ret = network.ip_networks( - interface="eth1", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == {}, ret - - def test_ip_networks6(self): - # We don't need to test with each platform's ifconfig/iproute2 output, - # since this test isn't testing getting the 
interfaces. We already have - # tests for that. - interface_data = network._interfaces_ifconfig(LINUX) - - # Without loopback - ret = network.ip_networks6(interface_data=interface_data) - assert ret == ["fe80::/64"], ret - # Without loopback, specific interface - ret = network.ip_networks6(interface="eth0", interface_data=interface_data) - assert ret == ["fe80::/64"], ret - # Without loopback, multiple specific interfaces - ret = network.ip_networks6(interface="eth0,lo", interface_data=interface_data) - assert ret == ["fe80::/64"], ret - # Without loopback, specific interface (not present) - ret = network.ip_networks6(interface="eth1", interface_data=interface_data) - assert ret == [], ret - # With loopback - ret = network.ip_networks6(include_loopback=True, interface_data=interface_data) - assert ret == ["::1/128", "fe80::/64"], ret - # With loopback, specific interface - ret = network.ip_networks6( - interface="eth0", include_loopback=True, interface_data=interface_data - ) - assert ret == ["fe80::/64"], ret - # With loopback, multiple specific interfaces - ret = network.ip_networks6( - interface="eth0,lo", include_loopback=True, interface_data=interface_data - ) - assert ret == ["::1/128", "fe80::/64"], ret - # With loopback, specific interface (not present) - ret = network.ip_networks6( - interface="eth1", include_loopback=True, interface_data=interface_data - ) - assert ret == [], ret - - # Verbose, without loopback - ret = network.ip_networks6(verbose=True, interface_data=interface_data) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - }, ret - # Verbose, without loopback, specific interface - ret = network.ip_networks6( - interface="eth0", verbose=True, interface_data=interface_data - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - 
}, ret - # Verbose, without loopback, multiple specific interfaces - ret = network.ip_networks6( - interface="eth0,lo", verbose=True, interface_data=interface_data - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - }, ret - # Verbose, without loopback, specific interface (not present) - ret = network.ip_networks6( - interface="eth1", verbose=True, interface_data=interface_data - ) - assert ret == {}, ret - # Verbose, with loopback - ret = network.ip_networks6( - include_loopback=True, verbose=True, interface_data=interface_data - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - "::1/128": { - "prefixlen": 128, - "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", - "num_addresses": 1, - "address": "::1", - }, - }, ret - # Verbose, with loopback, specific interface - ret = network.ip_networks6( - interface="eth0", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - }, ret - # Verbose, with loopback, multiple specific interfaces - ret = network.ip_networks6( - interface="eth0,lo", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - assert ret == { - "fe80::/64": { - "prefixlen": 64, - "netmask": "ffff:ffff:ffff:ffff::", - "num_addresses": 18446744073709551616, - "address": "fe80::", - }, - "::1/128": { - "prefixlen": 128, - "netmask": "ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff", - "num_addresses": 1, - "address": "::1", - }, - }, ret - # Verbose, with loopback, specific interface (not present) - ret = network.ip_networks6( - interface="eth1", - include_loopback=True, - verbose=True, - interface_data=interface_data, - ) - 
assert ret == {}, ret - - def test_get_fqhostname_return(self): - """ - Test if proper hostname is used when RevDNS differ from hostname - - :return: - """ - with patch("socket.gethostname", MagicMock(return_value="hostname")), patch( - "socket.getfqdn", - MagicMock(return_value="very.long.and.complex.domain.name"), - ), patch( - "socket.getaddrinfo", - MagicMock(return_value=[(2, 3, 0, "hostname", ("127.0.1.1", 0))]), - ): - self.assertEqual(network.get_fqhostname(), "hostname") - - def test_get_fqhostname_return_empty_hostname(self): - """ - Test if proper hostname is used when hostname returns empty string - """ - host = "hostname" - with patch("socket.gethostname", MagicMock(return_value=host)), patch( - "socket.getfqdn", - MagicMock(return_value="very.long.and.complex.domain.name"), - ), patch( - "socket.getaddrinfo", - MagicMock( - return_value=[ - (2, 3, 0, host, ("127.0.1.1", 0)), - (2, 3, 0, "", ("127.0.1.1", 0)), - ] - ), - ): - self.assertEqual(network.get_fqhostname(), host) - - def test_ip_bracket(self): - test_ipv4 = "127.0.0.1" - test_ipv6 = "::1" - test_ipv6_uri = "[::1]" - self.assertEqual(test_ipv4, network.ip_bracket(test_ipv4)) - self.assertEqual(test_ipv6, network.ip_bracket(test_ipv6_uri, strip=True)) - self.assertEqual("[{}]".format(test_ipv6), network.ip_bracket(test_ipv6)) - self.assertEqual("[{}]".format(test_ipv6), network.ip_bracket(test_ipv6_uri)) - - ip_addr_obj = ipaddress.ip_address(test_ipv4) - self.assertEqual(test_ipv4, network.ip_bracket(ip_addr_obj)) diff --git a/tests/unit/utils/test_parsers.py b/tests/unit/utils/test_parsers.py deleted file mode 100644 index 06e75d5d7a75..000000000000 --- a/tests/unit/utils/test_parsers.py +++ /dev/null @@ -1,1283 +0,0 @@ -""" - :codeauthor: Denys Havrysh -""" - -import logging -import os -import pprint -import shutil -import tempfile - -import salt._logging -import salt.config -import salt.syspaths -import salt.utils.jid -import salt.utils.parsers -import salt.utils.platform -from 
tests.support.helpers import TstSuiteLoggingHandler -from tests.support.mock import ANY, MagicMock, patch -from tests.support.runtests import RUNTIME_VARS -from tests.support.unit import TestCase - -log = logging.getLogger(__name__) - - -class ErrorMock: # pylint: disable=too-few-public-methods - """ - Error handling - """ - - def __init__(self): - """ - init - """ - self.msg = None - - def error(self, msg): - """ - Capture error message - """ - self.msg = msg - - -class LogImplMock: - """ - Logger setup - """ - - def __init__(self): - """ - init - """ - self.log_level_console = None - self.log_file = None - self.log_level_logfile = None - self.config = self.original_config = None - logging_options = salt._logging.get_logging_options_dict() - if logging_options: - self.config = logging_options.copy() - self.original_config = self.config.copy() - self.temp_log_level = None - self._console_handler_configured = False - self._extended_logging_configured = False - self._logfile_handler_configured = False - self._real_set_logging_options_dict = salt._logging.set_logging_options_dict - self._real_get_logging_options_dict = salt._logging.get_logging_options_dict - self._real_setup_logfile_handler = salt._logging.setup_logfile_handler - - def _destroy(self): - salt._logging.set_logging_options_dict.__options_dict__ = self.original_config - salt._logging.shutdown_logfile_handler() - - def setup_temp_handler(self, log_level=None): - """ - Set temp handler loglevel - """ - log.debug("Setting temp handler log level to: %s", log_level) - self.temp_log_level = log_level - - def is_console_handler_configured(self): - log.debug("Calling is_console_handler_configured") - return self._console_handler_configured - - def setup_console_handler( - self, log_level="error", **kwargs - ): # pylint: disable=unused-argument - """ - Set console loglevel - """ - log.debug("Setting console handler log level to: %s", log_level) - self.log_level_console = log_level - 
self._console_handler_configured = True - - def shutdown_console_handler(self): - log.debug("Calling shutdown_console_handler") - self._console_handler_configured = False - - def is_extended_logging_configured(self): - log.debug("Calling is_extended_logging_configured") - return self._extended_logging_configured - - def setup_extended_logging(self, opts): - """ - Set opts - """ - log.debug("Calling setup_extended_logging") - self._extended_logging_configured = True - - def shutdown_extended_logging(self): - log.debug("Calling shutdown_extended_logging") - self._extended_logging_configured = False - - def is_logfile_handler_configured(self): - log.debug("Calling is_logfile_handler_configured") - return self._logfile_handler_configured - - def setup_logfile_handler( - self, log_path, log_level=None, **kwargs - ): # pylint: disable=unused-argument - """ - Set logfile and loglevel - """ - log.debug("Setting log file handler path to: %s", log_path) - log.debug("Setting log file handler log level to: %s", log_level) - self.log_file = log_path - self.log_level_logfile = log_level - self._real_setup_logfile_handler(log_path, log_level=log_level, **kwargs) - self._logfile_handler_configured = True - - def shutdown_logfile_handler(self): - log.debug("Calling shutdown_logfile_handler") - self._logfile_handler_configured = False - - def get_logging_options_dict(self): - log.debug("Calling get_logging_options_dict") - return self.config - - def set_logging_options_dict(self, opts): - log.debug("Calling set_logging_options_dict") - self._real_set_logging_options_dict(opts) - self.config = self._real_get_logging_options_dict() - log.debug("Logging options dict:\n%s", pprint.pformat(self.config)) - - def setup_log_granular_levels(self, opts): - log.debug("Calling setup_log_granular_levels") - - def setup_logging(self): - log.debug("Mocked setup_logging called") - # Wether daemonizing or not, either on the main process or on a separate process - # The log file is going to be 
configured. - # The console is the only handler not configured if daemonizing - - # These routines are what happens on salt._logging.setup_logging - opts = self.get_logging_options_dict() - - if ( - opts.get("configure_console_logger", True) - and not self.is_console_handler_configured() - ): - self.setup_console_handler( - log_level=opts["log_level"], - log_format=opts["log_fmt_console"], - date_format=opts["log_datefmt"], - ) - if ( - opts.get("configure_file_logger", True) - and not self.is_logfile_handler_configured() - ): - log_file_level = opts["log_level_logfile"] or opts["log_level"] - if log_file_level != "quiet": - self.setup_logfile_handler( - log_path=opts[opts["log_file_key"]], - log_level=log_file_level, - log_format=opts["log_fmt_logfile"], - date_format=opts["log_datefmt_logfile"], - max_bytes=opts["log_rotate_max_bytes"], - backup_count=opts["log_rotate_backup_count"], - user=opts["user"], - ) - if not self.is_extended_logging_configured(): - self.setup_extended_logging(opts) - self.setup_log_granular_levels(opts["log_granular_levels"]) - - -class ObjectView: # pylint: disable=too-few-public-methods - """ - Dict object view - """ - - def __init__(self, d): - self.__dict__ = d - - -class ParserBase: - """ - Unit Tests for Log Level Mixin with Salt parsers - """ - - args = [] - - log_impl = None - - # Set config option names - loglevel_config_setting_name = "log_level" - logfile_config_setting_name = "log_file" - logfile_loglevel_config_setting_name = ( - "log_level_logfile" # pylint: disable=invalid-name - ) - - @classmethod - def setUpClass(cls): - cls.root_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) - - @classmethod - def tearDownClass(cls): - shutil.rmtree(cls.root_dir, ignore_errors=True) - - def setup_log(self): - """ - Mock logger functions - """ - testing_config = self.default_config.copy() - testing_config["root_dir"] = self.root_dir - for name in ("pki_dir", "cachedir"): - testing_config[name] = name - 
testing_config[self.logfile_config_setting_name] = getattr( - self, self.logfile_config_setting_name, self.log_file - ) - self.testing_config = testing_config - self.addCleanup(setattr, self, "testing_config", None) - - self.log_impl = LogImplMock() - self.addCleanup(self.log_impl._destroy) - self.addCleanup(setattr, self, "log_impl", None) - - mocked_functions = {} - for name in dir(self.log_impl): - if name.startswith("_"): - continue - func = getattr(self.log_impl, name) - if not callable(func): - continue - mocked_functions[name] = func - patcher = patch.multiple(salt._logging, **mocked_functions) - patcher.start() - self.addCleanup(patcher.stop) - - # log level configuration tests - - def test_get_log_level_cli(self): - """ - Tests that log level match command-line specified value - """ - # Set defaults - default_log_level = self.testing_config[self.loglevel_config_setting_name] - - # Set log level in CLI - log_level = "critical" - args = ["--log-level", log_level] + self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - console_log_level = getattr(parser.options, self.loglevel_config_setting_name) - - # Check console log level setting - self.assertEqual(console_log_level, log_level) - # Check console logger log level - self.assertEqual(self.log_impl.log_level_console, log_level) - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual(self.log_impl.temp_log_level, log_level) - # Check log file logger log level - self.assertEqual(self.log_impl.log_level_logfile, default_log_level) - - def test_get_log_level_config(self): - """ - Tests that log level match the configured value - """ - args = self.args - - # Set log level in config - log_level = "info" - opts = self.testing_config.copy() - opts.update({self.loglevel_config_setting_name: log_level}) - - parser = self.parser() - with patch(self.config_func, 
MagicMock(return_value=opts)): - parser.parse_args(args) - - console_log_level = getattr(parser.options, self.loglevel_config_setting_name) - - # Check console log level setting - self.assertEqual(console_log_level, log_level) - # Check console logger log level - self.assertEqual(self.log_impl.log_level_console, log_level) - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file logger log level - self.assertEqual(self.log_impl.log_level_logfile, log_level) - - def test_get_log_level_default(self): - """ - Tests that log level match the default value - """ - # Set defaults - log_level = default_log_level = self.testing_config[ - self.loglevel_config_setting_name - ] - - args = self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - console_log_level = getattr(parser.options, self.loglevel_config_setting_name) - - # Check log level setting - self.assertEqual(console_log_level, log_level) - # Check console logger log level - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, default_log_level) - # Check help message - self.assertIn( - "Default: '{}'.".format(default_log_level), - parser.get_option("--log-level").help, - ) - - # log file configuration tests - - def test_get_log_file_cli(self): - """ - Tests that log file match command-line specified value - """ - # Set defaults - log_level = self.testing_config[self.loglevel_config_setting_name] - - # Set log file in CLI - log_file = "{}_cli.log".format(self.log_file) - args = ["--log-file", log_file] + self.args - - parser = self.parser() - with 
patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - log_file_option = getattr(parser.options, self.logfile_config_setting_name) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_config_setting_name], log_file - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file setting - self.assertEqual(log_file_option, log_file) - # Check log file logger - self.assertEqual(self.log_impl.log_file, log_file) - - def test_get_log_file_config(self): - """ - Tests that log file match the configured value - """ - # Set defaults - log_level = self.testing_config[self.loglevel_config_setting_name] - - args = self.args - - # Set log file in config - log_file = "{}_config.log".format(self.log_file) - opts = self.testing_config.copy() - opts.update({self.logfile_config_setting_name: log_file}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - log_file_option = getattr(parser.options, self.logfile_config_setting_name) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_config_setting_name], log_file - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file setting - self.assertEqual(log_file_option, log_file) - # Check log file logger - self.assertEqual(self.log_impl.log_file, log_file) - - def test_get_log_file_default(self): - """ - Tests that log file match the default value - """ - # Set defaults - log_level = 
self.testing_config[self.loglevel_config_setting_name] - log_file = self.testing_config[self.logfile_config_setting_name] - default_log_file = self.default_config[self.logfile_config_setting_name] - - args = self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - log_file_option = getattr(parser.options, self.logfile_config_setting_name) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_config_setting_name], log_file - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file setting - self.assertEqual(log_file_option, log_file) - # Check log file logger - self.assertEqual(self.log_impl.log_file, log_file) - # Check help message - self.assertIn( - "Default: '{}'.".format(default_log_file), - parser.get_option("--log-file").help, - ) - - # log file log level configuration tests - - def test_get_log_file_level_cli(self): - """ - Tests that file log level match command-line specified value - """ - # Set defaults - default_log_level = self.testing_config[self.loglevel_config_setting_name] - - # Set log file level in CLI - log_level_logfile = "error" - args = ["--log-file-level", log_level_logfile] + self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, default_log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], - default_log_level, - ) - self.assertEqual( - 
self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - - def test_get_log_file_level_config(self): - """ - Tests that log file level match the configured value - """ - # Set defaults - log_level = self.testing_config[self.loglevel_config_setting_name] - - args = self.args - - # Set log file level in config - log_level_logfile = "info" - opts = self.testing_config.copy() - opts.update({self.logfile_loglevel_config_setting_name: log_level_logfile}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - - def test_get_log_file_level_default(self): - """ - Tests that log file level match the default value - """ - # Set defaults - default_log_level = self.testing_config[self.loglevel_config_setting_name] - - log_level = default_log_level - log_level_logfile = default_log_level - - args = self.args - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - 
parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - # Check help message - self.assertIn( - "Default: '{}'.".format(default_log_level), - parser.get_option("--log-file-level").help, - ) - - def test_get_console_log_level_with_file_log_level( - self, - ): # pylint: disable=invalid-name - """ - Tests that both console log level and log file level setting are working together - """ - log_level = "critical" - log_level_logfile = "debug" - - args = ["--log-file-level", log_level_logfile] + self.args - - opts = self.testing_config.copy() - opts.update({self.loglevel_config_setting_name: log_level}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - log_level_logfile_option = getattr( - parser.options, self.logfile_loglevel_config_setting_name - ) - - # Check console logger - self.assertEqual(self.log_impl.log_level_console, log_level) - # Check extended logger - self.assertEqual( - self.log_impl.config[self.loglevel_config_setting_name], log_level - ) - self.assertEqual( - self.log_impl.config[self.logfile_loglevel_config_setting_name], - log_level_logfile, - ) - # Check temp logger - self.assertEqual(self.log_impl.temp_log_level, "error") - # Check log file level setting - self.assertEqual(log_level_logfile_option, log_level_logfile) - # Check 
log file logger - self.assertEqual(self.log_impl.log_level_logfile, log_level_logfile) - - def test_log_created(self): - """ - Tests that log file is created - """ - args = self.args - log_file = self.log_file - log_file_name = self.logfile_config_setting_name - opts = self.testing_config.copy() - opts.update({"log_file": log_file}) - if log_file_name != "log_file": - opts.update({log_file_name: getattr(self, log_file_name)}) - - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=opts)): - parser.parse_args(args) - - if log_file_name == "log_file": - self.assertGreaterEqual(os.path.getsize(log_file), 0) - else: - self.assertGreaterEqual(os.path.getsize(getattr(self, log_file_name)), 0) - - def test_callbacks_uniqueness(self): - """ - Test that the callbacks are only added once, no matter - how many instances of the parser we create - """ - mixin_container_names = ( - "_mixin_setup_funcs", - "_mixin_process_funcs", - "_mixin_after_parsed_funcs", - "_mixin_before_exit_funcs", - ) - parser = self.parser() - nums_1 = {} - for cb_container in mixin_container_names: - obj = getattr(parser, cb_container) - nums_1[cb_container] = len(obj) - - # The next time we instantiate the parser, the counts should be equal - parser = self.parser() - nums_2 = {} - for cb_container in mixin_container_names: - obj = getattr(parser, cb_container) - nums_2[cb_container] = len(obj) - self.assertDictEqual(nums_1, nums_2) - - def test_verify_log_warning_logged(self): - args = ["--log-level", "debug"] + self.args - with TstSuiteLoggingHandler(level=logging.DEBUG) as handler: - parser = self.parser() - with patch(self.config_func, MagicMock(return_value=self.testing_config)): - parser.parse_args(args) - self.assertIn( - "WARNING:Insecure logging configuration detected! 
Sensitive data may be logged.", - handler.messages, - ) - - -class MasterOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Master options - """ - - def setUp(self): - """ - Setting up - """ - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_master_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.MasterOptionParser - self.addCleanup(delattr, self, "parser") - - -class MinionOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Minion options - """ - - def setUp(self): - """ - Setting up - """ - # Set defaults - self.default_config = salt.config.DEFAULT_MINION_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_minion_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.minion_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.MinionOptionParser - self.addCleanup(delattr, self, "parser") - - -class ProxyMinionOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Proxy Minion options - """ - - def setUp(self): - """ - Setting up - """ - # Set defaults - self.default_config = 
salt.config.DEFAULT_MINION_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_PROXY_MINION_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_proxy_minion_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.proxy_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.ProxyMinionOptionParser - self.addCleanup(delattr, self, "parser") - - -class SyndicOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Syndic options - """ - - def setUp(self): - """ - Setting up - """ - # Set config option names - self.logfile_config_setting_name = "syndic_log_file" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_syndic_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - syndic_log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_syndic_log", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.syndic_log_file = syndic_log_file.name - syndic_log_file.close() - # Function to patch - self.config_func = "salt.config.syndic_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SyndicOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltCMDOptionParserTestCase(ParserBase, TestCase): - """ - Tests 
parsing Salt CLI options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar.baz"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_cmd_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.client_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCMDOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltCPOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing salt-cp options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar", "baz"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_cp_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCPOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltKeyOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing salt-key options - """ - - def setUp(self): - """ - Setting up - """ - # Set config option names - self.logfile_config_setting_name = 
"key_logfile" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_key_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - key_logfile = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_key_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.key_logfile = key_logfile.name - key_logfile.close() - # Function to patch - self.config_func = "salt.config.client_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltKeyOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltCallOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Minion options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo.bar"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MINION_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_call_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.minion_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCallOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltRunOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Master options - """ - - def setUp(self): - """ - Setting 
up - """ - # Set mandatory CLI options - self.args = ["foo.bar"] - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_run_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltRunOptionParser - self.addCleanup(delattr, self, "parser") - - -class SaltSSHOptionParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Master options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar.baz"] - - # Set config option names - self.logfile_config_setting_name = "ssh_log_file" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_ssh_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - ssh_log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_ssh_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.ssh_log_file = ssh_log_file.name - ssh_log_file.close() - # Function to patch - self.config_func = "salt.config.master_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltSSHOptionParser - self.addCleanup(delattr, self, "parser") - - 
-class SaltCloudParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Cloud options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["-p", "foo", "bar"] - - # Set default configs - # Cloud configs are merged with master configs in - # config/__init__.py, so we'll do that here as well - # As we need the 'user' key later on. - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_CLOUD_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_cloud_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - # Function to patch - self.config_func = "salt.config.cloud_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltCloudParser - self.addCleanup(delattr, self, "parser") - - -class SPMParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Cloud options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = ["foo", "bar"] - - # Set config option names - self.logfile_config_setting_name = "spm_logfile" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_SPM_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_spm_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - spm_logfile = 
tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_spm_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.spm_logfile = spm_logfile.name - spm_logfile.close() - # Function to patch - self.config_func = "salt.config.spm_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SPMParser - self.addCleanup(delattr, self, "parser") - - -class SaltAPIParserTestCase(ParserBase, TestCase): - """ - Tests parsing Salt Cloud options - """ - - def setUp(self): - """ - Setting up - """ - # Set mandatory CLI options - self.args = [] - - # Set config option names - self.logfile_config_setting_name = "api_logfile" - - # Set defaults - self.default_config = salt.config.DEFAULT_MASTER_OPTS.copy() - self.default_config.update(salt.config.DEFAULT_API_OPTS) - self.addCleanup(delattr, self, "default_config") - - # Log file - # We need to use NamedTemporaryFile because Windows won't allow deleting - # the log file even after it has been closed: WindowsError 32 - log_file = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_salt_api_parser_test", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.log_file = log_file.name - log_file.close() - api_logfile = tempfile.NamedTemporaryFile( - prefix="test_parsers_", - suffix="_api_logfile", - dir=RUNTIME_VARS.TMP, - delete=True, - ) - self.api_logfile = api_logfile.name - api_logfile.close() - # Function to patch - self.config_func = "salt.config.api_config" - - # Mock log setup - self.setup_log() - - # Assign parser - self.parser = salt.utils.parsers.SaltAPIParser - self.addCleanup(delattr, self, "parser") - - -class DaemonMixInTestCase(TestCase): - """ - Tests the PIDfile deletion in the DaemonMixIn. 
- """ - - def setUp(self): - """ - Setting up - """ - # Setup mixin - self.daemon_mixin = salt.utils.parsers.DaemonMixIn() - self.daemon_mixin.config = {} - self.daemon_mixin.config["pidfile"] = "/some/fake.pid" - - def tearDown(self): - """ - Tear down test - :return: - """ - del self.daemon_mixin - - @patch("os.unlink", MagicMock()) - @patch("os.path.isfile", MagicMock(return_value=True)) - @patch("salt.utils.parsers.log", MagicMock()) - def test_pid_file_deletion(self): - """ - PIDfile deletion without exception. - """ - self.daemon_mixin._mixin_before_exit() - assert salt.utils.parsers.os.unlink.call_count == 1 - salt.utils.parsers.log.info.assert_not_called() - salt.utils.parsers.log.debug.assert_not_called() - - @patch("os.unlink", MagicMock(side_effect=OSError())) - @patch("os.path.isfile", MagicMock(return_value=True)) - @patch("salt.utils.parsers.log", MagicMock()) - def test_pid_deleted_oserror_as_root(self): - """ - PIDfile deletion with exception, running as root. - """ - if salt.utils.platform.is_windows(): - patch_args = ( - "salt.utils.win_functions.is_admin", - MagicMock(return_value=True), - ) - else: - patch_args = ("os.getuid", MagicMock(return_value=0)) - - with patch(*patch_args): - self.daemon_mixin._mixin_before_exit() - assert salt.utils.parsers.os.unlink.call_count == 1 - salt.utils.parsers.log.info.assert_called_with( - "PIDfile(%s) could not be deleted: %s", - format(self.daemon_mixin.config["pidfile"], ""), - ANY, - exc_info_on_loglevel=logging.DEBUG, - ) - - @patch("os.unlink", MagicMock(side_effect=OSError())) - @patch("os.path.isfile", MagicMock(return_value=True)) - @patch("salt.utils.parsers.log", MagicMock()) - def test_pid_deleted_oserror_as_non_root(self): - """ - PIDfile deletion with exception, running as non-root. 
- """ - if salt.utils.platform.is_windows(): - patch_args = ( - "salt.utils.win_functions.is_admin", - MagicMock(return_value=False), - ) - else: - patch_args = ("os.getuid", MagicMock(return_value=1000)) - - with patch(*patch_args): - self.daemon_mixin._mixin_before_exit() - assert salt.utils.parsers.os.unlink.call_count == 1 - salt.utils.parsers.log.info.assert_not_called() - salt.utils.parsers.log.debug.assert_not_called() diff --git a/tests/unit/utils/test_path.py b/tests/unit/utils/test_path.py index bebb9ce284a7..47a108a2f4ee 100644 --- a/tests/unit/utils/test_path.py +++ b/tests/unit/utils/test_path.py @@ -4,6 +4,7 @@ import posixpath import sys import tempfile +import types import pytest @@ -40,14 +41,14 @@ class PathJoinTestCase(TestCase): def test_nix_paths(self): for idx, (parts, expected) in enumerate(self.NIX_PATHS): path = salt.utils.path.join(*parts) - assert "{}: {}".format(idx, path) == "{}: {}".format(idx, expected) + assert f"{idx}: {path}" == f"{idx}: {expected}" @pytest.mark.skip(reason="Skipped until properly mocked") @pytest.mark.skip_unless_on_windows def test_windows_paths(self): for idx, (parts, expected) in enumerate(self.WIN_PATHS): path = salt.utils.path.join(*parts) - assert "{}: {}".format(idx, path) == "{}: {}".format(idx, expected) + assert f"{idx}: {path}" == f"{idx}: {expected}" @pytest.mark.skip(reason="Skipped until properly mocked") @pytest.mark.skip_on_windows @@ -57,7 +58,7 @@ def test_windows_paths_patched_path_module(self): try: for idx, (parts, expected) in enumerate(self.WIN_PATHS): path = salt.utils.path.join(*parts) - assert "{}: {}".format(idx, path) == "{}: {}".format(idx, expected) + assert f"{idx}: {path}" == f"{idx}: {expected}" finally: self.__unpatch_path() @@ -79,14 +80,12 @@ def test_mixed_unicode_and_binary(self): assert actual == expected def __patch_path(self): - import imp - modules = list(self.BUILTIN_MODULES[:]) modules.pop(modules.index("posix")) modules.append("nt") code = """'''Salt unittest loaded NT 
module'''""" - module = imp.new_module("nt") + module = types.ModuleType("nt") exec(code, module.__dict__) sys.modules["nt"] = module diff --git a/tests/unit/utils/test_rsax931.py b/tests/unit/utils/test_rsax931.py deleted file mode 100644 index 38102b440de0..000000000000 --- a/tests/unit/utils/test_rsax931.py +++ /dev/null @@ -1,278 +0,0 @@ -""" -Test the RSA ANSI X9.31 signer and verifier -""" - -import ctypes -import ctypes.util -import fnmatch -import glob -import os -import platform -import sys - -import pytest - -import salt.utils.platform - -# salt libs -from salt.utils.rsax931 import ( - RSAX931Signer, - RSAX931Verifier, - _find_libcrypto, - _load_libcrypto, -) -from tests.support.mock import patch - -# salt testing libs -from tests.support.unit import TestCase - - -class RSAX931Test(TestCase): - - privkey_data = ( - "-----BEGIN RSA PRIVATE KEY-----\n" - "MIIEpAIBAAKCAQEA75GR6ZTv5JOv90Vq8tKhKC7YQnhDIo2hM0HVziTEk5R4UQBW\n" - "a0CKytFMbTONY2msEDwX9iA0x7F5Lgj0X8eD4ZMsYqLzqjWMekLC8bjhxc+EuPo9\n" - "Dygu3mJ2VgRC7XhlFpmdo5NN8J2E7B/CNB3R4hOcMMZNZdi0xLtFoTfwU61UPfFX\n" - "14mV2laqLbvDEfQLJhUTDeFFV8EN5Z4H1ttLP3sMXJvc3EvM0JiDVj4l1TWFUHHz\n" - "eFgCA1Im0lv8i7PFrgW7nyMfK9uDSsUmIp7k6ai4tVzwkTmV5PsriP1ju88Lo3MB\n" - "4/sUmDv/JmlZ9YyzTO3Po8Uz3Aeq9HJWyBWHAQIDAQABAoIBAGOzBzBYZUWRGOgl\n" - "IY8QjTT12dY/ymC05GM6gMobjxuD7FZ5d32HDLu/QrknfS3kKlFPUQGDAbQhbbb0\n" - "zw6VL5NO9mfOPO2W/3FaG1sRgBQcerWonoSSSn8OJwVBHMFLG3a+U1Zh1UvPoiPK\n" - "S734swIM+zFpNYivGPvOm/muF/waFf8tF/47t1cwt/JGXYQnkG/P7z0vp47Irpsb\n" - "Yjw7vPe4BnbY6SppSxscW3KoV7GtJLFKIxAXbxsuJMF/rYe3O3w2VKJ1Sug1VDJl\n" - "/GytwAkSUer84WwP2b07Wn4c5pCnmLslMgXCLkENgi1NnJMhYVOnckxGDZk54hqP\n" - "9RbLnkkCgYEA/yKuWEvgdzYRYkqpzB0l9ka7Y00CV4Dha9Of6GjQi9i4VCJ/UFVr\n" - "UlhTo5y0ZzpcDAPcoZf5CFZsD90a/BpQ3YTtdln2MMCL/Kr3QFmetkmDrt+3wYnX\n" - "sKESfsa2nZdOATRpl1antpwyD4RzsAeOPwBiACj4fkq5iZJBSI0bxrMCgYEA8GFi\n" - "qAjgKh81/Uai6KWTOW2kX02LEMVRrnZLQ9VPPLGid4KZDDk1/dEfxjjkcyOxX1Ux\n" - 
"Klu4W8ZEdZyzPcJrfk7PdopfGOfrhWzkREK9C40H7ou/1jUecq/STPfSOmxh3Y+D\n" - "ifMNO6z4sQAHx8VaHaxVsJ7SGR/spr0pkZL+NXsCgYEA84rIgBKWB1W+TGRXJzdf\n" - "yHIGaCjXpm2pQMN3LmP3RrcuZWm0vBt94dHcrR5l+u/zc6iwEDTAjJvqdU4rdyEr\n" - "tfkwr7v6TNlQB3WvpWanIPyVzfVSNFX/ZWSsAgZvxYjr9ixw6vzWBXOeOb/Gqu7b\n" - "cvpLkjmJ0wxDhbXtyXKhZA8CgYBZyvcQb+hUs732M4mtQBSD0kohc5TsGdlOQ1AQ\n" - "McFcmbpnzDghkclyW8jzwdLMk9uxEeDAwuxWE/UEvhlSi6qdzxC+Zifp5NBc0fVe\n" - "7lMx2mfJGxj5CnSqQLVdHQHB4zSXkAGB6XHbBd0MOUeuvzDPfs2voVQ4IG3FR0oc\n" - "3/znuwKBgQChZGH3McQcxmLA28aUwOVbWssfXKdDCsiJO+PEXXlL0maO3SbnFn+Q\n" - "Tyf8oHI5cdP7AbwDSx9bUfRPjg9dKKmATBFr2bn216pjGxK0OjYOCntFTVr0psRB\n" - "CrKg52Qrq71/2l4V2NLQZU40Dr1bN9V+Ftd9L0pvpCAEAWpIbLXGDw==\n" - "-----END RSA PRIVATE KEY-----" - ) - - pubkey_data = ( - "-----BEGIN PUBLIC KEY-----\n" - "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA75GR6ZTv5JOv90Vq8tKh\n" - "KC7YQnhDIo2hM0HVziTEk5R4UQBWa0CKytFMbTONY2msEDwX9iA0x7F5Lgj0X8eD\n" - "4ZMsYqLzqjWMekLC8bjhxc+EuPo9Dygu3mJ2VgRC7XhlFpmdo5NN8J2E7B/CNB3R\n" - "4hOcMMZNZdi0xLtFoTfwU61UPfFX14mV2laqLbvDEfQLJhUTDeFFV8EN5Z4H1ttL\n" - "P3sMXJvc3EvM0JiDVj4l1TWFUHHzeFgCA1Im0lv8i7PFrgW7nyMfK9uDSsUmIp7k\n" - "6ai4tVzwkTmV5PsriP1ju88Lo3MB4/sUmDv/JmlZ9YyzTO3Po8Uz3Aeq9HJWyBWH\n" - "AQIDAQAB\n" - "-----END PUBLIC KEY-----" - ) - - hello_world = b"hello, world" - - hello_world_sig = ( - b"\x63\xa0\x70\xd2\xe4\xd4\x6b\x8a\xa2\x59\x27\x5f\x00\x69" - b"\x1e\x3c\x50\xed\x50\x13\x09\x80\xe3\x47\x4e\x14\xb5\x7c" - b"\x07\x26\x4e\x20\x74\xea\x0e\xf8\xda\xff\x1e\x57\x8c\x67" - b"\x76\x73\xaa\xea\x0f\x0a\xe7\xa2\xe3\x88\xfc\x09\x87\x36" - b"\x01\x3a\xb7\x4c\x40\xe0\xf4\x54\xc5\xf1\xaa\xb2\x1d\x7f" - b"\xb6\xd3\xa8\xdd\x28\x69\x8b\x88\xe4\x42\x1e\x48\x3e\x1f" - b"\xe2\x2b\x3c\x7c\x85\x11\xe9\x59\xd7\xf3\xc2\x21\xd3\x55" - b"\xcb\x9c\x3c\x93\xcc\x20\xdf\x64\x81\xd0\x0d\xbf\x8e\x8d" - b"\x47\xec\x1d\x9e\x27\xec\x12\xed\x8b\x5f\xd6\x1d\xec\x8d" - b"\x77\x5a\x58\x8a\x24\xb6\x0f\x12\xb7\x51\xef\x7d\x85\x0f" - 
b"\x49\x39\x02\x81\x15\x08\x70\xd6\xe0\x0b\x31\xff\x5f\xf9" - b"\xd1\x92\x38\x59\x8c\x22\x9c\xbb\xbf\xcf\x85\x34\xe2\x47" - b"\xf5\xe2\xaa\xb4\x62\x33\x3c\x13\x78\x33\x87\x08\x9e\xb5" - b"\xbc\x5d\xc1\xbf\x79\x7c\xfa\x5f\x06\x6a\x3b\x17\x40\x09" - b"\xb9\x09\xbf\x32\xc3\x00\xe2\xbc\x91\x77\x14\xa5\x23\xf5" - b"\xf5\xf1\x09\x12\x38\xda\x3b\x6a\x82\x81\x7b\x5e\x1c\xcb" - b"\xaa\x36\x9b\x08\x36\x03\x14\x96\xa3\x31\x39\x59\x16\x75" - b"\xc9\xb6\x66\x94\x1b\x97\xff\xc8\xa1\xe3\x21\x35\x23\x06" - b"\x4c\x9b\xf4\xee" - ) - - def test_signer(self): - with self.assertRaises(ValueError): - signer = RSAX931Signer("bogus key data") - with self.assertRaises(ValueError): - signer = RSAX931Signer(RSAX931Test.pubkey_data) - - signer = RSAX931Signer(RSAX931Test.privkey_data) - with self.assertRaises(ValueError): - signer.sign("x" * 255) # message too long - - sig = signer.sign(RSAX931Test.hello_world) - self.assertEqual(RSAX931Test.hello_world_sig, sig) - - def test_verifier(self): - with self.assertRaises(ValueError): - verifier = RSAX931Verifier("bogus key data") - with self.assertRaises(ValueError): - verifier = RSAX931Verifier(RSAX931Test.privkey_data) - - verifier = RSAX931Verifier(RSAX931Test.pubkey_data) - with self.assertRaises(ValueError): - verifier.verify("") - with self.assertRaises(ValueError): - verifier.verify(RSAX931Test.hello_world_sig + b"junk") - - msg = verifier.verify(RSAX931Test.hello_world_sig) - self.assertEqual(RSAX931Test.hello_world, msg) - - @pytest.mark.skip_unless_on_windows - def test_find_libcrypto_win32(self): - """ - Test _find_libcrypto on Windows hosts. - """ - lib_path = _find_libcrypto() - self.assertEqual(lib_path, "libeay32") - - @pytest.mark.skip_unless_on_smartos - def test_find_libcrypto_smartos(self): - """ - Test _find_libcrypto on a SmartOS host. 
- """ - lib_path = _find_libcrypto() - self.assertTrue( - fnmatch.fnmatch( - lib_path, os.path.join(os.path.dirname(sys.executable), "libcrypto*") - ) - ) - - @pytest.mark.skip_unless_on_sunos - def test_find_libcrypto_sunos(self): - """ - Test _find_libcrypto on a Solaris-like host. - """ - lib_path = _find_libcrypto() - passed = False - for i in ("/opt/local/lib/libcrypto.so*", "/opt/tools/lib/libcrypto.so*"): - if fnmatch.fnmatch(lib_path, i): - passed = True - break - self.assertTrue(passed) - - @pytest.mark.skip_unless_on_aix - def test_find_libcrypto_aix(self): - """ - Test _find_libcrypto on an IBM AIX host. - """ - lib_path = _find_libcrypto() - if os.path.isdir("/opt/salt/lib"): - self.assertTrue(fnmatch.fnmatch(lib_path, "/opt/salt/lib/libcrypto.so*")) - else: - self.assertTrue( - fnmatch.fnmatch(lib_path, "/opt/freeware/lib/libcrypto.so*") - ) - - @patch.object(salt.utils.platform, "is_darwin", lambda: True) - @patch.object(platform, "mac_ver", lambda: ("10.14.2", (), "")) - @patch.object(glob, "glob", lambda _: []) - @patch.object(sys, "platform", "macosx") - def test_find_libcrypto_with_system_before_catalina(self): - """ - Test _find_libcrypto on a pre-Catalina macOS host by simulating not - finding any other libcryptos and verifying that it defaults to system. - """ - lib_path = _find_libcrypto() - self.assertEqual(lib_path, "/usr/lib/libcrypto.dylib") - - @patch.object(salt.utils.platform, "is_darwin", lambda: True) - @patch.object(platform, "mac_ver", lambda: ("10.15.2", (), "")) - @patch.object(sys, "platform", "macosx") - def test_find_libcrypto_darwin_catalina(self): - """ - Test _find_libcrypto on a macOS Catalina host where there are no custom - libcryptos and defaulting to the versioned system libraries. 
- """ - available = [ - "/usr/lib/libcrypto.0.9.7.dylib", - "/usr/lib/libcrypto.0.9.8.dylib", - "/usr/lib/libcrypto.35.dylib", - "/usr/lib/libcrypto.41.dylib", - "/usr/lib/libcrypto.42.dylib", - "/usr/lib/libcrypto.44.dylib", - "/usr/lib/libcrypto.dylib", - ] - - def test_glob(pattern): - return [lib for lib in available if fnmatch.fnmatch(lib, pattern)] - - with patch.object(glob, "glob", test_glob): - lib_path = _find_libcrypto() - self.assertEqual("/usr/lib/libcrypto.44.dylib", lib_path) - - @patch.object(salt.utils.platform, "is_darwin", lambda: True) - @patch.object(platform, "mac_ver", lambda: ("11.2.2", (), "")) - @patch.object(sys, "platform", "macosx") - def test_find_libcrypto_darwin_bigsur_packaged(self): - """ - Test _find_libcrypto on a Darwin-like macOS host where there isn't a - lacation returned by ctypes.util.find_library() and the libcrypto - installation comes from a package manager (ports, brew, salt). - """ - managed_paths = { - "salt": "/opt/salt/lib/libcrypto.dylib", - "brew": "/test/homebrew/prefix/opt/openssl/lib/libcrypto.dylib", - "port": "/opt/local/lib/libcrypto.dylib", - } - - saved_getenv = os.getenv - - def mock_getenv(env): - def test_getenv(var, default=None): - return env.get(var, saved_getenv(var, default)) - - return test_getenv - - def mock_glob(expected_lib): - def test_glob(pattern): - if fnmatch.fnmatch(expected_lib, pattern): - return [expected_lib] - return [] - - return test_glob - - for package_manager, expected_lib in managed_paths.items(): - if package_manager == "brew": - env = {"HOMEBREW_PREFIX": "/test/homebrew/prefix"} - else: - env = {"HOMEBREW_PREFIX": ""} - with patch.object(os, "getenv", mock_getenv(env)): - with patch.object(glob, "glob", mock_glob(expected_lib)): - lib_path = _find_libcrypto() - - self.assertEqual(expected_lib, lib_path) - - # On Big Sur, there's nothing else to fall back on. 
- with patch.object(glob, "glob", lambda _: []): - with self.assertRaises(OSError): - lib_path = _find_libcrypto() - - @patch.object(ctypes.util, "find_library", lambda a: None) - @patch.object(glob, "glob", lambda a: []) - @patch.object(sys, "platform", "unknown") - @patch.object(salt.utils.platform, "is_darwin", lambda: False) - def test_find_libcrypto_unsupported(self): - """ - Ensure that _find_libcrypto works correctly on an unsupported host OS. - """ - with self.assertRaises(OSError): - _find_libcrypto() - - def test_load_libcrypto(self): - """ - Test _load_libcrypto generically. - """ - lib = _load_libcrypto() - self.assertTrue(isinstance(lib, ctypes.CDLL)) - # Try to cover both pre and post OpenSSL 1.1. - self.assertTrue( - hasattr(lib, "OpenSSL_version_num") - or hasattr(lib, "OPENSSL_init_crypto") - or hasattr(lib, "OPENSSL_no_config") - ) diff --git a/tests/unit/utils/test_templates.py b/tests/unit/utils/test_templates.py deleted file mode 100644 index 4ba2f52d7b3e..000000000000 --- a/tests/unit/utils/test_templates.py +++ /dev/null @@ -1,440 +0,0 @@ -""" -Unit tests for salt.utils.templates.py -""" -import logging -import os -import sys -from collections import OrderedDict -from pathlib import PurePath, PurePosixPath - -import pytest - -import salt.utils.files -import salt.utils.templates -from tests.support.helpers import with_tempdir -from tests.support.mock import patch -from tests.support.unit import TestCase - -try: - import Cheetah as _ - - HAS_CHEETAH = True -except ImportError: - HAS_CHEETAH = False - -log = logging.getLogger(__name__) - - -class RenderTestCase(TestCase): - def setUp(self): - # Default context for salt.utils.templates.render_*_tmpl to work - self.context = { - "opts": {"cachedir": "/D", "__cli": "salt"}, - "saltenv": None, - } - - ### Tests for Jinja (whitespace-friendly) - def test_render_jinja_sanity(self): - tmpl = """OK""" - res = salt.utils.templates.render_jinja_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") 
- - def test_render_jinja_evaluate(self): - tmpl = """{{ "OK" }}""" - res = salt.utils.templates.render_jinja_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - def test_render_jinja_evaluate_multi(self): - tmpl = """{% if 1 -%}OK{%- endif %}""" - res = salt.utils.templates.render_jinja_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - def test_render_jinja_variable(self): - tmpl = """{{ var }}""" - - ctx = dict(self.context) - ctx["var"] = "OK" - res = salt.utils.templates.render_jinja_tmpl(tmpl, ctx) - self.assertEqual(res, "OK") - - def test_render_jinja_tojson_sorted(self): - templ = """thing: {{ var|tojson(sort_keys=True) }}""" - expected = """thing: {"x": "xxx", "y": "yyy", "z": "zzz"}""" - - with patch.dict(self.context, {"var": {"z": "zzz", "y": "yyy", "x": "xxx"}}): - res = salt.utils.templates.render_jinja_tmpl(templ, self.context) - - assert res == expected - - def test_render_jinja_tojson_unsorted(self): - templ = """thing: {{ var|tojson(sort_keys=False) }}""" - expected = """thing: {"z": "zzz", "x": "xxx", "y": "yyy"}""" - - # Values must be added to the dict in the expected order. This is - # only necessary for older Pythons that don't remember dict order. 
- d = OrderedDict() - d["z"] = "zzz" - d["x"] = "xxx" - d["y"] = "yyy" - - with patch.dict(self.context, {"var": d}): - res = salt.utils.templates.render_jinja_tmpl(templ, self.context) - - assert res == expected - - ### Tests for mako template - def test_render_mako_sanity(self): - tmpl = """OK""" - res = salt.utils.templates.render_mako_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - def test_render_mako_evaluate(self): - tmpl = """${ "OK" }""" - res = salt.utils.templates.render_mako_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - def test_render_mako_evaluate_multi(self): - tmpl = """ - % if 1: - OK - % endif - """ - res = salt.utils.templates.render_mako_tmpl(tmpl, dict(self.context)) - stripped = res.strip() - self.assertEqual(stripped, "OK") - - def test_render_mako_variable(self): - tmpl = """${ var }""" - - ctx = dict(self.context) - ctx["var"] = "OK" - res = salt.utils.templates.render_mako_tmpl(tmpl, ctx) - self.assertEqual(res, "OK") - - ### Tests for wempy template - @pytest.mark.skipif( - sys.version_info > (3,), - reason="The wempy module is currently unsupported under Python3", - ) - def test_render_wempy_sanity(self): - tmpl = """OK""" - res = salt.utils.templates.render_wempy_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - @pytest.mark.skipif( - sys.version_info > (3,), - reason="The wempy module is currently unsupported under Python3", - ) - def test_render_wempy_evaluate(self): - tmpl = """{{="OK"}}""" - res = salt.utils.templates.render_wempy_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - @pytest.mark.skipif( - sys.version_info > (3,), - reason="The wempy module is currently unsupported under Python3", - ) - def test_render_wempy_evaluate_multi(self): - tmpl = """{{if 1:}}OK{{pass}}""" - res = salt.utils.templates.render_wempy_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - @pytest.mark.skipif( - sys.version_info > (3,), - reason="The wempy module is currently 
unsupported under Python3", - ) - def test_render_wempy_variable(self): - tmpl = """{{=var}}""" - - ctx = dict(self.context) - ctx["var"] = "OK" - res = salt.utils.templates.render_wempy_tmpl(tmpl, ctx) - self.assertEqual(res, "OK") - - ### Tests for genshi template (xml-based) - def test_render_genshi_sanity(self): - tmpl = """OK""" - res = salt.utils.templates.render_genshi_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - def test_render_genshi_evaluate(self): - tmpl = """${ "OK" }""" - res = salt.utils.templates.render_genshi_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - def test_render_genshi_evaluate_condition(self): - tmpl = """OK""" - res = salt.utils.templates.render_genshi_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - def test_render_genshi_variable(self): - tmpl = """$var""" - - ctx = dict(self.context) - ctx["var"] = "OK" - res = salt.utils.templates.render_genshi_tmpl(tmpl, ctx) - self.assertEqual(res, "OK") - - def test_render_genshi_variable_replace(self): - tmpl = """not ok""" - - ctx = dict(self.context) - ctx["var"] = "OK" - res = salt.utils.templates.render_genshi_tmpl(tmpl, ctx) - self.assertEqual(res, "OK") - - ### Tests for cheetah template (line-oriented and xml-friendly) - @pytest.mark.skipif(not HAS_CHEETAH, reason="The Cheetah Python module is missing.") - def test_render_cheetah_sanity(self): - tmpl = """OK""" - res = salt.utils.templates.render_cheetah_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - @pytest.mark.skipif(not HAS_CHEETAH, reason="The Cheetah Python module is missing.") - def test_render_cheetah_evaluate(self): - tmpl = """<%="OK"%>""" - res = salt.utils.templates.render_cheetah_tmpl(tmpl, dict(self.context)) - self.assertEqual(res, "OK") - - @pytest.mark.skipif(not HAS_CHEETAH, reason="The Cheetah Python module is missing.") - def test_render_cheetah_evaluate_xml(self): - tmpl = """ - <% if 1: %> - OK - <% pass %> - """ - res = 
salt.utils.templates.render_cheetah_tmpl(tmpl, dict(self.context)) - stripped = res.strip() - self.assertEqual(stripped, "OK") - - @pytest.mark.skipif(not HAS_CHEETAH, reason="The Cheetah Python module is missing.") - def test_render_cheetah_evaluate_text(self): - tmpl = """ - #if 1 - OK - #end if - """ - - res = salt.utils.templates.render_cheetah_tmpl(tmpl, dict(self.context)) - stripped = res.strip() - self.assertEqual(stripped, "OK") - - @pytest.mark.skipif(not HAS_CHEETAH, reason="The Cheetah Python module is missing.") - def test_render_cheetah_variable(self): - tmpl = """$var""" - - ctx = dict(self.context) - ctx["var"] = "OK" - res = salt.utils.templates.render_cheetah_tmpl(tmpl, ctx) - self.assertEqual(res.strip(), "OK") - - def test_render_jinja_cve_2021_25283(self): - tmpl = """{{ [].__class__ }}""" - ctx = dict(self.context) - ctx["var"] = "OK" - with pytest.raises(salt.exceptions.SaltRenderError): - res = salt.utils.templates.render_jinja_tmpl(tmpl, ctx) - - -class MockRender: - def __call__(self, tplstr, context, tmplpath=None): - self.tplstr = tplstr - self.context = context - self.tmplpath = tmplpath - return tplstr - - -class WrapRenderTestCase(TestCase): - def assertDictContainsAll(self, actual, **expected): - """Make sure dictionary contains at least all expected values""" - actual = {key: actual[key] for key in expected if key in actual} - self.assertEqual(expected, actual) - - def _test_generated_sls_context(self, tmplpath, sls, **expected): - """Generic SLS Context Test""" - # DeNormalize tmplpath - tmplpath = str(PurePath(PurePosixPath(tmplpath))) - if tmplpath.startswith("\\"): - tmplpath = "C:{}".format(tmplpath) - expected["tplpath"] = tmplpath - actual = salt.utils.templates.generate_sls_context(tmplpath, sls) - self.assertDictContainsAll(actual, **expected) - - @patch("salt.utils.templates.generate_sls_context") - @with_tempdir() - def test_sls_context_call(self, tempdir, generate_sls_context): - """Check that generate_sls_context is 
called with proper parameters""" - sls = "foo.bar" - tmplpath = "/tmp/foo/bar.sls" - - slsfile = os.path.join(tempdir, "foo") - with salt.utils.files.fopen(slsfile, "w") as fp: - fp.write("{{ slspath }}") - context = {"opts": {}, "saltenv": "base", "sls": sls} - render = MockRender() - wrapped = salt.utils.templates.wrap_tmpl_func(render) - res = wrapped(slsfile, context=context, tmplpath=tmplpath) - generate_sls_context.assert_called_with(tmplpath, sls) - - @patch("salt.utils.templates.generate_sls_context") - @with_tempdir() - def test_sls_context_no_call(self, tempdir, generate_sls_context): - """Check that generate_sls_context is not called if sls is not set""" - sls = "foo.bar" - tmplpath = "/tmp/foo/bar.sls" - - slsfile = os.path.join(tempdir, "foo") - with salt.utils.files.fopen(slsfile, "w") as fp: - fp.write("{{ slspath }}") - context = {"opts": {}, "saltenv": "base"} - render = MockRender() - wrapped = salt.utils.templates.wrap_tmpl_func(render) - res = wrapped(slsfile, context=context, tmplpath=tmplpath) - generate_sls_context.assert_not_called() - - def test_generate_sls_context__top_level(self): - """generate_sls_context - top_level Use case""" - self._test_generated_sls_context( - "/tmp/boo.sls", - "boo", - tplfile="boo.sls", - tpldir=".", - tpldot="", - slsdotpath="", - slscolonpath="", - sls_path="", - slspath="", - ) - - def test_generate_sls_context__one_level_init_implicit(self): - """generate_sls_context - Basic one level with implicit init.sls""" - self._test_generated_sls_context( - "/tmp/foo/init.sls", - "foo", - tplfile="foo/init.sls", - tpldir="foo", - tpldot="foo", - slsdotpath="foo", - slscolonpath="foo", - sls_path="foo", - slspath="foo", - ) - - def test_generate_sls_context__one_level_init_explicit(self): - """generate_sls_context - Basic one level with explicit init.sls""" - self._test_generated_sls_context( - "/tmp/foo/init.sls", - "foo.init", - tplfile="foo/init.sls", - tpldir="foo", - tpldot="foo", - slsdotpath="foo", - 
slscolonpath="foo", - sls_path="foo", - slspath="foo", - ) - - def test_generate_sls_context__one_level(self): - """generate_sls_context - Basic one level with name""" - self._test_generated_sls_context( - "/tmp/foo/boo.sls", - "foo.boo", - tplfile="foo/boo.sls", - tpldir="foo", - tpldot="foo", - slsdotpath="foo", - slscolonpath="foo", - sls_path="foo", - slspath="foo", - ) - - def test_generate_sls_context__one_level_repeating(self): - """generate_sls_context - Basic one level with name same as dir - - (Issue #56410) - """ - self._test_generated_sls_context( - "/tmp/foo/foo.sls", - "foo.foo", - tplfile="foo/foo.sls", - tpldir="foo", - tpldot="foo", - slsdotpath="foo", - slscolonpath="foo", - sls_path="foo", - slspath="foo", - ) - - def test_generate_sls_context__two_level_init_implicit(self): - """generate_sls_context - Basic two level with implicit init.sls""" - self._test_generated_sls_context( - "/tmp/foo/bar/init.sls", - "foo.bar", - tplfile="foo/bar/init.sls", - tpldir="foo/bar", - tpldot="foo.bar", - slsdotpath="foo.bar", - slscolonpath="foo:bar", - sls_path="foo_bar", - slspath="foo/bar", - ) - - def test_generate_sls_context__two_level_init_explicit(self): - """generate_sls_context - Basic two level with explicit init.sls""" - self._test_generated_sls_context( - "/tmp/foo/bar/init.sls", - "foo.bar.init", - tplfile="foo/bar/init.sls", - tpldir="foo/bar", - tpldot="foo.bar", - slsdotpath="foo.bar", - slscolonpath="foo:bar", - sls_path="foo_bar", - slspath="foo/bar", - ) - - def test_generate_sls_context__two_level(self): - """generate_sls_context - Basic two level with name""" - self._test_generated_sls_context( - "/tmp/foo/bar/boo.sls", - "foo.bar.boo", - tplfile="foo/bar/boo.sls", - tpldir="foo/bar", - tpldot="foo.bar", - slsdotpath="foo.bar", - slscolonpath="foo:bar", - sls_path="foo_bar", - slspath="foo/bar", - ) - - def test_generate_sls_context__two_level_repeating(self): - """generate_sls_context - Basic two level with name same as dir - - (Issue 
#56410) - """ - self._test_generated_sls_context( - "/tmp/foo/foo/foo.sls", - "foo.foo.foo", - tplfile="foo/foo/foo.sls", - tpldir="foo/foo", - tpldot="foo.foo", - slsdotpath="foo.foo", - slscolonpath="foo:foo", - sls_path="foo_foo", - slspath="foo/foo", - ) - - @pytest.mark.skip_on_windows - def test_generate_sls_context__backslash_in_path(self): - """generate_sls_context - Handle backslash in path on non-windows""" - self._test_generated_sls_context( - "/tmp/foo/foo\\foo.sls", - "foo.foo\\foo", - tplfile="foo/foo\\foo.sls", - tpldir="foo", - tpldot="foo", - slsdotpath="foo", - slscolonpath="foo", - sls_path="foo", - slspath="foo", - ) diff --git a/tests/unit/utils/test_verify.py b/tests/unit/utils/test_verify.py deleted file mode 100644 index 4eb4cfb04109..000000000000 --- a/tests/unit/utils/test_verify.py +++ /dev/null @@ -1,393 +0,0 @@ -""" -Test the verification routines -""" - -import ctypes -import getpass -import os -import shutil -import socket -import stat -import sys -import tempfile - -import pytest - -import salt.utils.files -import salt.utils.platform -from salt.utils.verify import ( - check_max_open_files, - check_user, - clean_path, - log, - valid_id, - verify_env, - verify_log, - verify_log_files, - verify_logs_filter, - verify_socket, - zmq_version, -) -from tests.support.helpers import TstSuiteLoggingHandler -from tests.support.mock import MagicMock, patch -from tests.support.runtests import RUNTIME_VARS -from tests.support.unit import TestCase - -if sys.platform.startswith("win"): - import win32file -else: - import resource - - -class TestVerify(TestCase): - """ - Verify module tests - """ - - def test_valid_id_exception_handler(self): - """ - Ensure we just return False if we pass in invalid or undefined paths. 
- Refs #8259 - """ - opts = {"pki_dir": "/tmp/whatever"} - self.assertFalse(valid_id(opts, None)) - - def test_valid_id_pathsep(self): - """ - Path separators in id should make it invalid - """ - opts = {"pki_dir": "/tmp/whatever"} - # We have to test both path separators because os.path.normpath will - # convert forward slashes to backslashes on Windows. - for pathsep in ("/", "\\"): - self.assertFalse(valid_id(opts, pathsep.join(("..", "foobar")))) - - def test_zmq_verify(self): - self.assertTrue(zmq_version()) - - def test_zmq_verify_insufficient(self): - import zmq - - with patch.object(zmq, "__version__", "2.1.0"): - self.assertFalse(zmq_version()) - - def test_user(self): - self.assertTrue(check_user(getpass.getuser())) - - def test_no_user(self): - # Catch sys.stderr here since no logging is configured and - # check_user WILL write to sys.stderr - class FakeWriter: - def __init__(self): - self.output = "" - self.errors = "strict" - - def write(self, data): - self.output += data - - def flush(self): - pass - - stderr = sys.stderr - writer = FakeWriter() - sys.stderr = writer - try: - # Now run the test - if sys.platform.startswith("win"): - self.assertTrue(check_user("nouser")) - else: - with self.assertRaises(SystemExit): - self.assertFalse(check_user("nouser")) - finally: - # Restore sys.stderr - sys.stderr = stderr - if writer.output != 'CRITICAL: User not found: "nouser"\n': - # If there's a different error catch, write it to sys.stderr - sys.stderr.write(writer.output) - - @pytest.mark.skip_on_windows(reason="No verify_env Windows") - def test_verify_env(self): - root_dir = tempfile.mkdtemp(dir=RUNTIME_VARS.TMP) - var_dir = os.path.join(root_dir, "var", "log", "salt") - key_dir = os.path.join(root_dir, "key_dir") - verify_env([var_dir], getpass.getuser(), root_dir=root_dir) - self.assertTrue(os.path.exists(var_dir)) - dir_stat = os.stat(var_dir) - self.assertEqual(dir_stat.st_uid, os.getuid()) - self.assertEqual(dir_stat.st_mode & stat.S_IRWXU, 
stat.S_IRWXU) - self.assertEqual(dir_stat.st_mode & stat.S_IRWXG, 40) - self.assertEqual(dir_stat.st_mode & stat.S_IRWXO, 5) - - @pytest.mark.requires_network(only_local_network=True) - def test_verify_socket(self): - self.assertTrue(verify_socket("", 18000, 18001)) - if socket.has_ipv6: - # Only run if Python is built with IPv6 support; otherwise - # this will just fail. - try: - self.assertTrue(verify_socket("::", 18000, 18001)) - except OSError: - # Python has IPv6 enabled, but the system cannot create - # IPv6 sockets (otherwise the test would return a bool) - # - skip the test - # - # FIXME - possibly emit a message that the system does - # not support IPv6. - pass - - def test_max_open_files(self): - with TstSuiteLoggingHandler() as handler: - logmsg_dbg = "DEBUG:This salt-master instance has accepted {0} minion keys." - logmsg_chk = ( - "{0}:The number of accepted minion keys({1}) should be lower " - "than 1/4 of the max open files soft setting({2}). According " - "to the system's hard limit, there's still a margin of {3} " - "to raise the salt's max_open_files setting. Please consider " - "raising this value." - ) - logmsg_crash = ( - "{0}:The number of accepted minion keys({1}) should be lower " - "than 1/4 of the max open files soft setting({2}). " - "salt-master will crash pretty soon! According to the " - "system's hard limit, there's still a margin of {3} to " - "raise the salt's max_open_files setting. Please consider " - "raising this value." - ) - if sys.platform.startswith("win"): - logmsg_crash = ( - "{0}:The number of accepted minion keys({1}) should be lower " - "than 1/4 of the max open files soft setting({2}). " - "salt-master will crash pretty soon! Please consider " - "raising this value." 
- ) - - if sys.platform.startswith("win"): - # Check the Windows API for more detail on this - # http://msdn.microsoft.com/en-us/library/xt874334(v=vs.71).aspx - # and the python binding http://timgolden.me.uk/pywin32-docs/win32file.html - mof_s = mof_h = win32file._getmaxstdio() - else: - mof_s, mof_h = resource.getrlimit(resource.RLIMIT_NOFILE) - tempdir = tempfile.mkdtemp(prefix="fake-keys") - keys_dir = os.path.join(tempdir, "minions") - os.makedirs(keys_dir) - - mof_test = 256 - - if sys.platform.startswith("win"): - win32file._setmaxstdio(mof_test) - else: - resource.setrlimit(resource.RLIMIT_NOFILE, (mof_test, mof_h)) - - try: - prev = 0 - for newmax, level in ( - (24, None), - (66, "INFO"), - (127, "WARNING"), - (196, "CRITICAL"), - ): - - for n in range(prev, newmax): - kpath = os.path.join(keys_dir, str(n)) - with salt.utils.files.fopen(kpath, "w") as fp_: - fp_.write(str(n)) - - opts = {"max_open_files": newmax, "pki_dir": tempdir} - - check_max_open_files(opts) - - if level is None: - # No log message is triggered, only the DEBUG one which - # tells us how many minion keys were accepted. 
- self.assertEqual([logmsg_dbg.format(newmax)], handler.messages) - else: - self.assertIn(logmsg_dbg.format(newmax), handler.messages) - self.assertIn( - logmsg_chk.format( - level, - newmax, - mof_test, - mof_test - newmax - if sys.platform.startswith("win") - else mof_h - newmax, - ), - handler.messages, - ) - handler.clear() - prev = newmax - - newmax = mof_test - for n in range(prev, newmax): - kpath = os.path.join(keys_dir, str(n)) - with salt.utils.files.fopen(kpath, "w") as fp_: - fp_.write(str(n)) - - opts = {"max_open_files": newmax, "pki_dir": tempdir} - - check_max_open_files(opts) - self.assertIn(logmsg_dbg.format(newmax), handler.messages) - self.assertIn( - logmsg_crash.format( - "CRITICAL", - newmax, - mof_test, - mof_test - newmax - if sys.platform.startswith("win") - else mof_h - newmax, - ), - handler.messages, - ) - handler.clear() - except OSError as err: - if err.errno == 24: - # Too many open files - self.skipTest("We've hit the max open files setting") - raise - finally: - if sys.platform.startswith("win"): - win32file._setmaxstdio(mof_h) - else: - resource.setrlimit(resource.RLIMIT_NOFILE, (mof_s, mof_h)) - shutil.rmtree(tempdir) - - def test_verify_log(self): - """ - Test that verify_log works as expected - """ - message = ( - "Insecure logging configuration detected! Sensitive data may be logged." 
- ) - - mock_cheese = MagicMock() - with patch.object(log, "warning", mock_cheese): - verify_log({"log_level": "cheeseshop"}) - mock_cheese.assert_called_once_with(message) - - mock_trace = MagicMock() - with patch.object(log, "warning", mock_trace): - verify_log({"log_level": "trace"}) - mock_trace.assert_called_once_with(message) - - mock_none = MagicMock() - with patch.object(log, "warning", mock_none): - verify_log({}) - mock_none.assert_called_once_with(message) - - mock_info = MagicMock() - with patch.object(log, "warning", mock_info): - verify_log({"log_level": "info"}) - self.assertTrue(mock_info.call_count == 0) - - -class TestVerifyLog(TestCase): - def setUp(self): - self.tmpdir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.tmpdir) - - def test_verify_logs_filter(self): - filtered = verify_logs_filter( - ["udp://foo", "tcp://bar", "/tmp/foo", "file://tmp/bar"] - ) - assert filtered == ["/tmp/foo"], filtered - - @pytest.mark.skip_on_windows(reason="Not applicable on Windows") - def test_verify_log_files_udp_scheme(self): - verify_log_files(["udp://foo"], getpass.getuser()) - self.assertFalse(os.path.isdir(os.path.join(os.getcwd(), "udp:"))) - - @pytest.mark.skip_on_windows(reason="Not applicable on Windows") - def test_verify_log_files_tcp_scheme(self): - verify_log_files(["udp://foo"], getpass.getuser()) - self.assertFalse(os.path.isdir(os.path.join(os.getcwd(), "tcp:"))) - - @pytest.mark.skip_on_windows(reason="Not applicable on Windows") - def test_verify_log_files_file_scheme(self): - verify_log_files(["file://{}"], getpass.getuser()) - self.assertFalse(os.path.isdir(os.path.join(os.getcwd(), "file:"))) - - @pytest.mark.skip_on_windows(reason="Not applicable on Windows") - def test_verify_log_files(self): - path = os.path.join(self.tmpdir, "foo", "bar.log") - self.assertFalse(os.path.exists(path)) - verify_log_files([path], getpass.getuser()) - self.assertTrue(os.path.exists(path)) - - -class TestCleanPath(TestCase): - """ - 
salt.utils.clean_path works as expected - """ - - def setUp(self): - self.tmpdir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.tmpdir) - - def test_clean_path_valid(self): - path_a = os.path.join(self.tmpdir, "foo") - path_b = os.path.join(self.tmpdir, "foo", "bar") - assert clean_path(path_a, path_b) == path_b - - def test_clean_path_invalid(self): - path_a = os.path.join(self.tmpdir, "foo") - path_b = os.path.join(self.tmpdir, "baz", "bar") - assert clean_path(path_a, path_b) == "" - - -__CSL = None - - -def symlink(source, link_name): - """ - symlink(source, link_name) Creates a symbolic link pointing to source named - link_name - """ - global __CSL - if __CSL is None: - csl = ctypes.windll.kernel32.CreateSymbolicLinkW - csl.argtypes = (ctypes.c_wchar_p, ctypes.c_wchar_p, ctypes.c_uint32) - csl.restype = ctypes.c_ubyte - __CSL = csl - flags = 0 - if source is not None and os.path.isdir(source): - flags = 1 - if __CSL(link_name, source, flags) == 0: - raise ctypes.WinError() - - -class TestCleanPathLink(TestCase): - """ - Ensure salt.utils.clean_path works with symlinked directories and files - """ - - def setUp(self): - self.tmpdir = tempfile.mkdtemp() - self.to_path = os.path.join(self.tmpdir, "linkto") - self.from_path = os.path.join(self.tmpdir, "linkfrom") - if salt.utils.platform.is_windows(): - kwargs = {} - else: - kwargs = {"target_is_directory": True} - if salt.utils.platform.is_windows(): - symlink(self.to_path, self.from_path, **kwargs) - else: - os.symlink(self.to_path, self.from_path, **kwargs) - - def tearDown(self): - shutil.rmtree(self.tmpdir) - - def test_clean_path_symlinked_src(self): - test_path = os.path.join(self.from_path, "test") - expect_path = os.path.join(self.to_path, "test") - ret = clean_path(self.from_path, test_path) - assert ret == expect_path, "{} is not {}".format(ret, expect_path) - - def test_clean_path_symlinked_tgt(self): - test_path = os.path.join(self.to_path, "test") - expect_path = 
os.path.join(self.to_path, "test") - ret = clean_path(self.from_path, test_path) - assert ret == expect_path, "{} is not {}".format(ret, expect_path) diff --git a/tests/unit/utils/test_win_network.py b/tests/unit/utils/test_win_network.py deleted file mode 100644 index 12f5cd877dbb..000000000000 --- a/tests/unit/utils/test_win_network.py +++ /dev/null @@ -1,300 +0,0 @@ -import pytest - -import salt.utils.win_network as win_network -from tests.support.mock import MagicMock, patch -from tests.support.unit import TestCase - - -class PhysicalAddress: - def __init__(self, address): - self.address = address - - def ToString(self): - return str(self.address) - - -class Interface: - """ - Mocked interface object - """ - - def __init__( - self, - i_address="02D5F1DD31E0", - i_description="Dell GigabitEthernet", - i_id="{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", - i_name="Ethernet", - i_receive_only=False, - i_status=1, - i_type=6, - ): - self.PhysicalAddress = PhysicalAddress(i_address) - self.Description = i_description - self.Id = i_id - self.Name = i_name - self.NetworkInterfaceType = i_type - self.IsReceiveOnly = i_receive_only - self.OperationalStatus = i_status - - def GetPhysicalAddress(self): - return self.PhysicalAddress - - -@pytest.mark.skip_unless_on_windows -class WinNetworkTestCase(TestCase): - def setUp(self): - self.mock_ip_base = MagicMock( - return_value={ - "dns_enabled": False, - "dns_suffix": "", - "dynamic_dns_enabled": False, - } - ) - self.mock_unicast = MagicMock( - return_value={ - "ip_addresses": [ - { - "address": "172.18.87.49", - "broadcast": "172.18.87.63", - "loopback": "127.0.0.1", - "netmask": "255.255.255.240", - "prefix_length": 28, - "prefix_origin": "Manual", - "suffix_origin": "Manual", - } - ], - "ipv6_addresses": [ - { - "address": "fe80::e8a4:1224:5548:2b81", - "interface_index": 32, - "prefix_length": 64, - "prefix_origin": "WellKnown", - "suffix_origin": "Router", - } - ], - } - ) - self.mock_gateway = MagicMock( - return_value={ - 
"ip_gateways": ["192.168.0.1"], - "ipv6_gateways": ["fe80::208:a2ff:fe0b:de70"], - } - ) - self.mock_dns = MagicMock( - return_value={ - "ip_dns": ["10.4.0.1", "10.1.0.1", "8.8.8.8"], - "ipv6_dns": ["2600:740a:1:304::1"], - } - ) - self.mock_multicast = MagicMock( - return_value={ - "ip_multicast": [ - "224.0.0.1", - "224.0.0.251", - "224.0.0.252", - "230.230.230.230", - "239.0.0.250", - "239.255.255.250", - ], - "ipv6_multicast": [ - "ff01::1", - "ff02::1", - "ff02::c", - "ff02::fb", - "ff02::1:3", - "ff02::1:ff0f:4c48", - "ff02::1:ffa6:f6e6", - ], - } - ) - self.mock_anycast = MagicMock( - return_value={"ip_anycast": [], "ipv6_anycast": []} - ) - self.mock_wins = MagicMock(return_value={"ip_wins": []}) - - def test_get_interface_info_dot_net(self): - expected = { - "Ethernet": { - "alias": "Ethernet", - "description": "Dell GigabitEthernet", - "dns_enabled": False, - "dns_suffix": "", - "dynamic_dns_enabled": False, - "id": "{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", - "ip_addresses": [ - { - "address": "172.18.87.49", - "broadcast": "172.18.87.63", - "loopback": "127.0.0.1", - "netmask": "255.255.255.240", - "prefix_length": 28, - "prefix_origin": "Manual", - "suffix_origin": "Manual", - } - ], - "ip_anycast": [], - "ip_dns": ["10.4.0.1", "10.1.0.1", "8.8.8.8"], - "ip_gateways": ["192.168.0.1"], - "ip_multicast": [ - "224.0.0.1", - "224.0.0.251", - "224.0.0.252", - "230.230.230.230", - "239.0.0.250", - "239.255.255.250", - ], - "ip_wins": [], - "ipv6_addresses": [ - { - "address": "fe80::e8a4:1224:5548:2b81", - "interface_index": 32, - "prefix_length": 64, - "prefix_origin": "WellKnown", - "suffix_origin": "Router", - } - ], - "ipv6_anycast": [], - "ipv6_dns": ["2600:740a:1:304::1"], - "ipv6_gateways": ["fe80::208:a2ff:fe0b:de70"], - "ipv6_multicast": [ - "ff01::1", - "ff02::1", - "ff02::c", - "ff02::fb", - "ff02::1:3", - "ff02::1:ff0f:4c48", - "ff02::1:ffa6:f6e6", - ], - "physical_address": "02:D5:F1:DD:31:E0", - "receive_only": False, - "status": "Up", - "type": 
"Ethernet", - } - } - - mock_int = MagicMock(return_value=[Interface()]) - with patch.object( - win_network, "_get_network_interfaces", mock_int - ), patch.object( - win_network, "_get_ip_base_properties", self.mock_ip_base - ), patch.object( - win_network, "_get_ip_unicast_info", self.mock_unicast - ), patch.object( - win_network, "_get_ip_gateway_info", self.mock_gateway - ), patch.object( - win_network, "_get_ip_dns_info", self.mock_dns - ), patch.object( - win_network, "_get_ip_multicast_info", self.mock_multicast - ), patch.object( - win_network, "_get_ip_anycast_info", self.mock_anycast - ), patch.object( - win_network, "_get_ip_wins_info", self.mock_wins - ): - - # ret = win_network._get_base_properties() - results = win_network.get_interface_info_dot_net() - - self.assertDictEqual(expected, results) - - def test_get_network_info(self): - expected = { - "Dell GigabitEthernet": { - "hwaddr": "02:D5:F1:DD:31:E0", - "inet": [ - { - "address": "172.18.87.49", - "broadcast": "172.18.87.63", - "gateway": "192.168.0.1", - "label": "Dell GigabitEthernet", - "netmask": "255.255.255.240", - } - ], - "inet6": [ - { - "address": "fe80::e8a4:1224:5548:2b81", - "gateway": "fe80::208:a2ff:fe0b:de70", - "prefixlen": 64, - } - ], - "up": True, - } - } - mock_int = MagicMock(return_value=[Interface()]) - with patch.object( - win_network, "_get_network_interfaces", mock_int - ), patch.object( - win_network, "_get_ip_base_properties", self.mock_ip_base - ), patch.object( - win_network, "_get_ip_unicast_info", self.mock_unicast - ), patch.object( - win_network, "_get_ip_gateway_info", self.mock_gateway - ), patch.object( - win_network, "_get_ip_dns_info", self.mock_dns - ), patch.object( - win_network, "_get_ip_multicast_info", self.mock_multicast - ), patch.object( - win_network, "_get_ip_anycast_info", self.mock_anycast - ), patch.object( - win_network, "_get_ip_wins_info", self.mock_wins - ): - - # ret = win_network._get_base_properties() - results = 
win_network.get_interface_info() - - self.assertDictEqual(expected, results) - - def test__get_base_properties_tap_adapter(self): - """ - Adapter Type 53 is apparently an undocumented type corresponding to - OpenVPN TAP Adapters and possibly other TAP Adapters. This test makes - sure the win_network util will catch that. - https://github.com/saltstack/salt/issues/56196 - https://github.com/saltstack/salt/issues/56275 - """ - i_face = Interface( - i_address="03DE4D0713FA", - i_description="Windows TAP Adapter", - i_id="{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", - i_name="Windows TAP Adapter", - i_receive_only=False, - i_status=1, - i_type=53, - ) - expected = { - "alias": "Windows TAP Adapter", - "description": "Windows TAP Adapter", - "id": "{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", - "receive_only": False, - "physical_address": "03:DE:4D:07:13:FA", - "status": "Up", - "type": "TAPAdapter", - } - results = win_network._get_base_properties(i_face=i_face) - self.assertDictEqual(expected, results) - - def test__get_base_properties_undefined_adapter(self): - """ - The Adapter Type 53 may be an arbitrary number assigned by OpenVPN. - This will test the ability to avoid stack tracing on an undefined - adapter type. If one is encountered, just use the description. 
- """ - i_face = Interface( - i_address="03DE4D0713FA", - i_description="Undefined Adapter", - i_id="{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", - i_name="Undefined", - i_receive_only=False, - i_status=1, - i_type=50, - ) - expected = { - "alias": "Undefined", - "description": "Undefined Adapter", - "id": "{C5F468C0-DD5F-4C2B-939F-A411DCB5DE16}", - "receive_only": False, - "physical_address": "03:DE:4D:07:13:FA", - "status": "Up", - "type": "Undefined Adapter", - } - results = win_network._get_base_properties(i_face=i_face) - self.assertDictEqual(expected, results) diff --git a/tools/__init__.py b/tools/__init__.py index 429e1d5afb0d..1d46e63b2d5b 100644 --- a/tools/__init__.py +++ b/tools/__init__.py @@ -1,14 +1,53 @@ import logging +import pathlib +import sys import ptscripts +from ptscripts.parser import DefaultRequirementsConfig +from ptscripts.virtualenv import VirtualEnvConfig +REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent +REQUIREMENTS_FILES_PATH = REPO_ROOT / "requirements" +STATIC_REQUIREMENTS_PATH = REQUIREMENTS_FILES_PATH / "static" +CI_REQUIREMENTS_FILES_PATH = ( + STATIC_REQUIREMENTS_PATH / "ci" / "py{}.{}".format(*sys.version_info) +) +DEFAULT_REQS_CONFIG = DefaultRequirementsConfig( + pip_args=[ + f"--constraint={REQUIREMENTS_FILES_PATH / 'constraints.txt'}", + ], + requirements_files=[ + CI_REQUIREMENTS_FILES_PATH / "tools.txt", + ], +) +RELEASE_VENV_CONFIG = VirtualEnvConfig( + pip_args=[ + f"--constraint={REQUIREMENTS_FILES_PATH / 'constraints.txt'}", + ], + requirements_files=[ + CI_REQUIREMENTS_FILES_PATH / "tools-virustotal.txt", + ], + add_as_extra_site_packages=True, +) +ptscripts.set_default_requirements_config(DEFAULT_REQS_CONFIG) ptscripts.register_tools_module("tools.changelog") ptscripts.register_tools_module("tools.ci") ptscripts.register_tools_module("tools.docs") ptscripts.register_tools_module("tools.pkg") -ptscripts.register_tools_module("tools.pkgrepo") -ptscripts.register_tools_module("tools.pre_commit") 
-ptscripts.register_tools_module("tools.release") +ptscripts.register_tools_module("tools.pkg.repo") +ptscripts.register_tools_module("tools.pkg.build") +ptscripts.register_tools_module("tools.pkg.repo.create") +ptscripts.register_tools_module("tools.pkg.repo.publish") +ptscripts.register_tools_module("tools.precommit") +ptscripts.register_tools_module("tools.precommit.changelog") +ptscripts.register_tools_module("tools.precommit.workflows") +ptscripts.register_tools_module("tools.precommit.docs") +ptscripts.register_tools_module("tools.precommit.docstrings") +ptscripts.register_tools_module("tools.precommit.filemap") +ptscripts.register_tools_module("tools.precommit.loader") +ptscripts.register_tools_module("tools.release", venv_config=RELEASE_VENV_CONFIG) +ptscripts.register_tools_module("tools.testsuite") +ptscripts.register_tools_module("tools.testsuite.download") ptscripts.register_tools_module("tools.vm") for name in ("boto3", "botocore", "urllib3"): diff --git a/tools/changelog.py b/tools/changelog.py index 0a2762f62631..12bbba22d3c2 100644 --- a/tools/changelog.py +++ b/tools/changelog.py @@ -8,27 +8,14 @@ import logging import os import pathlib -import re import sys import textwrap +from jinja2 import Environment, FileSystemLoader from ptscripts import Context, command_group from tools.utils import REPO_ROOT, Version -CHANGELOG_LIKE_RE = re.compile(r"([\d]+)\.([a-z]+)$") -CHANGELOG_TYPES = ( - "removed", - "deprecated", - "changed", - "fixed", - "added", - "security", -) -CHANGELOG_ENTRY_RE = re.compile( - r"([\d]+|(CVE|cve)-[\d]{{4}}-[\d]+)\.({})(\.md)?$".format("|".join(CHANGELOG_TYPES)) -) - log = logging.getLogger(__name__) # Define the command group @@ -49,103 +36,6 @@ ) -@changelog.command( - name="pre-commit-checks", - arguments={ - "files": { - "nargs": "*", - } - }, -) -def check_changelog_entries(ctx: Context, files: list[pathlib.Path]): - """ - Run pre-commit checks on changelog snippets. 
- """ - docs_path = REPO_ROOT / "doc" - tests_integration_files_path = REPO_ROOT / "tests" / "integration" / "files" - changelog_entries_path = REPO_ROOT / "changelog" - exitcode = 0 - for entry in files: - path = pathlib.Path(entry).resolve() - # Is it under changelog/ - try: - path.relative_to(changelog_entries_path) - if path.name in (".keep", ".template.jinja"): - # This is the file we use so git doesn't delete the changelog/ directory - continue - # Is it named properly - if not CHANGELOG_ENTRY_RE.match(path.name): - ctx.error( - "The changelog entry '{}' should have one of the following extensions: {}.".format( - path.relative_to(REPO_ROOT), - ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), - ), - ) - exitcode = 1 - continue - if path.suffix != ".md": - ctx.error( - f"Please rename '{path.relative_to(REPO_ROOT)}' to " - f"'{path.relative_to(REPO_ROOT)}.md'" - ) - exitcode = 1 - continue - except ValueError: - # No, carry on - pass - # Does it look like a changelog entry - if CHANGELOG_LIKE_RE.match(path.name) and not CHANGELOG_ENTRY_RE.match( - path.name - ): - try: - # Is this under doc/ - path.relative_to(docs_path) - # Yes, carry on - continue - except ValueError: - # No, resume the check - pass - try: - # Is this under tests/integration/files - path.relative_to(tests_integration_files_path) - # Yes, carry on - continue - except ValueError: - # No, resume the check - pass - ctx.error( - "The changelog entry '{}' should have one of the following extensions: {}.".format( - path.relative_to(REPO_ROOT), - ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), - ) - ) - exitcode = 1 - continue - # Is it a changelog entry - if not CHANGELOG_ENTRY_RE.match(path.name): - # No? Carry on - continue - # Is the changelog entry in the right path? 
- try: - path.relative_to(changelog_entries_path) - except ValueError: - exitcode = 1 - ctx.error( - "The changelog entry '{}' should be placed under '{}/', not '{}'".format( - path.name, - changelog_entries_path.relative_to(REPO_ROOT), - path.relative_to(REPO_ROOT).parent, - ) - ) - if path.suffix != ".md": - ctx.error( - f"Please rename '{path.relative_to(REPO_ROOT)}' to " - f"'{path.relative_to(REPO_ROOT)}.md'" - ) - exitcode = 1 - ctx.exit(exitcode) - - def _get_changelog_contents(ctx: Context, version: Version): """ Return the full changelog generated by towncrier. @@ -174,8 +64,11 @@ def _get_pkg_changelog_contents(ctx: Context, version: Version): return changes -def _get_salt_version(ctx): - ret = ctx.run("python3", "salt/version.py", capture=True, check=False) +def _get_salt_version(ctx, next_release=False): + args = [] + if next_release: + args.append("--next-release") + ret = ctx.run("python3", "salt/version.py", *args, capture=True, check=False) if ret.returncode: ctx.error(ret.stderr.decode()) ctx.exit(1) @@ -260,7 +153,7 @@ def update_deb(ctx: Context, salt_version: Version, draft: bool = False): debian_changelog_path = "pkg/debian/changelog" tmp_debian_changelog_path = f"{debian_changelog_path}.1" with open(tmp_debian_changelog_path, "w") as wfp: - wfp.write(f"salt (1:{salt_version}) stable; urgency=medium\n\n") + wfp.write(f"salt ({salt_version}) stable; urgency=medium\n\n") wfp.write(formated) wfp.write( f"\n -- Salt Project Packaging {date}\n\n" @@ -295,13 +188,24 @@ def update_deb(ctx: Context, salt_version: Version, draft: bool = False): "release": { "help": "Update for an actual release and not just a temporary CI build.", }, + "template_only": { + "help": "Only generate a template file.", + }, + "next_release": { + "help": "Generate release notes for the next upcoming release.", + }, }, ) def update_release_notes( - ctx: Context, salt_version: Version, draft: bool = False, release: bool = False + ctx: Context, + salt_version: Version, + draft: 
bool = False, + release: bool = False, + template_only: bool = False, + next_release: bool = False, ): if salt_version is None: - salt_version = _get_salt_version(ctx) + salt_version = _get_salt_version(ctx, next_release=next_release) changes = _get_changelog_contents(ctx, salt_version) changes = "\n".join(changes.split("\n")[2:]) if salt_version.local: @@ -309,38 +213,80 @@ def update_release_notes( versions = {} for fpath in pathlib.Path("doc/topics/releases").glob("*.md"): versions[(Version(fpath.stem))] = fpath - release_notes_path = versions[sorted(versions)[-1]] + latest_version = sorted(versions)[-1] + release_notes_path = versions[latest_version] + version = ".".join(str(part) for part in latest_version.release) else: + version = ".".join(str(part) for part in salt_version.release) release_notes_path = pathlib.Path("doc/topics/releases") / "{}.md".format( - ".".join(str(part) for part in salt_version.release) + version ) - if not release_notes_path.exists(): - release_notes_path.write_text( + + template_release_path = ( + release_notes_path.parent / "templates" / f"{version}.md.template" + ) + if not template_release_path.exists(): + template_release_path.write_text( textwrap.dedent( f"""\ (release-{salt_version})= - # Salt {salt_version} release notes - UNRELEASED + # Salt {salt_version} release notes{{{{ unreleased }}}} + {{{{ warning }}}} + + + + + ## Changelog + {{{{ changelog }}}} """ ) ) - ctx.run("git", "add", str(release_notes_path)) - ctx.info(f"Created bare {release_notes_path} release notes file") - - existing = release_notes_path.read_text() + ctx.run("git", "add", str(template_release_path)) + ctx.info(f"Created template {template_release_path} release notes file") + if template_only: + # Only generate the template for a new release + return + unreleased = " - UNRELEASED" + warning = f""" + + """ if release is True: - existing = existing.replace(" - UNRELEASED", "") + unreleased = "" tmp_release_notes_path = ( release_notes_path.parent / 
f"{release_notes_path.name}.tmp" ) - tmp_release_notes_path.write_text(f"{existing}\n## Changelog\n{changes}") + + # render the release notes jinja template + environment = Environment(loader=FileSystemLoader(template_release_path.parent)) + template = environment.get_template(template_release_path.name) + content = template.render( + {"changelog": changes, "unreleased": unreleased, "warning": warning} + ) + + tmp_release_notes_path.write_text(content) try: contents = tmp_release_notes_path.read_text().strip() if draft: ctx.print(contents, soft_wrap=True) else: + new_release_file = False + if not release_notes_path.exists(): + new_release_file = True release_notes_path.write_text(contents) + if new_release_file: + ctx.run("git", "add", str(release_notes_path)) + ctx.info(f"Created bare {release_notes_path} release notes file") finally: os.remove(tmp_release_notes_path) diff --git a/tools/ci.py b/tools/ci.py index 64d6fa39452c..e4ef802d9f24 100644 --- a/tools/ci.py +++ b/tools/ci.py @@ -8,12 +8,32 @@ import logging import os import pathlib +import random +import shutil +import sys import time -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from ptscripts import Context, command_group import tools.utils +import tools.utils.gh + +if sys.version_info < (3, 11): + from typing_extensions import NotRequired, TypedDict +else: + from typing import NotRequired, TypedDict # pylint: disable=no-name-in-module + +try: + import boto3 +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise log = logging.getLogger(__name__) @@ -229,6 +249,9 @@ def runner_types(ctx: Context, event_name: str): "skip_pkg_tests": { "help": "Skip running the Salt Package tests", }, + "skip_pkg_download_tests": { + "help": "Skip running the Salt Package download tests", + }, "changed_files": { "help": ( "Path to '.json' file containing the 
payload of changed files " @@ -243,6 +266,7 @@ def define_jobs( changed_files: pathlib.Path, skip_tests: bool = False, skip_pkg_tests: bool = False, + skip_pkg_download_tests: bool = False, ): """ Set GH Actions 'jobs' output to know which jobs should run. @@ -267,18 +291,22 @@ def define_jobs( "lint": True, "test": True, "test-pkg": True, + "test-pkg-download": True, "prepare-release": True, "build-docs": True, "build-source-tarball": True, "build-deps-onedir": True, "build-salt-onedir": True, "build-pkgs": True, + "build-deps-ci": True, } if skip_tests: jobs["test"] = False if skip_pkg_tests: jobs["test-pkg"] = False + if skip_pkg_download_tests: + jobs["test-pkg-download"] = False if event_name != "pull_request": # In this case, all defined jobs should run @@ -292,6 +320,23 @@ def define_jobs( ) return + # This is a pull-request + + labels: list[str] = [] + gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None + if gh_event_path is not None: + try: + gh_event = json.loads(open(gh_event_path).read()) + except Exception as exc: + ctx.error( + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + ) + ctx.exit(1) + + labels.extend( + label[0] for label in _get_pr_test_labels_from_event_payload(gh_event) + ) + if not changed_files.exists(): ctx.error(f"The '{changed_files}' file does not exist.") ctx.error( @@ -338,15 +383,30 @@ def define_jobs( required_pkg_test_changes: set[str] = { changed_files_contents["pkg_tests"], + changed_files_contents["workflows"], + changed_files_contents["golden_images"], } if jobs["test-pkg"] and required_pkg_test_changes == {"false"}: + if "test:pkg" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write( + "The 'test-pkg' job is forcefully selected by the use of the 'test:pkg' label.\n" + ) + jobs["test-pkg"] = True + else: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("De-selecting the 'test-pkg' job.\n") + jobs["test-pkg"] = False + + if 
jobs["test-pkg-download"] and required_pkg_test_changes == {"false"}: with open(github_step_summary, "a", encoding="utf-8") as wfh: - wfh.write("De-selecting the 'test-pkg' job.\n") - jobs["test-pkg"] = False + wfh.write("De-selecting the 'test-pkg-download' job.\n") + jobs["test-pkg-download"] = False - if not jobs["test"] and not jobs["test-pkg"]: + if not jobs["test"] and not jobs["test-pkg"] and not jobs["test-pkg-download"]: with open(github_step_summary, "a", encoding="utf-8") as wfh: for job in ( + "build-deps-ci", "build-deps-onedir", "build-salt-onedir", "build-pkgs", @@ -362,18 +422,19 @@ def define_jobs( wfh.write("Selected Jobs:\n") for name, value in sorted(jobs.items()): wfh.write(f" - {name}: {value}\n") - wfh.write( - "\n
\nAll Changed Files (click me)\n
\n"
-        )
-        for path in sorted(json.loads(changed_files_contents["repo_files"])):
-            wfh.write(f"{path}\n")
-        wfh.write("
\n
\n") ctx.info("Writing 'jobs' to the github outputs file") with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"jobs={json.dumps(jobs)}\n") +class TestRun(TypedDict): + type: str + skip_code_coverage: bool + from_filenames: NotRequired[str] + selected_tests: NotRequired[dict[str, bool]] + + @ci.command( name="define-testrun", arguments={ @@ -408,10 +469,35 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): if TYPE_CHECKING: assert github_step_summary is not None - if event_name != "pull_request": + labels: list[str] = [] + gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None + if gh_event_path is not None: + try: + gh_event = json.loads(open(gh_event_path).read()) + except Exception as exc: + ctx.error( + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + ) + ctx.exit(1) + + labels.extend( + label[0] for label in _get_pr_test_labels_from_event_payload(gh_event) + ) + + if "test:coverage" in labels: + ctx.info("Writing 'testrun' to the github outputs file") + testrun = TestRun(type="full", skip_code_coverage=False) + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"testrun={json.dumps(testrun)}\n") + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write( + "Full test run chosen because the label `test:coverage` is set.\n" + ) + return + elif event_name != "pull_request": # In this case, a full test run is in order ctx.info("Writing 'testrun' to the github outputs file") - testrun = {"type": "full"} + testrun = TestRun(type="full", skip_code_coverage=False) with open(github_output, "a", encoding="utf-8") as wfh: wfh.write(f"testrun={json.dumps(testrun)}\n") @@ -419,6 +505,8 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): wfh.write(f"Full test run chosen due to event type of `{event_name}`.\n") return + # So, it's a pull request... 
+ if not changed_files.exists(): ctx.error(f"The '{changed_files}' file does not exist.") ctx.error( @@ -432,8 +520,7 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): ctx.error(f"Could not load the changed files from '{changed_files}': {exc}") ctx.exit(1) - # So, it's a pull request... - # Based on which files changed, or other things like PR comments we can + # Based on which files changed, or other things like PR labels we can # decide what to run, or even if the full test run should be running on the # pull request, etc... changed_pkg_requirements_files = json.loads( @@ -448,7 +535,7 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): "Full test run chosen because there was a change made " "to `cicd/golden-images.json`.\n" ) - testrun = {"type": "full"} + testrun = TestRun(type="full", skip_code_coverage=True) elif changed_pkg_requirements_files or changed_test_requirements_files: with open(github_step_summary, "a", encoding="utf-8") as wfh: wfh.write( @@ -463,15 +550,20 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): ): wfh.write(f"{path}\n") wfh.write("\n\n") - testrun = {"type": "full"} + testrun = TestRun(type="full", skip_code_coverage=True) + elif "test:full" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Full test run chosen because the label `test:full` is set.\n") + testrun = TestRun(type="full", skip_code_coverage=True) else: testrun_changed_files_path = tools.utils.REPO_ROOT / "testrun-changed-files.txt" - testrun = { - "type": "changed", - "from-filenames": str( + testrun = TestRun( + type="changed", + skip_code_coverage=True, + from_filenames=str( testrun_changed_files_path.relative_to(tools.utils.REPO_ROOT) ), - } + ) ctx.info(f"Writing {testrun_changed_files_path.name} ...") selected_changed_files = [] for fpath in json.loads(changed_files_contents["testrun_files"]): @@ -491,6 +583,28 @@ def 
define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): if testrun["type"] == "changed": with open(github_step_summary, "a", encoding="utf-8") as wfh: wfh.write("Partial test run chosen.\n") + testrun["selected_tests"] = { + "core": False, + "slow": False, + "fast": True, + "flaky": False, + } + if "test:slow" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Slow tests chosen by `test:slow` label.\n") + testrun["selected_tests"]["slow"] = True + if "test:core" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Core tests chosen by `test:core` label.\n") + testrun["selected_tests"]["core"] = True + if "test:no-fast" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Fast tests deselected by `test:no-fast` label.\n") + testrun["selected_tests"]["fast"] = False + if "test:flaky-jail" in labels: + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write("Flaky jailed tests chosen by `test:flaky-jail` label.\n") + testrun["selected_tests"]["flaky"] = True if selected_changed_files: with open(github_step_summary, "a", encoding="utf-8") as wfh: wfh.write( @@ -516,17 +630,50 @@ def define_testrun(ctx: Context, event_name: str, changed_files: pathlib.Path): "distro_slug": { "help": "The distribution slug to generate the matrix for", }, + "full": { + "help": "Full test run", + }, + "workflow": { + "help": "Which workflow is running", + }, + "fips": { + "help": "Include FIPS entries in the matrix", + }, }, ) -def matrix(ctx: Context, distro_slug: str): +def matrix( + ctx: Context, + distro_slug: str, + full: bool = False, + workflow: str = "ci", + fips: bool = False, +): """ Generate the test matrix. 
""" _matrix = [] + _splits = { + "functional": 3, + "integration": 5, + "scenarios": 1, + "unit": 2, + } + # On nightly and scheduled builds we don't want splits at all + if workflow.lower() in ("nightly", "scheduled"): + ctx.info(f"Reducing splits definition since workflow is '{workflow}'") + for key in _splits: + new_value = _splits[key] - 2 + if new_value < 1: + new_value = 1 + _splits[key] = new_value + for transport in ("zeromq", "tcp"): if transport == "tcp": if distro_slug not in ( "centosstream-9", + "centosstream-9-arm64", + "photonos-5", + "photonos-5-arm64", "ubuntu-22.04", "ubuntu-22.04-arm64", ): @@ -539,165 +686,557 @@ def matrix(ctx: Context, distro_slug: str): continue if "macos" in distro_slug and chunk == "scenarios": continue - _matrix.append({"transport": transport, "tests-chunk": chunk}) - print(json.dumps(_matrix)) + splits = _splits.get(chunk) or 1 + if full and splits > 1: + for split in range(1, splits + 1): + _matrix.append( + { + "transport": transport, + "tests-chunk": chunk, + "test-group": split, + "test-group-count": splits, + } + ) + if fips is True and distro_slug.startswith( + ("photonos-4", "photonos-5") + ): + # Repeat the last one, but with fips + _matrix.append({"fips": "fips", **_matrix[-1]}) + else: + _matrix.append({"transport": transport, "tests-chunk": chunk}) + if fips is True and distro_slug.startswith( + ("photonos-4", "photonos-5") + ): + # Repeat the last one, but with fips + _matrix.append({"fips": "fips", **_matrix[-1]}) + + ctx.info("Generated matrix:") + ctx.print(_matrix, soft_wrap=True) + + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is not None: + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"matrix={json.dumps(_matrix)}\n") ctx.exit(0) @ci.command( - name="transport-matrix", + name="pkg-matrix", arguments={ "distro_slug": { "help": "The distribution slug to generate the matrix for", }, + "pkg_type": { + "help": "The type of package we are testing against", + }, + 
"testing_releases": { + "help": "The salt releases to test upgrades against", + "nargs": "+", + "required": True, + }, + "fips": { + "help": "Include FIPS entries in the matrix", + }, }, ) -def transport_matrix(ctx: Context, distro_slug: str): +def pkg_matrix( + ctx: Context, + distro_slug: str, + pkg_type: str, + testing_releases: list[tools.utils.Version] = None, + fips: bool = False, +): """ Generate the test matrix. """ - _matrix = [] - for transport in ("zeromq", "tcp"): - if transport == "tcp": - if distro_slug not in ( - "centosstream-9", - "ubuntu-22.04", - "ubuntu-22.04-arm64", - ): - # Only run TCP transport tests on these distributions - continue - _matrix.append({"transport": transport}) - print(json.dumps(_matrix)) + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is None: + ctx.warn("The 'GITHUB_OUTPUT' variable is not set.") + if TYPE_CHECKING: + assert testing_releases + + still_testing_3005 = False + for release_version in testing_releases: + if still_testing_3005: + break + if release_version < tools.utils.Version("3006.0"): + still_testing_3005 = True + + if still_testing_3005 is False: + ctx.error( + f"No longer testing 3005.x releases please update {__file__} " + "and remove this error and the logic above the error. There may " + "be other places that need code removed as well." 
+ ) + ctx.exit(1) + + adjusted_versions = [] + for ver in testing_releases: + if ver < tools.utils.Version("3006.0"): + adjusted_versions.append((ver, "classic")) + adjusted_versions.append((ver, "tiamat")) + else: + adjusted_versions.append((ver, "relenv")) + ctx.info(f"Will look for the following versions: {adjusted_versions}") + + # Filter out the prefixes to look under + if "macos-" in distro_slug: + # We don't have golden images for macos, handle these separately + prefixes = { + "classic": "osx/", + "tiamat": "salt/py3/macos/minor/", + "relenv": "salt/py3/macos/minor/", + } + else: + parts = distro_slug.split("-") + name = parts[0] + version = parts[1] + if name in ("debian", "ubuntu"): + arch = "amd64" + elif name in ("centos", "centosstream", "amazonlinux", "photonos"): + arch = "x86_64" + if len(parts) > 2: + arch = parts[2] + if name == "amazonlinux": + name = "amazon" + if "centos" in name: + name = "redhat" + if "photon" in name: + name = "photon" + if name == "windows": + prefixes = { + "classic": "windows/", + "tiamat": "salt/py3/windows/minor", + "relenv": "salt/py3/windows/minor", + } + else: + prefixes = { + "classic": f"py3/{name}/{version}/{arch}/", + "tiamat": f"salt/py3/{name}/{version}/{arch}/minor/", + "relenv": f"salt/py3/{name}/{version}/{arch}/minor/", + } + + s3 = boto3.client("s3") + paginator = s3.get_paginator("list_objects_v2") + _matrix = [ + { + "test-chunk": "install", + "version": None, + } + ] + + for version, backend in adjusted_versions: + prefix = prefixes[backend] + # TODO: Remove this after 3009.0 + if backend == "relenv" and version >= tools.utils.Version("3006.5"): + prefix.replace("/arm64/", "/aarch64/") + # Using a paginator allows us to list recursively and avoid the item limit + page_iterator = paginator.paginate( + Bucket=f"salt-project-{tools.utils.SPB_ENVIRONMENT}-salt-artifacts-release", + Prefix=prefix, + ) + # Uses a jmespath expression to test if the wanted version is in any of the filenames + key_filter = 
f"Contents[?contains(Key, '{version}')][]" + if pkg_type == "MSI": + # TODO: Add this back when we add MSI upgrade and downgrade tests + # key_filter = f"Contents[?contains(Key, '{version}')] | [?ends_with(Key, '.msi')]" + continue + elif pkg_type == "NSIS": + key_filter = ( + f"Contents[?contains(Key, '{version}')] | [?ends_with(Key, '.exe')]" + ) + objects = list(page_iterator.search(key_filter)) + # Testing using `any` because sometimes the paginator returns `[None]` + if any(objects): + ctx.info( + f"Found {version} ({backend}) for {distro_slug}: {objects[0]['Key']}" + ) + for session in ("upgrade", "downgrade"): + if backend == "classic": + session += "-classic" + _matrix.append( + { + "test-chunk": session, + "version": str(version), + } + ) + if ( + backend == "relenv" + and fips is True + and distro_slug.startswith(("photonos-4", "photonos-5")) + ): + # Repeat the last one, but with fips + _matrix.append({"fips": "fips", **_matrix[-1]}) + else: + ctx.info(f"No {version} ({backend}) for {distro_slug} at {prefix}") + + ctx.info("Generated matrix:") + ctx.print(_matrix, soft_wrap=True) + + if github_output is not None: + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"matrix={json.dumps(_matrix)}\n") ctx.exit(0) @ci.command( - name="rerun-workflow", + name="get-releases", + arguments={ + "repository": { + "help": "The repository to query for releases, e.g. saltstack/salt", + }, + }, ) -def rerun_workflow(ctx: Context): +def get_releases(ctx: Context, repository: str = "saltstack/salt"): """ - Re-run failed workflows, up to a maximum of 3 times. + Generate the latest salt release. 
+ """ + github_output = os.environ.get("GITHUB_OUTPUT") + + if github_output is None: + ctx.exit(1, "The 'GITHUB_OUTPUT' variable is not set.") + else: + releases = tools.utils.get_salt_releases(ctx, repository) + str_releases = [str(version) for version in releases] + latest = str_releases[-1] + + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"latest-release={latest}\n") + wfh.write(f"releases={json.dumps(str_releases)}\n") + ctx.exit(0) - Only restarts workflows for which less than 25% of the jobs failed. + +@ci.command( + name="get-pr-test-labels", + arguments={ + "pr": { + "help": "Pull request number", + }, + "repository": { + "help": "Github repository.", + }, + }, +) +def get_pr_test_labels( + ctx: Context, repository: str = "saltstack/salt", pr: int = None +): + """ + Set the pull-request labels. """ gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None if gh_event_path is None: - ctx.warn("The 'GITHUB_EVENT_PATH' variable is not set.") - ctx.exit(1) + labels = _get_pr_test_labels_from_api(ctx, repository, pr=pr) + else: + if TYPE_CHECKING: + assert gh_event_path is not None + + try: + gh_event = json.loads(open(gh_event_path).read()) + except Exception as exc: + ctx.error( + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + ) + ctx.exit(1) + + if "pull_request" not in gh_event: + ctx.warn("The 'pull_request' key was not found on the event payload.") + ctx.exit(1) + + pr = gh_event["pull_request"]["number"] + labels = _get_pr_test_labels_from_event_payload(gh_event) + + if labels: + ctx.info(f"Test labels for pull-request #{pr} on {repository}:") + for name, description in labels: + ctx.info(f" * [yellow]{name}[/yellow]: {description}") + else: + ctx.info(f"No test labels for pull-request #{pr} on {repository}") + + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is None: + ctx.exit(0) if TYPE_CHECKING: - assert gh_event_path is not None + assert github_output is not None - try: - 
gh_event = json.loads(open(gh_event_path).read()) - except Exception as exc: - ctx.error(f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc) - ctx.exit(1) + ctx.info("Writing 'labels' to the github outputs file") + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"labels={json.dumps([label[0] for label in labels])}\n") + ctx.exit(0) - workflow_run = gh_event["workflow_run"] - ctx.info( - f"Processing Workflow ID {workflow_run['id']}, attempt {workflow_run['run_attempt']}..." - ) - if workflow_run["run_attempt"] >= 3: - ctx.info( - f"This workflow has failed for the past {workflow_run['run_attempt']} attempts. " - "Not re-running it." + +def _get_pr_test_labels_from_api( + ctx: Context, repository: str = "saltstack/salt", pr: int = None +) -> list[tuple[str, str]]: + """ + Set the pull-request labels. + """ + if pr is None: + ctx.error( + "Could not find the 'GITHUB_EVENT_PATH' variable and the " + "--pr flag was not passed. Unable to detect pull-request number." 
) - ctx.exit(0) + ctx.exit(1) + with ctx.web as web: + headers = { + "Accept": "application/vnd.github+json", + } + github_token = tools.utils.gh.get_github_token(ctx) + if github_token is not None: + headers["Authorization"] = f"Bearer {github_token}" + web.headers.update(headers) + ret = web.get(f"https://api.github.com/repos/{repository}/pulls/{pr}") + if ret.status_code != 200: + ctx.error( + f"Failed to get the #{pr} pull-request details on repository {repository!r}: {ret.reason}" + ) + ctx.exit(1) + pr_details = ret.json() + return _filter_test_labels(pr_details["labels"]) - run_id = str(workflow_run["id"]) - repository = workflow_run["repository"]["full_name"] - page = 1 - total = failed = 0 - # Get all jobs from workflow run to see how many failed - while True: - cmdline = [ - "gh", - "api", - "-H", - "Accept: application/vnd.github+json", - f"/repos/{repository}/actions/runs/{run_id}/jobs?filter=latest&per_page=100&page={page}", - ] - ret = ctx.run(*cmdline, capture=True, check=False) - if ret.returncode: - ctx.error("Failed to get the jobs for the workflow run") - ctx.exit(0) - jobs = json.loads(ret.stdout.strip().decode())["jobs"] - if not jobs: - break +def _get_pr_test_labels_from_event_payload( + gh_event: dict[str, Any] +) -> list[tuple[str, str]]: + """ + Get the pull-request test labels. 
+ """ + if "pull_request" not in gh_event: + return [] + return _filter_test_labels(gh_event["pull_request"]["labels"]) + + +def _filter_test_labels(labels: list[dict[str, Any]]) -> list[tuple[str, str]]: + return [ + (label["name"], label["description"]) + for label in labels + if label["name"].startswith("test:") + ] + + +@ci.command( + name="get-testing-releases", + arguments={ + "releases": { + "help": "The list of releases of salt", + "nargs": "*", + }, + "salt_version": { + "help": "The version of salt being tested against", + "required": True, + }, + }, +) +def get_testing_releases( + ctx: Context, + releases: list[tools.utils.Version], + salt_version: str = None, +): + """ + Get a list of releases to use for the upgrade and downgrade tests. + """ + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is None: + ctx.exit(1, "The 'GITHUB_OUTPUT' variable is not set.") + else: + # We aren't testing upgrades from anything before 3006.0 except the latest 3005.x + threshold_major = 3005 + parsed_salt_version = tools.utils.Version(salt_version) + # We want the latest 4 major versions, removing the oldest if this version is a new major + num_major_versions = 4 + if parsed_salt_version.minor == 0: + num_major_versions = 3 + majors = sorted( + list( + { + version.major + for version in releases + if version.major >= threshold_major + } + ) + )[-num_major_versions:] + testing_releases = [] + # Append the latest minor for each major + for major in majors: + minors_of_major = [ + version for version in releases if version.major == major + ] + testing_releases.append(minors_of_major[-1]) + + str_releases = [str(version) for version in testing_releases] - for job in jobs: - total += 1 - if job["conclusion"] == "failure": - failed += 1 - page += 1 + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"testing-releases={json.dumps(str_releases)}\n") - ctx.info(f"{failed} out of {total} jobs failed.") - if failed > total / 2: - ctx.info("More 
than half of the jobs failed. Not automatically restarting.") ctx.exit(0) - cmdline = [ - "gh", - "run", - "-R", - repository, - "rerun", - run_id, - "--failed", - ] - ctx.info(f"Running {' '.join(cmdline)!r} ...") - ret = ctx.run(*cmdline, check=False) - if ret.returncode: - ctx.error("Failed to re-run workflow") + +@ci.command( + name="define-cache-seed", + arguments={ + "static_cache_seed": { + "help": "The static cache seed value", + }, + "randomize": { + "help": "Randomize the cache seed value", + }, + }, +) +def define_cache_seed(ctx: Context, static_cache_seed: str, randomize: bool = False): + """ + Set `cache-seed` in GH Actions outputs. + """ + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is None: + ctx.warn("The 'GITHUB_OUTPUT' variable is not set.") + ctx.exit(1) + + if TYPE_CHECKING: + assert github_output is not None + + github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY") + if github_step_summary is None: + ctx.warn("The 'GITHUB_STEP_SUMMARY' variable is not set.") + ctx.exit(1) + + if TYPE_CHECKING: + assert github_step_summary is not None + + labels: list[str] = [] + gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None + if gh_event_path is not None: + try: + gh_event = json.loads(open(gh_event_path).read()) + except Exception as exc: + ctx.error( + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + ) + ctx.exit(1) + + labels.extend( + label[0] for label in _get_pr_test_labels_from_event_payload(gh_event) + ) + + if randomize is True: + cache_seed = f"SEED-{random.randint(100, 1000)}" + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write( + f"The cache seed has been randomized to `{cache_seed}` because " + "`--randomize` was passed to `tools ci define-cache-seed`." 
+ ) + elif "test:random-cache-seed" in labels: + cache_seed = f"SEED-{random.randint(100, 1000)}" + with open(github_step_summary, "a", encoding="utf-8") as wfh: + wfh.write( + f"The cache seed has been randomized to `{cache_seed}` because " + "the label `test:random-cache-seed` was set." + ) else: - ctx.info("Restarted workflow successfully") - ctx.exit(0) + cache_seed = static_cache_seed + + ctx.info("Writing 'cache-seed' to the github outputs file") + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"cache-seed={cache_seed}\n") @ci.command( - name="pkg-matrix", + name="upload-coverage", arguments={ - "distro_slug": { - "help": "The distribution slug to generate the matrix for", + "commit_sha": { + "help": "The commit SHA", + "required": True, }, - "pkg_type": { - "help": "The distribution slug to generate the matrix for", + "reports_path": { + "help": "The path to the directory containing the XML Coverage Reports", }, }, ) -def pkg_matrix(ctx: Context, distro_slug: str, pkg_type: str): +def upload_coverage(ctx: Context, reports_path: pathlib.Path, commit_sha: str = None): """ - Generate the test matrix. + Upload code coverage to codecov. """ - _matrix = [] - sessions = [ - "test-pkgs-onedir", + codecov = shutil.which("codecov") + if not codecov: + ctx.error("Could not find the path to the 'codecov' binary") + ctx.exit(1) + + if TYPE_CHECKING: + assert commit_sha is not None + + codecov_args = [ + codecov, + "--nonZero", + "--sha", + commit_sha, ] - if ( - distro_slug - not in [ - "debian-11-arm64", - "ubuntu-20.04-arm64", - "ubuntu-22.04-arm64", - ] - and "MSI" != pkg_type - ): - # These OS's never had arm64 packages built for them - # with the tiamate onedir packages. 
- # we will need to ensure when we release 3006.0 - # we allow for 3006.0 jobs to run, because then - # we will have arm64 onedir packages to upgrade from - sessions.append("'test-upgrade-pkgs-onedir(classic=False)'") - if ( - distro_slug not in ["centosstream-9", "ubuntu-22.04", "ubuntu-22.04-arm64"] - and "MSI" != pkg_type - ): - # Packages for these OSs where never built for classic previously - sessions.append("'test-upgrade-pkgs-onedir(classic=True)'") - for sess in sessions: - _matrix.append({"nox-session": sess}) - print(json.dumps(_matrix)) + gh_event_path = os.environ.get("GITHUB_EVENT_PATH") or None + if gh_event_path is not None: + try: + gh_event = json.loads(open(gh_event_path).read()) + pr_event_data = gh_event.get("pull_request") + if pr_event_data: + codecov_args.extend(["--parent", pr_event_data["base"]["sha"]]) + except Exception as exc: + ctx.error( + f"Could not load the GH Event payload from {gh_event_path!r}:\n", exc + ) + + sleep_time = 15 + for fpath in reports_path.glob("*.xml"): + if fpath.name in ("salt.xml", "tests.xml"): + flags = fpath.stem + else: + try: + section, distro_slug, nox_session = fpath.stem.split("..") + except ValueError: + ctx.error( + f"The file {fpath} does not respect the expected naming convention " + "'{salt|tests}.....xml'. Skipping..." + ) + continue + flags = f"{section},{distro_slug}" + + max_attempts = 3 + current_attempt = 0 + while True: + current_attempt += 1 + ctx.info( + f"Uploading '{fpath}' coverage report to codecov (attempt {current_attempt} of {max_attempts}) ..." 
+ ) + + ret = ctx.run( + *codecov_args, + "--file", + str(fpath), + "--name", + fpath.stem, + "--flags", + flags, + check=False, + capture=True, + ) + stdout = ret.stdout.strip().decode() + stderr = ret.stderr.strip().decode() + if ret.returncode == 0: + ctx.console_stdout.print(stdout) + ctx.console.print(stderr) + break + + if ( + "Too many uploads to this commit" in stdout + or "Too many uploads to this commit" in stderr + ): + # Let's just stop trying + ctx.console_stdout.print(stdout) + ctx.console.print(stderr) + break + + if current_attempt >= max_attempts: + ctx.error(f"Failed to upload {fpath} to codecov") + ctx.exit(1) + + ctx.warn(f"Waiting {sleep_time} seconds until next retry...") + time.sleep(sleep_time) + ctx.exit(0) diff --git a/tools/pkg.py b/tools/pkg/__init__.py similarity index 90% rename from tools/pkg.py rename to tools/pkg/__init__.py index fc1edea711d8..056129966553 100644 --- a/tools/pkg.py +++ b/tools/pkg/__init__.py @@ -154,7 +154,7 @@ def set_salt_version( ret = venv.run_code(code, capture=True, check=False) if ret.returncode: ctx.error(ret.stderr.decode()) - ctx.exit(ctx.returncode) + ctx.exit(ret.returncode) salt_version = ret.stdout.strip().decode() if not tools.utils.REPO_ROOT.joinpath("salt").is_dir(): @@ -223,7 +223,7 @@ def set_salt_version( ) def pre_archive_cleanup(ctx: Context, cleanup_path: str, pkg: bool = False): """ - Clean the provided path of paths that shouyld not be included in the archive. + Clean the provided path of paths that should not be included in the archive. 
For example: @@ -429,3 +429,48 @@ def pypi_upload(ctx: Context, files: list[pathlib.Path], test: bool = False): if ret.returncode: ctx.error(ret.stderr.strip().decode()) ctx.exit(ret.returncode) + + +@pkg.command( + name="configure-git", + arguments={ + "user": { + "help": "The git global username", + "required": False, + }, + "email": { + "help": "The git global email", + "required": False, + }, + }, +) +def configure_git( + ctx: Context, + user: str = "Salt Project Packaging", + email: str = "saltproject-packaging@vmware.com", +): + cwd = pathlib.Path.cwd() + ctx.info("Setting name and email in git global config") + ctx.run("git", "config", "--global", "user.name", f"'{user}'") + ctx.run("git", "config", "--global", "user.email", f"{email}") + ctx.info(f"Adding {str(cwd)} as a safe directory") + ctx.run("git", "config", "--global", "--add", "safe.directory", str(cwd)) + + +@pkg.command( + name="apply-release-patch", + arguments={ + "patch": {"help": "The git global username"}, + "delete": { + "help": "Whether to delete the patch after applying", + "required": False, + }, + }, +) +def apply_release_patch(ctx: Context, patch: pathlib.Path, delete: bool = False): + patch = patch.resolve() + ctx.info("Applying the release patch") + ctx.run("git", "am", "--committer-date-is-author-date", patch.name) + if delete: + ctx.info("Deleting the release patch because --delete was passed") + patch.unlink() diff --git a/tools/pkg/build.py b/tools/pkg/build.py new file mode 100644 index 000000000000..90f34947bd5f --- /dev/null +++ b/tools/pkg/build.py @@ -0,0 +1,763 @@ +""" +These commands are used to build the salt onedir and system packages. 
+""" +# pylint: disable=resource-leakage,broad-except +from __future__ import annotations + +import json +import logging +import os +import pathlib +import shutil +import tarfile +import zipfile +from typing import TYPE_CHECKING + +import yaml +from ptscripts import Context, command_group + +import tools.utils + +log = logging.getLogger(__name__) + +# Define the command group +build = command_group( + name="build", + help="Package build related commands.", + description=__doc__, + parent="pkg", +) + + +def _get_shared_constants(): + shared_constants = ( + tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" + ) + return yaml.safe_load(shared_constants.read_text()) + + +@build.command( + name="deb", + arguments={ + "onedir": { + "help": "The path to the onedir artifact", + }, + "relenv_version": { + "help": "The version of relenv to use", + }, + "python_version": { + "help": "The version of python to build with using relenv", + }, + "arch": { + "help": "The arch to build for", + }, + }, +) +def debian( + ctx: Context, + onedir: str = None, # pylint: disable=bad-whitespace + relenv_version: str = None, + python_version: str = None, + arch: str = None, +): + """ + Build the deb package. 
+ """ + checkout = pathlib.Path.cwd() + env_args = ["-e", "SALT_ONEDIR_ARCHIVE"] + if onedir: + onedir_artifact = checkout / "artifacts" / onedir + _check_pkg_build_files_exist(ctx, onedir_artifact=onedir_artifact) + ctx.info( + f"Building the package using the onedir artifact {str(onedir_artifact)}" + ) + os.environ["SALT_ONEDIR_ARCHIVE"] = str(onedir_artifact) + else: + if arch is None: + ctx.error( + "Building the package from the source files but the arch to build for has not been given" + ) + ctx.exit(1) + ctx.info("Building the package from the source files") + shared_constants = _get_shared_constants() + if not python_version: + python_version = shared_constants["python_version"] + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert python_version + assert relenv_version + new_env = { + "SALT_RELENV_VERSION": relenv_version, + "SALT_PYTHON_VERSION": python_version, + "SALT_PACKAGE_ARCH": str(arch), + "RELENV_FETCH_VERSION": relenv_version, + } + for key, value in new_env.items(): + os.environ[key] = value + env_args.extend(["-e", key]) + + env = os.environ.copy() + env["PIP_CONSTRAINT"] = str( + tools.utils.REPO_ROOT / "requirements" / "constraints.txt" + ) + + ctx.run("ln", "-sf", "pkg/debian/", ".") + ctx.run("debuild", *env_args, "-uc", "-us", env=env) + + ctx.info("Done") + + +@build.command( + name="rpm", + arguments={ + "onedir": { + "help": "The path to the onedir artifact", + }, + "relenv_version": { + "help": "The version of relenv to use", + }, + "python_version": { + "help": "The version of python to build with using relenv", + }, + "arch": { + "help": "The arch to build for", + }, + }, +) +def rpm( + ctx: Context, + onedir: str = None, # pylint: disable=bad-whitespace + relenv_version: str = None, + python_version: str = None, + arch: str = None, +): + """ + Build the RPM package. 
+ """ + checkout = pathlib.Path.cwd() + if onedir: + onedir_artifact = checkout / "artifacts" / onedir + _check_pkg_build_files_exist(ctx, onedir_artifact=onedir_artifact) + ctx.info( + f"Building the package using the onedir artifact {str(onedir_artifact)}" + ) + os.environ["SALT_ONEDIR_ARCHIVE"] = str(onedir_artifact) + else: + ctx.info(f"Building the package from the source files") + if arch is None: + ctx.error( + "Building the package from the source files but the arch to build for has not been given" + ) + ctx.exit(1) + ctx.info(f"Building the package from the source files") + shared_constants = _get_shared_constants() + if not python_version: + python_version = shared_constants["python_version"] + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert python_version + assert relenv_version + new_env = { + "SALT_RELENV_VERSION": relenv_version, + "SALT_PYTHON_VERSION": python_version, + "SALT_PACKAGE_ARCH": str(arch), + "RELENV_FETCH_VERSION": relenv_version, + } + for key, value in new_env.items(): + os.environ[key] = value + + env = os.environ.copy() + env["PIP_CONSTRAINT"] = str( + tools.utils.REPO_ROOT / "requirements" / "constraints.txt" + ) + spec_file = checkout / "pkg" / "rpm" / "salt.spec" + ctx.run( + "rpmbuild", "-bb", f"--define=_salt_src {checkout}", str(spec_file), env=env + ) + + ctx.info("Done") + + +@build.command( + name="macos", + arguments={ + "onedir": { + "help": "The name of the onedir artifact, if given it should be under artifacts/", + }, + "salt_version": { + "help": ( + "The salt version for which to build the repository configuration files. " + "If not passed, it will be discovered by running 'python3 salt/version.py'." 
+ ), + "required": True, + }, + "sign": { + "help": "Sign and notorize built package", + }, + "relenv_version": { + "help": "The version of relenv to use", + }, + "python_version": { + "help": "The version of python to build with using relenv", + }, + }, +) +def macos( + ctx: Context, + onedir: str = None, + salt_version: str = None, + sign: bool = False, + relenv_version: str = None, + python_version: str = None, +): + """ + Build the macOS package. + """ + if TYPE_CHECKING: + assert onedir is not None + assert salt_version is not None + + checkout = pathlib.Path.cwd() + if onedir: + onedir_artifact = checkout / "artifacts" / onedir + ctx.info(f"Building package from existing onedir: {str(onedir_artifact)}") + _check_pkg_build_files_exist(ctx, onedir_artifact=onedir_artifact) + + build_root = checkout / "pkg" / "macos" / "build" / "opt" + build_root.mkdir(parents=True, exist_ok=True) + ctx.info(f"Extracting the onedir artifact to {build_root}") + with tarfile.open(str(onedir_artifact)) as tarball: + with ctx.chdir(onedir_artifact.parent): + tarball.extractall(path=build_root) + else: + ctx.info("Building package without an existing onedir") + + if not onedir: + # Prep the salt onedir if not building from an existing one + shared_constants = _get_shared_constants() + if not python_version: + python_version = shared_constants["python_version"] + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert python_version + assert relenv_version + os.environ["RELENV_FETCH_VERSION"] = relenv_version + with ctx.chdir(checkout / "pkg" / "macos"): + ctx.info("Fetching relenv python") + ctx.run( + "./build_python.sh", + "--version", + python_version, + "--relenv-version", + relenv_version, + ) + + ctx.info("Installing salt into the relenv python") + ctx.run("./install_salt.sh") + + if sign: + ctx.info("Signing binaries") + with ctx.chdir(checkout / "pkg" / "macos"): + ctx.run("./sign_binaries.sh") + ctx.info("Building the macos 
package") + with ctx.chdir(checkout / "pkg" / "macos"): + ctx.run("./prep_salt.sh") + if sign: + package_args = ["--sign", salt_version] + else: + package_args = [salt_version] + ctx.run("./package.sh", *package_args) + if sign: + ctx.info("Notarizing package") + ret = ctx.run("uname", "-m", capture=True) + cpu_arch = ret.stdout.strip().decode() + with ctx.chdir(checkout / "pkg" / "macos"): + ctx.run("./notarize.sh", f"salt-{salt_version}-py3-{cpu_arch}.pkg") + + ctx.info("Done") + + +@build.command( + name="windows", + arguments={ + "onedir": { + "help": "The name of the onedir artifact, if given it should be under artifacts/", + }, + "salt_version": { + "help": ( + "The salt version for which to build the repository configuration files. " + "If not passed, it will be discovered by running 'python3 salt/version.py'." + ), + "required": True, + }, + "arch": { + "help": "The architecture to build the package for", + "choices": ("x86", "amd64"), + "required": True, + }, + "sign": { + "help": "Sign and notarize built package", + }, + "relenv_version": { + "help": "The version of relenv to use", + }, + "python_version": { + "help": "The version of python to build with using relenv", + }, + }, +) +def windows( + ctx: Context, + onedir: str = None, + salt_version: str = None, + arch: str = None, + sign: bool = False, + relenv_version: str = None, + python_version: str = None, +): + """ + Build the Windows package. 
+ """ + if TYPE_CHECKING: + assert salt_version is not None + assert arch is not None + + shared_constants = _get_shared_constants() + if not python_version: + python_version = shared_constants["python_version"] + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert python_version + assert relenv_version + os.environ["RELENV_FETCH_VERSION"] = relenv_version + + build_cmd = [ + "powershell.exe", + "&", + "pkg/windows/build.cmd", + "-Architecture", + arch, + "-Version", + salt_version, + "-PythonVersion", + python_version, + "-RelenvVersion", + relenv_version, + "-CICD", + ] + + checkout = pathlib.Path.cwd() + if onedir: + build_cmd.append("-SkipInstall") + onedir_artifact = checkout / "artifacts" / onedir + ctx.info(f"Building package from existing onedir: {str(onedir_artifact)}") + _check_pkg_build_files_exist(ctx, onedir_artifact=onedir_artifact) + + unzip_dir = checkout / "pkg" / "windows" + ctx.info(f"Unzipping the onedir artifact to {unzip_dir}") + with zipfile.ZipFile(onedir_artifact, mode="r") as archive: + archive.extractall(unzip_dir) + + move_dir = unzip_dir / "salt" + build_env = unzip_dir / "buildenv" + _check_pkg_build_files_exist(ctx, move_dir=move_dir) + + ctx.info(f"Moving {move_dir} directory to the build environment in {build_env}") + shutil.move(move_dir, build_env) + else: + build_cmd.append("-Build") + ctx.info("Building package without an existing onedir") + + ctx.info(f"Running: {' '.join(build_cmd)} ...") + ctx.run(*build_cmd) + + if sign: + env = os.environ.copy() + envpath = env.get("PATH") + if envpath is None: + path_parts = [] + else: + path_parts = envpath.split(os.pathsep) + path_parts.extend( + [ + r"C:\Program Files (x86)\Windows Kits\10\App Certification Kit", + r"C:\Program Files (x86)\Microsoft SDKs\Windows\v10.0A\bin\NETFX 4.8 Tools", + r"C:\Program Files\DigiCert\DigiCert One Signing Manager Tools", + ] + ) + env["PATH"] = os.pathsep.join(path_parts) + command = 
["smksp_registrar.exe", "list"] + ctx.info(f"Running: '{' '.join(command)}' ...") + ctx.run(*command, env=env) + command = ["smctl.exe", "keypair", "ls"] + ctx.info(f"Running: '{' '.join(command)}' ...") + ret = ctx.run(*command, env=env, check=False) + if ret.returncode: + ctx.error(f"Failed to run '{' '.join(command)}'") + command = [ + r"C:\Windows\System32\certutil.exe", + "-csp", + "DigiCert Signing Manager KSP", + "-key", + "-user", + ] + ctx.info(f"Running: '{' '.join(command)}' ...") + ret = ctx.run(*command, env=env, check=False) + if ret.returncode: + ctx.error(f"Failed to run '{' '.join(command)}'") + + command = ["smksp_cert_sync.exe"] + ctx.info(f"Running: '{' '.join(command)}' ...") + ret = ctx.run(*command, env=env, check=False) + if ret.returncode: + ctx.error(f"Failed to run '{' '.join(command)}'") + + for fname in ( + f"pkg/windows/build/Salt-Minion-{salt_version}-Py3-{arch}-Setup.exe", + f"pkg/windows/build/Salt-Minion-{salt_version}-Py3-{arch}.msi", + ): + fpath = str(pathlib.Path(fname).resolve()) + ctx.info(f"Signing {fname} ...") + ctx.run( + "signtool.exe", + "sign", + "/sha1", + os.environ["WIN_SIGN_CERT_SHA1_HASH"], + "/tr", + "http://timestamp.digicert.com", + "/td", + "SHA256", + "/fd", + "SHA256", + fpath, + env=env, + ) + ctx.info(f"Verifying {fname} ...") + ctx.run("signtool.exe", "verify", "/v", "/pa", fpath, env=env) + + ctx.info("Done") + + +@build.command( + name="onedir-dependencies", + arguments={ + "arch": { + "help": "The architecture to build the package for", + "choices": ("x86_64", "aarch64", "x86", "amd64"), + "required": True, + }, + "python_version": { + "help": "The version of python to create an environment for using relenv", + "required": True, + }, + "relenv_version": { + "help": "The version of relenv to use", + }, + "package_name": { + "help": "The name of the relenv environment to be created", + "required": True, + }, + "platform": { + "help": "The platform the relenv environment is being created on", + 
"required": True, + }, + }, +) +def onedir_dependencies( + ctx: Context, + arch: str = None, + python_version: str = None, + relenv_version: str = None, + package_name: str = None, + platform: str = None, +): + """ + Create a relenv environment with the onedir dependencies installed. + + NOTE: relenv needs to be installed into your environment and builds and toolchains (linux) fetched. + """ + if TYPE_CHECKING: + assert arch is not None + assert python_version is not None + assert package_name is not None + assert platform is not None + + if platform in ("macos", "darwin") and arch == "aarch64": + arch = "arm64" + + shared_constants = _get_shared_constants() + if not python_version: + python_version = shared_constants["python_version"] + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert python_version + assert relenv_version + os.environ["RELENV_FETCH_VERSION"] = relenv_version + + # We import relenv here because it is not a hard requirement for the rest of the tools commands + try: + import relenv.create + except ImportError: + ctx.exit(1, "Relenv not installed in the current environment.") + + dest = pathlib.Path(package_name).resolve() + relenv.create.create(dest, arch=arch, version=python_version) + + # Validate that we're using the relenv version we really want to + if platform == "windows": + env_scripts_dir = dest / "Scripts" + else: + env_scripts_dir = dest / "bin" + + ret = ctx.run( + str(env_scripts_dir / "relenv"), "--version", capture=True, check=False + ) + if ret.returncode: + ctx.error(f"Failed to get the relenv version: {ret}") + ctx.exit(1) + + env_relenv_version = ret.stdout.strip().decode() + if env_relenv_version != relenv_version: + ctx.error( + f"The onedir installed relenv version({env_relenv_version}) is not " + f"the relenv version which should be used({relenv_version})." 
+ ) + ctx.exit(1) + + ctx.info( + f"The relenv version installed in the onedir env({env_relenv_version}) " + f"matches the version which must be used." + ) + + env = os.environ.copy() + install_args = ["-v"] + if platform == "windows": + python_bin = env_scripts_dir / "python" + else: + env["RELENV_BUILDENV"] = "1" + python_bin = env_scripts_dir / "python3" + install_args.extend( + [ + "--use-pep517", + "--no-cache-dir", + "--no-binary=:all:", + ] + ) + + version_info = ctx.run( + str(python_bin), + "-c", + "import sys; print('{}.{}'.format(*sys.version_info))", + capture=True, + ) + requirements_version = version_info.stdout.strip().decode() + requirements_file = ( + tools.utils.REPO_ROOT + / "requirements" + / "static" + / "pkg" + / f"py{requirements_version}" + / f"{platform}.txt" + ) + _check_pkg_build_files_exist(ctx, requirements_file=requirements_file) + + env["PIP_CONSTRAINT"] = str( + tools.utils.REPO_ROOT / "requirements" / "constraints.txt" + ) + ctx.run( + str(python_bin), + "-m", + "pip", + "install", + "-U", + "setuptools", + "pip", + "wheel", + env=env, + ) + ctx.run( + str(python_bin), + "-m", + "pip", + "install", + *install_args, + "-r", + str(requirements_file), + env=env, + ) + + +@build.command( + name="salt-onedir", + arguments={ + "salt_name": { + "help": "The path to the salt code to install, relative to the repo root", + }, + "platform": { + "help": "The platform that installed is being installed on", + "required": True, + }, + "package_name": { + "help": "The name of the relenv environment to install salt into", + "required": True, + }, + "relenv_version": { + "help": "The version of relenv to use", + }, + }, +) +def salt_onedir( + ctx: Context, + salt_name: str, + platform: str = None, + package_name: str = None, + relenv_version: str = None, +): + """ + Install salt into a relenv onedir environment. 
+ """ + if TYPE_CHECKING: + assert platform is not None + assert package_name is not None + + shared_constants = _get_shared_constants() + if not relenv_version: + relenv_version = shared_constants["relenv_version"] + if TYPE_CHECKING: + assert relenv_version + os.environ["RELENV_FETCH_VERSION"] = relenv_version + + salt_archive = pathlib.Path(salt_name).resolve() + onedir_env = pathlib.Path(package_name).resolve() + _check_pkg_build_files_exist(ctx, onedir_env=onedir_env, salt_archive=salt_archive) + + # Validate that we're using the relenv version we really want to + if platform == "windows": + env_scripts_dir = onedir_env / "Scripts" + else: + env_scripts_dir = onedir_env / "bin" + + ret = ctx.run( + str(env_scripts_dir / "relenv"), "--version", capture=True, check=False + ) + if ret.returncode: + ctx.error(f"Failed to get the relenv version: {ret}") + ctx.exit(1) + + env_relenv_version = ret.stdout.strip().decode() + if env_relenv_version != relenv_version: + ctx.error( + f"The onedir installed relenv version({env_relenv_version}) is not " + f"the relenv version which should be used({relenv_version})." + ) + ctx.exit(1) + + ctx.info( + f"The relenv version installed in the onedir env({env_relenv_version}) " + f"matches the version which must be used." 
+ ) + + env = os.environ.copy() + env["USE_STATIC_REQUIREMENTS"] = "1" + env["RELENV_BUILDENV"] = "1" + if platform == "windows": + ctx.run( + "powershell.exe", + r"pkg\windows\install_salt.cmd", + "-BuildDir", + str(onedir_env), + "-CICD", + "-SourceTarball", + str(salt_archive), + env=env, + ) + ctx.run( + "powershell.exe", + r"pkg\windows\prep_salt.cmd", + "-BuildDir", + str(onedir_env), + "-CICD", + env=env, + ) + python_executable = str(env_scripts_dir / "python.exe") + ret = ctx.run( + python_executable, + "-c", + "import json, sys, site, pathlib; sys.stdout.write(json.dumps([pathlib.Path(p).as_posix() for p in site.getsitepackages()]))", + capture=True, + ) + if ret.returncode: + ctx.error(f"Failed to get the path to `site-packages`: {ret}") + ctx.exit(1) + site_packages_json = json.loads(ret.stdout.strip().decode()) + ctx.info(f"Discovered 'site-packages' paths: {site_packages_json}") + else: + env["RELENV_PIP_DIR"] = "1" + pip_bin = env_scripts_dir / "pip3" + ctx.run( + str(pip_bin), + "install", + "--no-warn-script-location", + str(salt_archive), + env=env, + ) + if platform == "darwin": + + def errfn(fn, path, err): + ctx.info(f"Removing {path} failed: {err}") + + for subdir in ("opt", "etc", "Library"): + path = onedir_env / subdir + if path.exists(): + shutil.rmtree(path, onerror=errfn) + + python_executable = str(env_scripts_dir / "python3") + ret = ctx.run( + python_executable, + "-c", + "import json, sys, site, pathlib; sys.stdout.write(json.dumps(site.getsitepackages()))", + capture=True, + ) + if ret.returncode: + ctx.error(f"Failed to get the path to `site-packages`: {ret}") + ctx.exit(1) + site_packages_json = json.loads(ret.stdout.strip().decode()) + ctx.info(f"Discovered 'site-packages' paths: {site_packages_json}") + + site_packages: str + for site_packages_path in site_packages_json: + if "site-packages" in site_packages_path: + site_packages = site_packages_path + break + else: + ctx.error("Cloud not find a site-packages path with 
'site-packages' in it?!") + ctx.exit(1) + + ret = ctx.run( + str(python_executable), + "-c", + "import sys; print('{}.{}'.format(*sys.version_info))", + capture=True, + ) + python_version_info = ret.stdout.strip().decode() + extras_dir = onedir_env / f"extras-{python_version_info}" + ctx.info(f"Creating Salt's extras path: {extras_dir}") + extras_dir.mkdir(exist_ok=True) + + for fname in ("_salt_onedir_extras.py", "_salt_onedir_extras.pth"): + src = tools.utils.REPO_ROOT / "pkg" / "common" / "onedir" / fname + dst = pathlib.Path(site_packages) / fname + ctx.info(f"Copying '{src.relative_to(tools.utils.REPO_ROOT)}' to '{dst}' ...") + shutil.copyfile(src, dst) + + +def _check_pkg_build_files_exist(ctx: Context, **kwargs): + for name, path in kwargs.items(): + if not path.exists(): + ctx.error(f"The path {path} does not exist, {name} is not valid... exiting") + ctx.exit(1) diff --git a/tools/pkg/repo/__init__.py b/tools/pkg/repo/__init__.py new file mode 100644 index 000000000000..e48671051f2a --- /dev/null +++ b/tools/pkg/repo/__init__.py @@ -0,0 +1,171 @@ +""" +These commands are used to build the package repository files. +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import logging +import os +import pathlib +import shutil +from typing import TYPE_CHECKING + +import boto3 +from botocore.exceptions import ClientError +from ptscripts import Context, command_group + +import tools.pkg +import tools.utils +from tools.utils import Version, get_salt_releases + +log = logging.getLogger(__name__) + +# Define the command group +repo = command_group( + name="repo", + help="Packaging Repository Related Commands", + description=__doc__, + parent="pkg", +) + + +@repo.command(name="backup-previous-releases") +def backup_previous_releases(ctx: Context): + """ + Backup release bucket. 
+ """ + _rclone(ctx, tools.utils.RELEASE_BUCKET_NAME, tools.utils.BACKUP_BUCKET_NAME) + ctx.info("Done") + + +@repo.command(name="restore-previous-releases") +def restore_previous_releases(ctx: Context): + """ + Restore release bucket from backup. + """ + _rclone(ctx, tools.utils.BACKUP_BUCKET_NAME, tools.utils.RELEASE_BUCKET_NAME) + github_output = os.environ.get("GITHUB_OUTPUT") + if github_output is not None: + with open(github_output, "a", encoding="utf-8") as wfh: + wfh.write(f"backup-complete=true\n") + ctx.info("Done") + + +def _rclone(ctx: Context, src: str, dst: str): + rclone = shutil.which("rclone") + if not rclone: + ctx.error("Could not find the rclone binary") + ctx.exit(1) + + if TYPE_CHECKING: + assert rclone + + env = os.environ.copy() + env["RCLONE_CONFIG_S3_TYPE"] = "s3" + cmdline: list[str] = [ + rclone, + "sync", + "--auto-confirm", + "--human-readable", + "--checksum", + "--color=always", + "--metadata", + "--s3-env-auth", + "--s3-location-constraint=us-west-2", + "--s3-provider=AWS", + "--s3-region=us-west-2", + "--stats-file-name-length=0", + "--stats-one-line", + "--stats=5s", + "--transfers=50", + "--fast-list", + "--verbose", + "--exclude=salt-dev/*", + ] + if src == tools.utils.RELEASE_BUCKET_NAME: + cmdline.append("--s3-storage-class=INTELLIGENT_TIERING") + cmdline.extend([f"s3://{src}", f"s3://{dst}"]) + ctx.info(f"Running: {' '.join(cmdline)}") + ret = ctx.run(*cmdline, env=env, check=False) + if ret.returncode: + ctx.error(f"Failed to sync from s3://{src} to s3://{dst}") + ctx.exit(1) + + +@repo.command( + name="confirm-unreleased", + arguments={ + "salt_version": { + "help": "The salt version to check", + }, + "repository": { + "help": ( + "The full repository name, ie, 'saltstack/salt' on GitHub " + "to run the checks against." + ) + }, + }, +) +def confirm_unreleased( + ctx: Context, salt_version: str, repository: str = "saltstack/salt" +): + """ + Confirm that the passed version is not yet tagged and/or released. 
+ """ + releases = get_salt_releases(ctx, repository) + if Version(salt_version) in releases: + ctx.error(f"There's already a '{salt_version}' tag or github release.") + ctx.exit(1) + ctx.info(f"Could not find a release for Salt Version '{salt_version}'") + ctx.exit(0) + + +@repo.command( + name="confirm-staged", + arguments={ + "salt_version": { + "help": "The salt version to check", + }, + "repository": { + "help": ( + "The full repository name, ie, 'saltstack/salt' on GitHub " + "to run the checks against." + ) + }, + }, +) +def confirm_staged(ctx: Context, salt_version: str, repository: str = "saltstack/salt"): + """ + Confirm that the passed version has been staged for release. + """ + s3 = boto3.client("s3") + repo_release_files_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-files.json" + ) + repo_release_symlinks_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + for remote_path in (repo_release_files_path, repo_release_symlinks_path): + try: + bucket_name = tools.utils.STAGING_BUCKET_NAME + ctx.info( + f"Checking for the presence of {remote_path} on bucket {bucket_name} ..." 
+ ) + s3.head_object( + Bucket=bucket_name, + Key=str(remote_path), + ) + except ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Could not get information about {remote_path}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {remote_path} in bucket.") + ctx.exit(1) + if exc.response["Error"]["Code"] == "400": + ctx.error(f"Could get information about {remote_path}: {exc}") + ctx.exit(1) + log.exception(f"Error getting information about {remote_path}: {exc}") + ctx.exit(1) + ctx.info(f"Version {salt_version} has been staged for release") + ctx.exit(0) diff --git a/tools/pkg/repo/create.py b/tools/pkg/repo/create.py new file mode 100644 index 000000000000..d9b8fb0a97d4 --- /dev/null +++ b/tools/pkg/repo/create.py @@ -0,0 +1,1030 @@ +""" +These commands are used to build the package repository files. +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import hashlib +import json +import logging +import os +import pathlib +import shutil +import textwrap +from datetime import datetime +from typing import TYPE_CHECKING + +import boto3 +from ptscripts import Context, command_group + +import tools.pkg +import tools.utils +from tools.utils import Version, parse_versions +from tools.utils.repo import ( + create_full_repo_path, + create_top_level_repo_path, + get_repo_json_file_contents, +) + +log = logging.getLogger(__name__) + +create = command_group( + name="create", + help="Packaging Repository Creation Related Commands", + parent=["pkg", "repo"], +) + + +_deb_distro_info = { + "debian": { + "10": { + "label": "deb10ary", + "codename": "buster", + "suitename": "oldoldstable", + }, + "11": { + "label": "deb11ary", + "codename": "bullseye", + "suitename": "oldstable", + }, + "12": { + "label": "deb12ary", + "codename": "bookworm", + "suitename": "stable", + }, + }, + "ubuntu": { + "20.04": { + "label": "salt_ubuntu2004", + "codename": 
"focal", + }, + "22.04": { + "label": "salt_ubuntu2204", + "codename": "jammy", + }, + }, +} + + +@create.command( + name="deb", + arguments={ + "salt_version": { + "help": ( + "The salt version for which to build the repository configuration files. " + "If not passed, it will be discovered by running 'python3 salt/version.py'." + ), + "required": True, + }, + "distro": { + "help": "The debian based distribution to build the repository for", + "choices": list(_deb_distro_info), + "required": True, + }, + "distro_version": { + "help": "The distro version.", + "required": True, + }, + "distro_arch": { + "help": "The distribution architecture", + "choices": ("x86_64", "amd64", "aarch64", "arm64"), + }, + "repo_path": { + "help": "Path where the repository shall be created.", + "required": True, + }, + "key_id": { + "help": "The GnuPG key ID used to sign.", + "required": True, + }, + "incoming": { + "help": ( + "The path to the directory containing the files that should added to " + "the repository." + ), + "required": True, + }, + "nightly_build_from": { + "help": "Developement repository target", + }, + }, +) +def debian( + ctx: Context, + salt_version: str = None, + distro: str = None, + distro_version: str = None, + incoming: pathlib.Path = None, + repo_path: pathlib.Path = None, + key_id: str = None, + distro_arch: str = "amd64", + nightly_build_from: str = None, +): + """ + Create the debian repository. + """ + if TYPE_CHECKING: + assert salt_version is not None + assert distro is not None + assert distro_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + display_name = f"{distro.capitalize()} {distro_version}" + if distro_version not in _deb_distro_info[distro]: + ctx.error(f"Support for {display_name} is missing.") + ctx.exit(1) + + if distro_arch == "x86_64": + ctx.info(f"The {distro_arch} arch is an alias for 'amd64'. 
Adjusting.") + distro_arch = "amd64" + + if distro_arch == "aarch64": + ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. Adjusting.") + distro_arch = "arm64" + + distro_details = _deb_distro_info[distro][distro_version] + + ctx.info("Distribution Details:") + ctx.print(distro_details, soft_wrap=True) + if TYPE_CHECKING: + assert isinstance(distro_details["label"], str) + assert isinstance(distro_details["codename"], str) + assert isinstance(distro_details["suitename"], str) + label: str = distro_details["label"] + codename: str = distro_details["codename"] + + ftp_archive_config_suite = "" + if distro == "debian": + suitename: str = distro_details["suitename"] + ftp_archive_config_suite = ( + f"""\n APT::FTPArchive::Release::Suite "{suitename}";\n""" + ) + archive_description = f"SaltProject {display_name} Python 3{'' if not nightly_build_from else ' development'} Salt package repo" + ftp_archive_config = f"""\ + APT::FTPArchive::Release::Origin "SaltProject"; + APT::FTPArchive::Release::Label "{label}";{ftp_archive_config_suite} + APT::FTPArchive::Release::Codename "{codename}"; + APT::FTPArchive::Release::Architectures "{distro_arch}"; + APT::FTPArchive::Release::Components "main"; + APT::FTPArchive::Release::Description "{archive_description}"; + APT::FTPArchive::Release::Acquire-By-Hash "yes"; + Dir {{ + ArchiveDir "."; + }}; + BinDirectory "pool" {{ + Packages "dists/{codename}/main/binary-{distro_arch}/Packages"; + Sources "dists/{codename}/main/source/Sources"; + Contents "dists/{codename}/main/Contents-{distro_arch}"; + }} + """ + ctx.info("Creating repository directory structure ...") + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version=distro_version, + distro_arch=distro_arch, + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + create_repo_path = create_full_repo_path( + ctx, + repo_path, + 
salt_version, + distro, + distro_version=distro_version, + distro_arch=distro_arch, + nightly_build_from=nightly_build_from, + ) + ftp_archive_config_file = create_repo_path / "apt-ftparchive.conf" + ctx.info(f"Writing {ftp_archive_config_file} ...") + ftp_archive_config_file.write_text(textwrap.dedent(ftp_archive_config)) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + + pool_path = create_repo_path / "pool" + pool_path.mkdir(exist_ok=True) + for fpath in incoming.iterdir(): + dpath = pool_path / fpath.name + ctx.info(f"Copying {fpath} to {dpath} ...") + shutil.copyfile(fpath, dpath) + if fpath.suffix == ".dsc": + ctx.info(f"Running 'debsign' on {dpath} ...") + ctx.run("debsign", "--re-sign", "-k", key_id, str(dpath), interactive=True) + + dists_path = create_repo_path / "dists" + symlink_parent_path = dists_path / codename / "main" + symlink_paths = ( + symlink_parent_path / "by-hash" / "SHA256", + symlink_parent_path / "source" / "by-hash" / "SHA256", + symlink_parent_path / f"binary-{distro_arch}" / "by-hash" / "SHA256", + ) + + for path in symlink_paths: + path.mkdir(exist_ok=True, parents=True) + + cmdline = ["apt-ftparchive", "generate", "apt-ftparchive.conf"] + ctx.info(f"Running '{' '.join(cmdline)}' ...") + ctx.run(*cmdline, cwd=create_repo_path) + + ctx.info("Creating by-hash symlinks ...") + for path in symlink_paths: + for fpath in path.parent.parent.iterdir(): + if not fpath.is_file(): + continue + sha256sum = ctx.run("sha256sum", str(fpath), capture=True) + link = path / sha256sum.stdout.decode().split()[0] + link.symlink_to(f"../../{fpath.name}") + + cmdline = [ + "apt-ftparchive", + "--no-md5", + "--no-sha1", + "--no-sha512", + "release", + "-c", + "apt-ftparchive.conf", + f"dists/{codename}/", + ] + ctx.info(f"Running '{' '.join(cmdline)}' ...") + ret = ctx.run(*cmdline, capture=True, cwd=create_repo_path) + release_file = dists_path / codename / "Release" + ctx.info(f"Writing {release_file} with the 
# Map of supported rpm-based distributions to the distro versions we build
# repositories for.
_rpm_distro_info = {
    "amazon": ["2", "2023"],
    "redhat": ["7", "8", "9"],
    "fedora": ["36", "37", "38"],
    "photon": ["3", "4", "5"],
}


@create.command(
    name="rpm",
    arguments={
        "salt_version": {
            "help": (
                "The salt version for which to build the repository configuration files. "
                "If not passed, it will be discovered by running 'python3 salt/version.py'."
            ),
            "required": True,
        },
        "distro": {
            # Fixed copy-paste from the deb command: this is the rpm command.
            "help": "The rpm based distribution to build the repository for",
            "choices": list(_rpm_distro_info),
            "required": True,
        },
        "distro_version": {
            "help": "The distro version.",
            "required": True,
        },
        "distro_arch": {
            "help": "The distribution architecture",
            "choices": ("x86_64", "aarch64", "arm64"),
        },
        "repo_path": {
            "help": "Path where the repository shall be created.",
            "required": True,
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "incoming": {
            "help": (
                "The path to the directory containing the files that should be added to "
                "the repository."
            ),
            "required": True,
        },
        "nightly_build_from": {
            "help": "Development repository target",
        },
    },
)
def rpm(
    ctx: Context,
    salt_version: str = None,
    distro: str = None,
    distro_version: str = None,
    incoming: pathlib.Path = None,
    repo_path: pathlib.Path = None,
    key_id: str = None,
    # NOTE(review): "amd64" is not among the declared choices for rpm
    # ("x86_64", "aarch64", "arm64"); default kept for interface
    # compatibility — confirm whether it should be "x86_64".
    distro_arch: str = "amd64",
    nightly_build_from: str = None,
):
    """
    Create the redhat repository.

    Copies the incoming ``*.rpm``/``*.src.rpm`` packages into the repository
    tree, signs them, runs ``createrepo`` (via docker when not locally
    available), and writes the ``.repo`` configuration files plus the
    ``latest``/``<major>`` convenience symlinks.
    """
    if TYPE_CHECKING:
        assert salt_version is not None
        assert distro is not None
        assert distro_version is not None
        assert incoming is not None
        assert repo_path is not None
        assert key_id is not None
    display_name = f"{distro.capitalize()} {distro_version}"
    if distro_version not in _rpm_distro_info[distro]:
        ctx.error(f"Support for {display_name} is missing.")
        ctx.exit(1)

    ctx.info("Creating repository directory structure ...")
    create_repo_path = create_top_level_repo_path(
        ctx,
        repo_path,
        salt_version,
        distro,
        distro_version=distro_version,
        distro_arch=distro_arch,
        nightly_build_from=nightly_build_from,
    )
    # Export the GPG key in use
    tools.utils.export_gpg_key(ctx, key_id, create_repo_path)

    create_repo_path = create_full_repo_path(
        ctx,
        repo_path,
        salt_version,
        distro,
        distro_version=distro_version,
        distro_arch=distro_arch,
        nightly_build_from=nightly_build_from,
    )

    # Export the GPG key in use
    tools.utils.export_gpg_key(ctx, key_id, create_repo_path)

    for fpath in incoming.iterdir():
        # Source RPMs go into the conventional SRPMS sub directory.
        if ".src" in fpath.suffixes:
            dpath = create_repo_path / "SRPMS" / fpath.name
        else:
            dpath = create_repo_path / fpath.name
        ctx.info(f"Copying {fpath} to {dpath} ...")
        shutil.copyfile(fpath, dpath)
        if fpath.suffix == ".rpm":
            ctx.info(f"Running 'rpmsign' on {dpath} ...")
            ctx.run(
                "rpmsign",
                "--key-id",
                key_id,
                "--addsign",
                "--digest-algo=sha256",
                str(dpath),
            )

    createrepo = shutil.which("createrepo")
    if createrepo is None:
        # Fall back to running createrepo inside a container, mapping the
        # current uid/gid so generated metadata is owned by the caller.
        container = "ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9"
        ctx.info(f"Using docker container '{container}' to call 'createrepo'...")
        uid = ctx.run("id", "-u", capture=True).stdout.strip().decode()
        gid = ctx.run("id", "-g", capture=True).stdout.strip().decode()
        ctx.run(
            "docker",
            "run",
            "--rm",
            "-v",
            f"{create_repo_path.resolve()}:/code",
            "-u",
            f"{uid}:{gid}",
            "-w",
            "/code",
            container,
            "createrepo",
            ".",
        )
    else:
        ctx.run("createrepo", ".", cwd=create_repo_path)

    if nightly_build_from:
        repo_domain = os.environ.get("SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io")
    else:
        repo_domain = os.environ.get(
            "SALT_REPO_DOMAIN_STAGING", "staging.repo.saltproject.io"
        )

    # Log credentials masked (first and last character only).
    salt_repo_user = os.environ.get("SALT_REPO_USER")
    if salt_repo_user:
        log.info(
            "SALT_REPO_USER: %s",
            salt_repo_user[0] + "*" * (len(salt_repo_user) - 2) + salt_repo_user[-1],
        )
    salt_repo_pass = os.environ.get("SALT_REPO_PASS")
    if salt_repo_pass:
        log.info(
            "SALT_REPO_PASS: %s",
            salt_repo_pass[0] + "*" * (len(salt_repo_pass) - 2) + salt_repo_pass[-1],
        )
    if salt_repo_user and salt_repo_pass:
        repo_domain = f"{salt_repo_user}:{salt_repo_pass}@{repo_domain}"

    def _create_repo_file(repo_file_path, url_suffix):
        # Write a yum/dnf ``.repo`` configuration file at *repo_file_path*
        # pointing at the repository identified by *url_suffix*.
        # (Parameter renamed from ``create_repo_path``, which shadowed the
        # outer variable of that name while the log line below read the
        # closure variable instead of the parameter.)
        ctx.info(f"Creating '{repo_file_path.relative_to(repo_path)}' file ...")
        if nightly_build_from:
            base_url = f"salt-dev/{nightly_build_from}/{datetime.utcnow().strftime('%Y-%m-%d')}/"
            repo_file_contents = "[salt-nightly-repo]"
        elif "rc" in salt_version:
            base_url = "salt_rc/"
            repo_file_contents = "[salt-rc-repo]"
        else:
            base_url = ""
            repo_file_contents = "[salt-repo]"
        base_url += f"salt/py3/{distro}/{distro_version}/{distro_arch}/{url_suffix}"
        if distro == "amazon":
            distro_name = "Amazon Linux"
        elif distro == "redhat":
            distro_name = "RHEL/CentOS"
        else:
            distro_name = distro.capitalize()

        # Old yum (pre EL8, amazon 2) still understands failovermethod;
        # indentation matches the template so textwrap.dedent keeps the
        # line aligned.  NOTE(review): original in-string whitespace was
        # lost in transport — confirm against the rendered .repo file.
        if distro != "photon" and int(distro_version) < 8:
            failovermethod = "\n        failovermethod=priority"
        else:
            failovermethod = ""

        repo_file_contents += textwrap.dedent(
            f"""
        name=Salt repo for {distro_name} {distro_version} PY3
        baseurl=https://{repo_domain}/{base_url}
        skip_if_unavailable=True{failovermethod}
        priority=10
        enabled=1
        enabled_metadata=1
        gpgcheck=1
        gpgkey=https://{repo_domain}/{base_url}/{tools.utils.GPG_KEY_FILENAME}.pub
        """
        )
        repo_file_path.write_text(repo_file_contents)

    if nightly_build_from:
        repo_file_path = create_repo_path.parent / "nightly.repo"
    else:
        repo_file_path = create_repo_path.parent / f"{create_repo_path.name}.repo"

    _create_repo_file(repo_file_path, f"minor/{salt_version}")

    if not nightly_build_from:
        remote_versions = _get_remote_versions(
            tools.utils.STAGING_BUCKET_NAME,
            create_repo_path.parent.relative_to(repo_path),
        )
        major_version = Version(salt_version).major
        matching_major = None
        for version in remote_versions:
            if version.major == major_version:
                matching_major = version
                break
        # Only (re)point the <major> and latest entries when this release
        # is not older than what is already published.
        if not matching_major or matching_major <= salt_version:
            major_link = create_repo_path.parent.parent / str(major_version)
            ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...")
            major_link.symlink_to(f"minor/{salt_version}")
            repo_file_path = create_repo_path.parent.parent / f"{major_version}.repo"
            _create_repo_file(repo_file_path, str(major_version))
        if not remote_versions or remote_versions[0] <= salt_version:
            latest_link = create_repo_path.parent.parent / "latest"
            ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...")
            latest_link.symlink_to(f"minor/{salt_version}")
            repo_file_path = create_repo_path.parent.parent / "latest.repo"
            _create_repo_file(repo_file_path, "latest")

    ctx.info("Done")
@create.command(
    name="windows",
    arguments={
        "salt_version": {
            "help": "The salt version for which to build the repository",
            "required": True,
        },
        "repo_path": {
            "help": "Path where the repository shall be created.",
            "required": True,
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "incoming": {
            "help": (
                "The path to the directory containing the files that should be added to "
                "the repository."
            ),
            "required": True,
        },
        "nightly_build_from": {
            "help": "Development repository target",
        },
    },
)
def windows(
    ctx: Context,
    salt_version: str = None,
    incoming: pathlib.Path = None,
    repo_path: pathlib.Path = None,
    key_id: str = None,
    nightly_build_from: str = None,
):
    """
    Create the windows repository.
    """
    if TYPE_CHECKING:
        assert salt_version is not None
        assert incoming is not None
        assert repo_path is not None
        assert key_id is not None
    # Thin wrapper: all the heavy lifting lives in _create_onedir_based_repo.
    _create_onedir_based_repo(
        ctx,
        salt_version=salt_version,
        nightly_build_from=nightly_build_from,
        repo_path=repo_path,
        incoming=incoming,
        key_id=key_id,
        distro="windows",
        pkg_suffixes=(".msi", ".exe"),
    )
    ctx.info("Done")


@create.command(
    name="macos",
    arguments={
        "salt_version": {
            "help": "The salt version for which to build the repository",
            "required": True,
        },
        "repo_path": {
            "help": "Path where the repository shall be created.",
            "required": True,
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "incoming": {
            "help": (
                "The path to the directory containing the files that should be added to "
                "the repository."
            ),
            "required": True,
        },
        "nightly_build_from": {
            "help": "Development repository target",
        },
    },
)
def macos(
    ctx: Context,
    salt_version: str = None,
    incoming: pathlib.Path = None,
    repo_path: pathlib.Path = None,
    key_id: str = None,
    nightly_build_from: str = None,
):
    """
    Create the macos repository.
    """
    # Docstring fixed: it previously said "windows" (copy-paste).
    if TYPE_CHECKING:
        assert salt_version is not None
        assert incoming is not None
        assert repo_path is not None
        assert key_id is not None
    _create_onedir_based_repo(
        ctx,
        salt_version=salt_version,
        nightly_build_from=nightly_build_from,
        repo_path=repo_path,
        incoming=incoming,
        key_id=key_id,
        distro="macos",
        pkg_suffixes=(".pkg",),
    )
    ctx.info("Done")
@create.command(
    name="onedir",
    arguments={
        "salt_version": {
            "help": "The salt version for which to build the repository",
            "required": True,
        },
        "repo_path": {
            "help": "Path where the repository shall be created.",
            "required": True,
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "incoming": {
            "help": (
                "The path to the directory containing the files that should be added to "
                "the repository."
            ),
            "required": True,
        },
        "nightly_build_from": {
            "help": "Development repository target",
        },
    },
)
def onedir(
    ctx: Context,
    salt_version: str = None,
    incoming: pathlib.Path = None,
    repo_path: pathlib.Path = None,
    key_id: str = None,
    nightly_build_from: str = None,
):
    """
    Create the onedir repository.
    """
    if TYPE_CHECKING:
        assert salt_version is not None
        assert incoming is not None
        assert repo_path is not None
        assert key_id is not None
    # Thin wrapper: all the heavy lifting lives in _create_onedir_based_repo.
    _create_onedir_based_repo(
        ctx,
        salt_version=salt_version,
        nightly_build_from=nightly_build_from,
        repo_path=repo_path,
        incoming=incoming,
        key_id=key_id,
        distro="onedir",
        pkg_suffixes=(".xz", ".zip"),
    )
    ctx.info("Done")


@create.command(
    name="src",
    arguments={
        "salt_version": {
            "help": "The salt version for which to build the repository",
            "required": True,
        },
        "repo_path": {
            "help": "Path where the repository shall be created.",
            "required": True,
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "incoming": {
            "help": (
                "The path to the directory containing the files that should be added to "
                "the repository."
            ),
            "required": True,
        },
        "nightly_build_from": {
            "help": "Development repository target",
        },
    },
)
def src(
    ctx: Context,
    salt_version: str = None,
    incoming: pathlib.Path = None,
    repo_path: pathlib.Path = None,
    key_id: str = None,
    nightly_build_from: str = None,
):
    """
    Create the source tarball repository.
    """
    # Docstring fixed: it previously said "onedir" (copy-paste).
    if TYPE_CHECKING:
        assert salt_version is not None
        assert incoming is not None
        assert repo_path is not None
        assert key_id is not None

    ctx.info("Creating repository directory structure ...")
    create_repo_path = create_top_level_repo_path(
        ctx,
        repo_path,
        salt_version,
        distro="src",
        nightly_build_from=nightly_build_from,
    )
    # Export the GPG key in use
    tools.utils.export_gpg_key(ctx, key_id, create_repo_path)
    create_repo_path = create_repo_path / salt_version
    create_repo_path.mkdir(exist_ok=True, parents=True)
    hashes_base_path = create_repo_path / f"salt-{salt_version}"
    for fpath in incoming.iterdir():
        # Only the source tarballs are published here.
        if fpath.suffix not in (".gz",):
            continue
        ctx.info(f"* Processing {fpath} ...")
        dpath = create_repo_path / fpath.name
        ctx.info(f"Copying {fpath} to {dpath} ...")
        shutil.copyfile(fpath, dpath)
        for hash_name in ("blake2b", "sha512", "sha3_512"):
            ctx.info(f" * Calculating {hash_name} ...")
            hexdigest = _get_file_checksum(fpath, hash_name)
            # One aggregated checksum file per algorithm, plus a per-file
            # sidecar checksum file.
            with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh:
                wfh.write(f"{hexdigest} {dpath.name}\n")
            with open(f"{dpath}.{hash_name}", "a+") as wfh:
                wfh.write(f"{hexdigest} {dpath.name}\n")

    for fpath in create_repo_path.iterdir():
        # Do not sign the exported key material itself.
        if fpath.suffix in (".pub", ".gpg"):
            continue
        tools.utils.gpg_sign(ctx, key_id, fpath)

    # Export the GPG key in use
    tools.utils.export_gpg_key(ctx, key_id, create_repo_path)
    ctx.info("Done")
+ """ + if TYPE_CHECKING: + assert salt_version is not None + assert incoming is not None + assert repo_path is not None + assert key_id is not None + + ctx.info("Creating repository directory structure ...") + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro="src", + nightly_build_from=nightly_build_from, + ) + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + create_repo_path = create_repo_path / salt_version + create_repo_path.mkdir(exist_ok=True, parents=True) + hashes_base_path = create_repo_path / f"salt-{salt_version}" + for fpath in incoming.iterdir(): + if fpath.suffix not in (".gz",): + continue + ctx.info(f"* Processing {fpath} ...") + dpath = create_repo_path / fpath.name + ctx.info(f"Copying {fpath} to {dpath} ...") + shutil.copyfile(fpath, dpath) + for hash_name in ("blake2b", "sha512", "sha3_512"): + ctx.info(f" * Calculating {hash_name} ...") + hexdigest = _get_file_checksum(fpath, hash_name) + with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: + wfh.write(f"{hexdigest} {dpath.name}\n") + with open(f"{dpath}.{hash_name}", "a+") as wfh: + wfh.write(f"{hexdigest} {dpath.name}\n") + + for fpath in create_repo_path.iterdir(): + if fpath.suffix in (".pub", ".gpg"): + continue + tools.utils.gpg_sign(ctx, key_id, fpath) + + # Export the GPG key in use + tools.utils.export_gpg_key(ctx, key_id, create_repo_path) + ctx.info("Done") + + +def _get_remote_versions(bucket_name: str, remote_path: str): + log.info( + "Getting remote versions from bucket %r under path: %s", + bucket_name, + remote_path, + ) + remote_path = str(remote_path) + if not remote_path.endswith("/"): + remote_path += "/" + + s3 = boto3.client("s3") + ret = s3.list_objects( + Bucket=bucket_name, + Delimiter="/", + Prefix=remote_path, + ) + if "CommonPrefixes" not in ret: + return [] + versions = [] + for entry in ret["CommonPrefixes"]: + _, version = entry["Prefix"].rstrip("/").rsplit("/", 1) + 
def _create_onedir_based_repo(
    ctx: Context,
    salt_version: str,
    nightly_build_from: str | None,
    repo_path: pathlib.Path,
    incoming: pathlib.Path,
    key_id: str,
    distro: str,
    pkg_suffixes: tuple[str, ...],
):
    """
    Build a flat, onedir-style package repository for *distro*.

    Copies the packages from *incoming* into the repository tree, records
    per-file metadata (os/arch/checksums) into ``repo.json`` files, GPG-signs
    the non-package files, and maintains the ``latest`` and ``<major>``
    symlinks/entries.  Called by the windows/macos/onedir commands.
    """
    ctx.info("Creating repository directory structure ...")
    create_repo_path = create_top_level_repo_path(
        ctx,
        repo_path,
        salt_version,
        distro,
        nightly_build_from=nightly_build_from,
    )
    # Export the GPG key in use
    tools.utils.export_gpg_key(ctx, key_id, create_repo_path)

    create_repo_path = create_full_repo_path(
        ctx,
        repo_path,
        salt_version,
        distro,
        nightly_build_from=nightly_build_from,
    )
    # Nightly builds keep their repo.json one level deeper than releases.
    if not nightly_build_from:
        repo_json_path = create_repo_path.parent.parent / "repo.json"
    else:
        repo_json_path = create_repo_path.parent / "repo.json"

    # NOTE(review): nightly reads the existing repo.json from the RELEASE
    # bucket while non-nightly reads from STAGING — confirm this is intended.
    if nightly_build_from:
        bucket_name = tools.utils.RELEASE_BUCKET_NAME
    else:
        bucket_name = tools.utils.STAGING_BUCKET_NAME

    release_json = {}

    # Checksum sidecars and json files are regenerated, never copied.
    copy_exclusions = (
        ".blake2b",
        ".sha512",
        ".sha3_512",
        ".BLAKE2B",
        ".SHA512",
        ".SHA3_512",
        ".json",
    )
    hashes_base_path = create_repo_path / f"salt-{salt_version}"
    for fpath in incoming.iterdir():
        if fpath.suffix in copy_exclusions:
            continue
        ctx.info(f"* Processing {fpath} ...")
        dpath = create_repo_path / fpath.name
        ctx.info(f"Copying {fpath} to {dpath} ...")
        shutil.copyfile(fpath, dpath)
        # Architecture is derived from the file name; order matters because
        # "-x86" is a substring of "-x86_64".
        if "-amd64" in dpath.name.lower():
            arch = "amd64"
        elif "-x86_64" in dpath.name.lower():
            arch = "x86_64"
        elif "-x86" in dpath.name.lower():
            arch = "x86"
        elif "-aarch64" in dpath.name.lower():
            arch = "aarch64"
        elif "-arm64" in dpath.name.lower():
            arch = "arm64"
        else:
            ctx.error(
                f"Cannot pickup the right architecture from the filename '{dpath.name}'."
            )
            ctx.exit(1)
        if distro == "onedir":
            # Onedir archives carry the target OS in the file name.
            if "-onedir-linux-" in dpath.name.lower():
                release_os = "linux"
            elif "-onedir-darwin-" in dpath.name.lower():
                release_os = "macos"
            elif "-onedir-windows-" in dpath.name.lower():
                release_os = "windows"
            else:
                ctx.error(
                    f"Cannot pickup the right OS from the filename '{dpath.name}'."
                )
                ctx.exit(1)
        else:
            release_os = distro
        release_json[dpath.name] = {
            "name": dpath.name,
            "version": salt_version,
            "os": release_os,
            "arch": arch,
        }
        for hash_name in ("blake2b", "sha512", "sha3_512"):
            ctx.info(f" * Calculating {hash_name} ...")
            hexdigest = _get_file_checksum(fpath, hash_name)
            release_json[dpath.name][hash_name.upper()] = hexdigest
            # Aggregated per-algorithm file plus per-package sidecar.
            with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh:
                wfh.write(f"{hexdigest} {dpath.name}\n")
            with open(f"{dpath}.{hash_name}", "a+") as wfh:
                wfh.write(f"{hexdigest} {dpath.name}\n")

    # Sign everything that is not a package (checksum files etc.).
    for fpath in create_repo_path.iterdir():
        if fpath.suffix in pkg_suffixes:
            continue
        tools.utils.gpg_sign(ctx, key_id, fpath)

    # Export the GPG key in use
    tools.utils.export_gpg_key(ctx, key_id, create_repo_path)

    repo_json = get_repo_json_file_contents(
        ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path
    )
    if nightly_build_from:
        # Nightly: just write the merged top-level repo.json and stop.
        ctx.info(f"Writing {repo_json_path} ...")
        repo_json_path.write_text(json.dumps(repo_json, sort_keys=True))
        return

    major_version = Version(salt_version).major
    minor_repo_json_path = create_repo_path.parent / "repo.json"
    minor_repo_json = get_repo_json_file_contents(
        ctx,
        bucket_name=bucket_name,
        repo_path=repo_path,
        repo_json_path=minor_repo_json_path,
    )
    minor_repo_json[salt_version] = release_json
    versions = parse_versions(*list(minor_repo_json))
    ctx.info(
        f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: "
        f"{', '.join(str(vs) for vs in versions)}"
    )
    minor_versions = [v for v in versions if v.major == major_version]
    ctx.info(
        f"Collected versions(Matching major: {major_version}) from "
        f"{minor_repo_json_path.relative_to(repo_path)}: "
        f"{', '.join(str(vs) for vs in minor_versions)}"
    )
    if not versions:
        latest_version = Version(salt_version)
    else:
        latest_version = versions[0]
    if not minor_versions:
        latest_minor_version = Version(salt_version)
    else:
        latest_minor_version = minor_versions[0]

    ctx.info(f"Release Version: {salt_version}")
    ctx.info(f"Latest Repo Version: {latest_version}")
    ctx.info(f"Latest Release Minor Version: {latest_minor_version}")

    # Only advance "latest" when this release is not older than the newest
    # already-published version.  NOTE(review): comparison of a Version
    # against the str salt_version relies on tools.utils.Version supporting
    # str comparison — confirm.
    latest_link = create_repo_path.parent.parent / "latest"
    if latest_version <= salt_version:
        repo_json["latest"] = release_json
        ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...")
        if latest_link.exists():
            latest_link.unlink()
        latest_link.symlink_to(f"minor/{salt_version}")
    else:
        ctx.info(
            f"Not creating the '{latest_link.relative_to(repo_path)}' symlink "
            f"since {latest_version} > {salt_version}"
        )

    major_link = create_repo_path.parent.parent / str(major_version)
    if latest_minor_version <= salt_version:
        minor_repo_json["latest"] = release_json
        # This is the latest minor, update the major in the top level repo.json
        # to this version
        repo_json[str(major_version)] = release_json
        ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...")
        if major_link.exists():
            major_link.unlink()
        major_link.symlink_to(f"minor/{salt_version}")
    else:
        ctx.info(
            f"Not creating the '{major_link.relative_to(repo_path)}' symlink "
            f"since {latest_minor_version} > {salt_version}"
        )

    ctx.info(f"Writing {minor_repo_json_path} ...")
    minor_repo_json_path.write_text(json.dumps(minor_repo_json, sort_keys=True))

    ctx.info(f"Writing {repo_json_path} ...")
    repo_json_path.write_text(json.dumps(repo_json, sort_keys=True))
ctx.info( + f"Collected versions(Matching major: {major_version}) from " + f"{minor_repo_json_path.relative_to(repo_path)}: " + f"{', '.join(str(vs) for vs in minor_versions)}" + ) + if not versions: + latest_version = Version(salt_version) + else: + latest_version = versions[0] + if not minor_versions: + latest_minor_version = Version(salt_version) + else: + latest_minor_version = minor_versions[0] + + ctx.info(f"Release Version: {salt_version}") + ctx.info(f"Latest Repo Version: {latest_version}") + ctx.info(f"Latest Release Minor Version: {latest_minor_version}") + + latest_link = create_repo_path.parent.parent / "latest" + if latest_version <= salt_version: + repo_json["latest"] = release_json + ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") + if latest_link.exists(): + latest_link.unlink() + latest_link.symlink_to(f"minor/{salt_version}") + else: + ctx.info( + f"Not creating the '{latest_link.relative_to(repo_path)}' symlink " + f"since {latest_version} > {salt_version}" + ) + + major_link = create_repo_path.parent.parent / str(major_version) + if latest_minor_version <= salt_version: + minor_repo_json["latest"] = release_json + # This is the latest minor, update the major in the top level repo.json + # to this version + repo_json[str(major_version)] = release_json + ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") + if major_link.exists(): + major_link.unlink() + major_link.symlink_to(f"minor/{salt_version}") + else: + ctx.info( + f"Not creating the '{major_link.relative_to(repo_path)}' symlink " + f"since {latest_minor_version} > {salt_version}" + ) + + ctx.info(f"Writing {minor_repo_json_path} ...") + minor_repo_json_path.write_text(json.dumps(minor_repo_json, sort_keys=True)) + + ctx.info(f"Writing {repo_json_path} ...") + repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) + + +def _get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str: + + with fpath.open("rb") as rfh: + try: + digest 
= hashlib.file_digest(rfh, hash_name) # type: ignore[attr-defined] + except AttributeError: + # Python < 3.11 + buf = bytearray(2**18) # Reusable buffer to reduce allocations. + view = memoryview(buf) + digest = getattr(hashlib, hash_name)() + while True: + size = rfh.readinto(buf) + if size == 0: + break # EOF + digest.update(view[:size]) + hexdigest: str = digest.hexdigest() + return hexdigest diff --git a/tools/pkg/repo/publish.py b/tools/pkg/repo/publish.py new file mode 100644 index 000000000000..2a743ac046bb --- /dev/null +++ b/tools/pkg/repo/publish.py @@ -0,0 +1,643 @@ +""" +These commands are used to build the package repository files. +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import fnmatch +import json +import logging +import os +import pathlib +import re +import tempfile +import textwrap +from typing import TYPE_CHECKING, Any + +import boto3 +import packaging.version +from botocore.exceptions import ClientError +from ptscripts import Context, command_group + +import tools.pkg +import tools.utils +import tools.utils.repo +from tools.utils import Version, get_salt_releases, parse_versions +from tools.utils.repo import create_full_repo_path, get_repo_json_file_contents + +log = logging.getLogger(__name__) + +publish = command_group( + name="publish", + help="Packaging Repository Publication Related Commands", + parent=["pkg", "repo"], +) + + +@publish.command( + arguments={ + "repo_path": { + "help": "Local path for the repository that shall be published.", + }, + "salt_version": { + "help": "The salt version of the repository to publish", + "required": True, + }, + } +) +def nightly(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): + """ + Publish to the nightly bucket. 
+ """ + if TYPE_CHECKING: + assert salt_version is not None + _publish_repo( + ctx, repo_path=repo_path, nightly_build=True, salt_version=salt_version + ) + + +@publish.command( + arguments={ + "repo_path": { + "help": "Local path for the repository that shall be published.", + }, + "salt_version": { + "help": "The salt version of the repository to publish", + "required": True, + }, + } +) +def staging(ctx: Context, repo_path: pathlib.Path, salt_version: str = None): + """ + Publish to the staging bucket. + """ + if TYPE_CHECKING: + assert salt_version is not None + _publish_repo(ctx, repo_path=repo_path, stage=True, salt_version=salt_version) + + +@publish.command( + arguments={ + "salt_version": { + "help": "The salt version to release.", + }, + } +) +def release(ctx: Context, salt_version: str): + """ + Publish to the release bucket. + """ + if "rc" in salt_version: + bucket_folder = "salt_rc/salt/py3" + else: + bucket_folder = "salt/py3" + + files_to_copy: list[str] + directories_to_delete: list[str] = [] + + ctx.info("Grabbing remote file listing of files to copy...") + s3 = boto3.client("s3") + repo_release_files_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-files.json" + ) + repo_release_symlinks_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: + local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name + try: + bucket_name = tools.utils.STAGING_BUCKET_NAME + with local_release_files_path.open("wb") as wfh: + ctx.info( + f"Downloading {repo_release_files_path} from bucket {bucket_name} ..." 
@publish.command(
    arguments={
        "salt_version": {
            "help": "The salt version to release.",
        },
    }
)
def release(ctx: Context, salt_version: str):
    """
    Publish to the release bucket.

    Copies the staged release files from the staging bucket into the release
    bucket, refreshes the onedir/windows/macos ``repo.json`` files, rewrites
    the rpm ``.repo`` files to point at the release domain and uploads them.
    """
    # RC releases live under a separate top-level folder.
    if "rc" in salt_version:
        bucket_folder = "salt_rc/salt/py3"
    else:
        bucket_folder = "salt/py3"

    files_to_copy: list[str]
    directories_to_delete: list[str] = []

    ctx.info("Grabbing remote file listing of files to copy...")
    s3 = boto3.client("s3")
    # These manifests are written during staging and drive the promotion.
    repo_release_files_path = pathlib.Path(
        f"release-artifacts/{salt_version}/.release-files.json"
    )
    repo_release_symlinks_path = pathlib.Path(
        f"release-artifacts/{salt_version}/.release-symlinks.json"
    )
    with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd:
        local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name
        try:
            bucket_name = tools.utils.STAGING_BUCKET_NAME
            with local_release_files_path.open("wb") as wfh:
                ctx.info(
                    f"Downloading {repo_release_files_path} from bucket {bucket_name} ..."
                )
                s3.download_fileobj(
                    Bucket=bucket_name,
                    Key=str(repo_release_files_path),
                    Fileobj=wfh,
                )
            files_to_copy = json.loads(local_release_files_path.read_text())
        except ClientError as exc:
            # Distinguish missing manifest (404) and bad request (400) from
            # any other client error; all paths abort the release.
            if "Error" not in exc.response:
                log.exception(f"Error downloading {repo_release_files_path}: {exc}")
                ctx.exit(1)
            if exc.response["Error"]["Code"] == "404":
                ctx.error(f"Could not find {repo_release_files_path} in bucket.")
                ctx.exit(1)
            if exc.response["Error"]["Code"] == "400":
                ctx.error(
                    f"Could not download {repo_release_files_path} from bucket: {exc}"
                )
                ctx.exit(1)
            log.exception(f"Error downloading {repo_release_files_path}: {exc}")
            ctx.exit(1)
        local_release_symlinks_path = (
            pathlib.Path(tsd) / repo_release_symlinks_path.name
        )
        try:
            with local_release_symlinks_path.open("wb") as wfh:
                ctx.info(
                    f"Downloading {repo_release_symlinks_path} from bucket {bucket_name} ..."
                )
                s3.download_fileobj(
                    Bucket=bucket_name,
                    Key=str(repo_release_symlinks_path),
                    Fileobj=wfh,
                )
            directories_to_delete = json.loads(local_release_symlinks_path.read_text())
        except ClientError as exc:
            if "Error" not in exc.response:
                log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}")
                ctx.exit(1)
            if exc.response["Error"]["Code"] == "404":
                ctx.error(f"Could not find {repo_release_symlinks_path} in bucket.")
                ctx.exit(1)
            if exc.response["Error"]["Code"] == "400":
                ctx.error(
                    f"Could not download {repo_release_symlinks_path} from bucket: {exc}"
                )
                ctx.exit(1)
            log.exception(f"Error downloading {repo_release_symlinks_path}: {exc}")
            ctx.exit(1)

    # S3 has no symlinks: directories that were symlinks locally must be
    # wiped before re-uploading their replacement contents.
    if directories_to_delete:
        with tools.utils.create_progress_bar() as progress:
            task = progress.add_task(
                "Deleting directories to override.",
                total=len(directories_to_delete),
            )
            for directory in directories_to_delete:
                try:
                    objects_to_delete: list[dict[str, str]] = []
                    for path in _get_repo_file_list(
                        bucket_name=tools.utils.RELEASE_BUCKET_NAME,
                        bucket_folder=bucket_folder,
                        glob_match=f"{directory}/**",
                    ):
                        objects_to_delete.append({"Key": path})
                    if objects_to_delete:
                        s3.delete_objects(
                            Bucket=tools.utils.RELEASE_BUCKET_NAME,
                            Delete={"Objects": objects_to_delete},
                        )
                except ClientError:
                    log.exception("Failed to delete remote files")
                finally:
                    progress.update(task, advance=1)

    already_copied_files: list[str] = []
    s3 = boto3.client("s3")
    dot_repo_files = []
    with tools.utils.create_progress_bar() as progress:
        task = progress.add_task(
            "Copying files between buckets", total=len(files_to_copy)
        )
        for fpath in files_to_copy:
            if fpath in already_copied_files:
                continue
            # ``.repo`` files need their domain rewritten later — remember them.
            if fpath.endswith(".repo"):
                dot_repo_files.append(fpath)
            ctx.info(f" * Copying {fpath}")
            try:
                s3.copy_object(
                    Bucket=tools.utils.RELEASE_BUCKET_NAME,
                    Key=fpath,
                    CopySource={
                        "Bucket": tools.utils.STAGING_BUCKET_NAME,
                        "Key": fpath,
                    },
                    MetadataDirective="COPY",
                    TaggingDirective="COPY",
                    ServerSideEncryption="AES256",
                )
                already_copied_files.append(fpath)
            except ClientError:
                log.exception(f"Failed to copy {fpath}")
            finally:
                progress.update(task, advance=1)

    # Now let's get the onedir based repositories where we need to update several repo.json
    major_version = packaging.version.parse(salt_version).major
    with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd:
        repo_path = pathlib.Path(tsd)
        for distro in ("windows", "macos", "onedir"):

            create_repo_path = create_full_repo_path(
                ctx,
                repo_path,
                salt_version,
                distro=distro,
            )
            repo_json_path = create_repo_path.parent.parent / "repo.json"

            release_repo_json = get_repo_json_file_contents(
                ctx,
                bucket_name=tools.utils.RELEASE_BUCKET_NAME,
                repo_path=repo_path,
                repo_json_path=repo_json_path,
            )
            minor_repo_json_path = create_repo_path.parent / "repo.json"

            staging_minor_repo_json = get_repo_json_file_contents(
                ctx,
                bucket_name=tools.utils.STAGING_BUCKET_NAME,
                repo_path=repo_path,
                repo_json_path=minor_repo_json_path,
            )
            release_minor_repo_json = get_repo_json_file_contents(
                ctx,
                bucket_name=tools.utils.RELEASE_BUCKET_NAME,
                repo_path=repo_path,
                repo_json_path=minor_repo_json_path,
            )

            # The staged entry for this version becomes the released entry.
            release_json = staging_minor_repo_json[salt_version]

            major_version = Version(salt_version).major
            versions = parse_versions(*list(release_minor_repo_json))
            ctx.info(
                f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: "
                f"{', '.join(str(vs) for vs in versions)}"
            )
            minor_versions = [v for v in versions if v.major == major_version]
            ctx.info(
                f"Collected versions(Matching major: {major_version}) from "
                f"{minor_repo_json_path.relative_to(repo_path)}: "
                f"{', '.join(str(vs) for vs in minor_versions)}"
            )
            if not versions:
                latest_version = Version(salt_version)
            else:
                latest_version = versions[0]
            if not minor_versions:
                latest_minor_version = Version(salt_version)
            else:
                latest_minor_version = minor_versions[0]

            ctx.info(f"Release Version: {salt_version}")
            ctx.info(f"Latest Repo Version: {latest_version}")
            ctx.info(f"Latest Release Minor Version: {latest_minor_version}")

            # Add the minor version
            release_minor_repo_json[salt_version] = release_json

            # Only advance the "latest" pointers when not older than what is
            # already released.
            if latest_version <= salt_version:
                release_repo_json["latest"] = release_json

            if latest_minor_version <= salt_version:
                release_minor_repo_json["latest"] = release_json

            ctx.info(f"Writing {minor_repo_json_path} ...")
            minor_repo_json_path.write_text(
                json.dumps(release_minor_repo_json, sort_keys=True)
            )
            ctx.info(f"Writing {repo_json_path} ...")
            repo_json_path.write_text(json.dumps(release_repo_json, sort_keys=True))

        # And now, let's get the several rpm "*.repo" files to update the base
        # domain from staging to release
        release_domain = os.environ.get(
            "SALT_REPO_DOMAIN_RELEASE", "repo.saltproject.io"
        )
        for path in dot_repo_files:
            repo_file_path = repo_path.joinpath(path)
            repo_file_path.parent.mkdir(exist_ok=True, parents=True)
            bucket_name = tools.utils.STAGING_BUCKET_NAME
            try:
                ret = s3.head_object(Bucket=bucket_name, Key=path)
                ctx.info(
                    f"Downloading existing '{repo_file_path.relative_to(repo_path)}' "
                    f"file from bucket {bucket_name}"
                )
                size = ret["ContentLength"]
                with repo_file_path.open("wb") as wfh:
                    with tools.utils.create_progress_bar(
                        file_progress=True
                    ) as progress:
                        task = progress.add_task(
                            description="Downloading...", total=size
                        )
                        s3.download_fileobj(
                            Bucket=bucket_name,
                            Key=path,
                            Fileobj=wfh,
                            Callback=tools.utils.repo.UpdateProgress(progress, task),
                        )
                # Rewrite baseurl/gpgkey host from the staging domain to the
                # release domain, line by line.
                updated_contents = re.sub(
                    r"^(baseurl|gpgkey)=https://([^/]+)/(.*)$",
                    rf"\1=https://{release_domain}/\3",
                    repo_file_path.read_text(),
                    flags=re.MULTILINE,
                )
                ctx.info(f"Updated '{repo_file_path.relative_to(repo_path)}:")
                ctx.print(updated_contents)
                repo_file_path.write_text(updated_contents)
            except ClientError as exc:
                # A missing .repo file in staging is tolerated; anything
                # else is re-raised.
                if "Error" not in exc.response:
                    raise
                if exc.response["Error"]["Code"] != "404":
                    raise
                ctx.info(f"Could not find {repo_file_path} in bucket {bucket_name}")

        # Upload everything materialized in the temporary tree.
        for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True):
            for path in filenames:
                upload_path = pathlib.Path(dirpath, path)
                relpath = upload_path.relative_to(repo_path)
                size = upload_path.stat().st_size
                ctx.info(f" {relpath}")
                with tools.utils.create_progress_bar(file_progress=True) as progress:
                    task = progress.add_task(description="Uploading...", total=size)
                    s3.upload_file(
                        str(upload_path),
                        tools.utils.RELEASE_BUCKET_NAME,
                        str(relpath),
                        Callback=tools.utils.repo.UpdateProgress(progress, task),
                    )
@publish.command(
    arguments={
        "salt_version": {
            "help": "The salt version to release.",
        },
        "key_id": {
            "help": "The GnuPG key ID used to sign.",
            "required": True,
        },
        "repository": {
            "help": (
                "The full repository name, ie, 'saltstack/salt' on GitHub "
                "to run the checks against."
            )
        },
    }
)
def github(
    ctx: Context,
    salt_version: str,
    key_id: str = None,
    repository: str = "saltstack/salt",
):
    """
    Publish the release on GitHub releases.

    Downloads the staged release artifacts, signs them, writes the release
    body markdown, and exposes the artifact list plus ``make-latest`` flag
    through ``GITHUB_OUTPUT`` for the calling workflow.
    """
    if TYPE_CHECKING:
        assert key_id is not None

    s3 = boto3.client("s3")

    # Let's download the release artifacts stored in staging
    artifacts_path = pathlib.Path.cwd() / "release-artifacts"
    artifacts_path.mkdir(exist_ok=True)
    release_artifacts_listing: dict[pathlib.Path, int] = {}
    continuation_token = None
    # Paginate through the bucket listing; dot-files are internal manifests.
    while True:
        kwargs: dict[str, str] = {}
        if continuation_token:
            kwargs["ContinuationToken"] = continuation_token
        ret = s3.list_objects_v2(
            Bucket=tools.utils.STAGING_BUCKET_NAME,
            Prefix=f"release-artifacts/{salt_version}",
            FetchOwner=False,
            **kwargs,
        )
        contents = ret.pop("Contents", None)
        if contents is None:
            break
        for entry in contents:
            entry_path = pathlib.Path(entry["Key"])
            if entry_path.name.startswith("."):
                continue
            release_artifacts_listing[entry_path] = entry["Size"]
        if not ret["IsTruncated"]:
            break
        continuation_token = ret["NextContinuationToken"]

    for entry_path, size in release_artifacts_listing.items():
        ctx.info(f" * {entry_path.name}")
        local_path = artifacts_path / entry_path.name
        with local_path.open("wb") as wfh:
            with tools.utils.create_progress_bar(file_progress=True) as progress:
                task = progress.add_task(description="Downloading...", total=size)
                s3.download_fileobj(
                    Bucket=tools.utils.STAGING_BUCKET_NAME,
                    Key=str(entry_path),
                    Fileobj=wfh,
                    Callback=tools.utils.repo.UpdateProgress(progress, task),
                )

    # Sign everything that is not already signature/key material.
    for artifact in artifacts_path.iterdir():
        if artifact.suffix in (".patch", ".asc", ".gpg", ".pub"):
            continue
        tools.utils.gpg_sign(ctx, key_id, artifact)

    # Export the GPG key in use
    tools.utils.export_gpg_key(ctx, key_id, artifacts_path)

    release_message = f"""\
    # Welcome to Salt v{salt_version}

    | :exclamation: ATTENTION |
    |:-------------------------------------------------------------------------------------------------------------------------|
    | The archives generated by GitHub(`Source code(zip)`, `Source code(tar.gz)`) will not report Salt's version properly. |
    | Please use the tarball generated by The Salt Project Team(`salt-{salt_version}.tar.gz`).
    """
    release_message_path = artifacts_path / "gh-release-body.md"
    release_message_path.write_text(textwrap.dedent(release_message).strip())

    github_output = os.environ.get("GITHUB_OUTPUT")
    if github_output is None:
        ctx.warn("The 'GITHUB_OUTPUT' variable is not set. Stop processing.")
        ctx.exit(0)

    if TYPE_CHECKING:
        assert github_output is not None

    # NOTE(review): "messsage" (triple s) looks like a typo, but the key is
    # consumed by the GitHub workflow — do not rename one side without the
    # other.
    with open(github_output, "a", encoding="utf-8") as wfh:
        wfh.write(f"release-messsage-file={release_message_path.resolve()}\n")

    try:
        releases = get_salt_releases(ctx, repository)
    except SystemExit:
        # Fall back to the canonical repository when the requested one fails.
        ctx.warn(f"Failed to get salt releases from repository '{repository}'")
        releases = get_salt_releases(ctx, "saltstack/salt")

    # NOTE(review): assumes get_salt_releases returns versions in ascending
    # order so releases[-1] is the newest — confirm.
    if Version(salt_version) >= releases[-1]:
        make_latest = True
    else:
        make_latest = False
    with open(github_output, "a", encoding="utf-8") as wfh:
        wfh.write(f"make-latest={json.dumps(make_latest)}\n")

    artifacts_to_upload = []
    for artifact in artifacts_path.iterdir():
        if artifact.suffix == ".patch":
            continue
        if artifact.name == release_message_path.name:
            continue
        artifacts_to_upload.append(str(artifact.resolve()))

    with open(github_output, "a", encoding="utf-8") as wfh:
        wfh.write(f"release-artifacts={','.join(artifacts_to_upload)}\n")
    ctx.exit(0)
s3.list_objects_v2( + Bucket=bucket_name, + Prefix=bucket_folder, + FetchOwner=False, + **kwargs, + ) + contents = ret.pop("Contents", None) + if contents is None: + break + for entry in contents: + if fnmatch.fnmatch(entry["Key"], glob_match): + listing.append(entry) + if not ret["IsTruncated"]: + break + continuation_token = ret["NextContinuationToken"] + return listing + + +def _get_repo_file_list( + bucket_name: str, bucket_folder: str, glob_match: str +) -> list[str]: + return [ + entry["Key"] + for entry in _get_repo_detailed_file_list( + bucket_name, bucket_folder, glob_match=glob_match + ) + ] + + +def _publish_repo( + ctx: Context, + repo_path: pathlib.Path, + salt_version: str, + nightly_build: bool = False, + stage: bool = False, +): + """ + Publish packaging repositories. + """ + if nightly_build: + bucket_name = tools.utils.RELEASE_BUCKET_NAME + elif stage: + bucket_name = tools.utils.STAGING_BUCKET_NAME + else: + bucket_name = tools.utils.RELEASE_BUCKET_NAME + + ctx.info("Preparing upload ...") + s3 = boto3.client("s3") + to_delete_paths: dict[pathlib.Path, list[dict[str, str]]] = {} + to_upload_paths: list[pathlib.Path] = [] + symlink_paths: list[str] = [] + uploaded_files: list[str] = [] + for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): + for dirname in dirnames: + path = pathlib.Path(dirpath, dirname) + if not path.is_symlink(): + continue + # This is a symlink, then we need to delete all files under + # that directory in S3 because S3 does not understand symlinks + # and we would end up adding files to that folder instead of + # replacing it. 
+ try: + relpath = path.relative_to(repo_path) + ret = s3.list_objects( + Bucket=bucket_name, + Prefix=str(relpath), + ) + if "Contents" not in ret: + continue + objects = [] + for entry in ret["Contents"]: + objects.append({"Key": entry["Key"]}) + to_delete_paths[path] = objects + symlink_paths.append(str(relpath)) + except ClientError as exc: + if "Error" not in exc.response: + raise + if exc.response["Error"]["Code"] != "404": + raise + + for fpath in filenames: + path = pathlib.Path(dirpath, fpath) + to_upload_paths.append(path) + + with tools.utils.create_progress_bar() as progress: + task = progress.add_task( + "Deleting directories to override.", total=len(to_delete_paths) + ) + for base, objects in to_delete_paths.items(): + relpath = base.relative_to(repo_path) + bucket_uri = f"s3://{bucket_name}/{relpath}" + progress.update(task, description=f"Deleting {bucket_uri}") + try: + ret = s3.delete_objects( + Bucket=bucket_name, + Delete={"Objects": objects}, + ) + except ClientError: + log.exception(f"Failed to delete {bucket_uri}") + finally: + progress.update(task, advance=1) + + try: + ctx.info("Uploading repository ...") + for upload_path in to_upload_paths: + relpath = upload_path.relative_to(repo_path) + size = upload_path.stat().st_size + ctx.info(f" {relpath}") + with tools.utils.create_progress_bar(file_progress=True) as progress: + task = progress.add_task(description="Uploading...", total=size) + s3.upload_file( + str(upload_path), + bucket_name, + str(relpath), + Callback=tools.utils.repo.UpdateProgress(progress, task), + ExtraArgs={ + "Metadata": { + "x-amz-meta-salt-release-version": salt_version, + } + }, + ) + uploaded_files.append(str(relpath)) + if stage is True: + repo_files_path = f"release-artifacts/{salt_version}/.release-files.json" + ctx.info(f"Uploading {repo_files_path} ...") + s3.put_object( + Key=repo_files_path, + Bucket=bucket_name, + Body=json.dumps(uploaded_files).encode(), + Metadata={ + "x-amz-meta-salt-release-version": 
salt_version, + }, + ) + repo_symlinks_path = ( + f"release-artifacts/{salt_version}/.release-symlinks.json" + ) + ctx.info(f"Uploading {repo_symlinks_path} ...") + s3.put_object( + Key=repo_symlinks_path, + Bucket=bucket_name, + Body=json.dumps(symlink_paths).encode(), + Metadata={ + "x-amz-meta-salt-release-version": salt_version, + }, + ) + except KeyboardInterrupt: + pass diff --git a/tools/pkgrepo.py b/tools/pkgrepo.py deleted file mode 100644 index 1315b0238246..000000000000 --- a/tools/pkgrepo.py +++ /dev/null @@ -1,1796 +0,0 @@ -""" -These commands are used to build the pacakge repository files. -""" -# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated -from __future__ import annotations - -import fnmatch -import hashlib -import json -import logging -import os -import pathlib -import shutil -import sys -import tempfile -import textwrap -from datetime import datetime -from typing import TYPE_CHECKING, Any - -import packaging.version -from ptscripts import Context, command_group - -import tools.pkg -import tools.utils -from tools.utils import Version - -try: - import boto3 - from botocore.exceptions import ClientError -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise - -log = logging.getLogger(__name__) - -# Define the command group -repo = command_group( - name="repo", - help="Packaging Repository Related Commands", - description=__doc__, - parent=tools.pkg.pkg, -) - -create = command_group( - name="create", help="Packaging Repository Creation Related Commands", parent=repo -) - -publish = command_group( - name="publish", - help="Packaging Repository Publication Related Commands", - parent=repo, -) - - -_deb_distro_info = { - "debian": { - "10": { - "label": "deb10ary", - "codename": "buster", - "suitename": "oldstable", - }, - "11": { - "label": "deb11ary", - "codename": "bullseye", - 
"suitename": "stable", - }, - }, - "ubuntu": { - "20.04": { - "label": "salt_ubuntu2004", - "codename": "focal", - }, - "22.04": { - "label": "salt_ubuntu2204", - "codename": "jammy", - }, - }, -} - - -@create.command( - name="deb", - arguments={ - "salt_version": { - "help": ( - "The salt version for which to build the repository configuration files. " - "If not passed, it will be discovered by running 'python3 salt/version.py'." - ), - "required": True, - }, - "distro": { - "help": "The debian based distribution to build the repository for", - "choices": list(_deb_distro_info), - "required": True, - }, - "distro_version": { - "help": "The distro version.", - "required": True, - }, - "distro_arch": { - "help": "The distribution architecture", - "choices": ("x86_64", "amd64", "aarch64", "arm64"), - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build": { - "help": "Developement repository target", - }, - "rc_build": { - "help": "Release Candidate repository target", - }, - }, -) -def debian( - ctx: Context, - salt_version: str = None, - distro: str = None, - distro_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - distro_arch: str = "amd64", - nightly_build: bool = False, - rc_build: bool = False, -): - """ - Create the debian repository. 
- """ - if TYPE_CHECKING: - assert salt_version is not None - assert distro is not None - assert distro_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - display_name = f"{distro.capitalize()} {distro_version}" - if distro_version not in _deb_distro_info[distro]: - ctx.error(f"Support for {display_name} is missing.") - ctx.exit(1) - - if distro_arch == "x86_64": - ctx.info(f"The {distro_arch} arch is an alias for 'amd64'. Adjusting.") - distro_arch = "amd64" - - if distro_arch == "aarch64": - ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. Adjusting.") - distro_arch = "arm64" - - distro_details = _deb_distro_info[distro][distro_version] - - ctx.info("Distribution Details:") - ctx.info(distro_details) - if TYPE_CHECKING: - assert isinstance(distro_details["label"], str) - assert isinstance(distro_details["codename"], str) - assert isinstance(distro_details["suitename"], str) - label: str = distro_details["label"] - codename: str = distro_details["codename"] - - ftp_archive_config_suite = "" - if distro == "debian": - suitename: str = distro_details["suitename"] - ftp_archive_config_suite = ( - f"""\n APT::FTPArchive::Release::Suite "{suitename}";\n""" - ) - archive_description = f"SaltProject {display_name} Python 3{'' if nightly_build else ' development'} Salt package repo" - ftp_archive_config = f"""\ - APT::FTPArchive::Release::Origin "SaltProject"; - APT::FTPArchive::Release::Label "{label}";{ftp_archive_config_suite} - APT::FTPArchive::Release::Codename "{codename}"; - APT::FTPArchive::Release::Architectures "{distro_arch}"; - APT::FTPArchive::Release::Components "main"; - APT::FTPArchive::Release::Description "{archive_description}"; - APT::FTPArchive::Release::Acquire-By-Hash "yes"; - Dir {{ - ArchiveDir "."; - }}; - BinDirectory "pool" {{ - Packages "dists/{codename}/main/binary-{distro_arch}/Packages"; - Sources "dists/{codename}/main/source/Sources"; - Contents 
"dists/{codename}/main/Contents-{distro_arch}"; - }} - """ - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_repo_path( - repo_path, - salt_version, - distro, - distro_version=distro_version, - distro_arch=distro_arch, - rc_build=rc_build, - nightly_build=nightly_build, - ) - - ftp_archive_config_file = create_repo_path / "apt-ftparchive.conf" - ctx.info(f"Writing {ftp_archive_config_file} ...") - ftp_archive_config_file.write_text(textwrap.dedent(ftp_archive_config)) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - pool_path = create_repo_path / "pool" - pool_path.mkdir(exist_ok=True) - for fpath in incoming.iterdir(): - dpath = pool_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - if fpath.suffix == ".dsc": - ctx.info(f"Running 'debsign' on {dpath} ...") - ctx.run("debsign", "--re-sign", "-k", key_id, str(dpath), interactive=True) - - dists_path = create_repo_path / "dists" - symlink_parent_path = dists_path / codename / "main" - symlink_paths = ( - symlink_parent_path / "by-hash" / "SHA256", - symlink_parent_path / "source" / "by-hash" / "SHA256", - symlink_parent_path / f"binary-{distro_arch}" / "by-hash" / "SHA256", - ) - - for path in symlink_paths: - path.mkdir(exist_ok=True, parents=True) - - cmdline = ["apt-ftparchive", "generate", "apt-ftparchive.conf"] - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ctx.run(*cmdline, cwd=create_repo_path) - - ctx.info("Creating by-hash symlinks ...") - for path in symlink_paths: - for fpath in path.parent.parent.iterdir(): - if not fpath.is_file(): - continue - sha256sum = ctx.run("sha256sum", str(fpath), capture=True) - link = path / sha256sum.stdout.decode().split()[0] - link.symlink_to(f"../../{fpath.name}") - - cmdline = [ - "apt-ftparchive", - "--no-md5", - "--no-sha1", - "--no-sha512", - "release", - "-c", - "apt-ftparchive.conf", - f"dists/{codename}/", - ] - ctx.info(f"Running 
'{' '.join(cmdline)}' ...") - ret = ctx.run(*cmdline, capture=True, cwd=create_repo_path) - release_file = dists_path / codename / "Release" - ctx.info(f"Writing {release_file} with the output of the previous command...") - release_file.write_bytes(ret.stdout) - - cmdline = [ - "gpg", - "-u", - key_id, - "-o", - f"dists/{codename}/InRelease", - "-a", - "-s", - "--clearsign", - f"dists/{codename}/Release", - ] - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ctx.run(*cmdline, cwd=create_repo_path) - - cmdline = [ - "gpg", - "-u", - key_id, - "-o", - f"dists/{codename}/Release.gpg", - "-a", - "-b", - "-s", - f"dists/{codename}/Release", - ] - - ctx.info(f"Running '{' '.join(cmdline)}' ...") - ctx.run(*cmdline, cwd=create_repo_path) - if nightly_build is False: - remote_versions = _get_remote_versions( - tools.utils.STAGING_BUCKET_NAME, - create_repo_path.parent.relative_to(repo_path), - ) - major_version = Version(salt_version).major - matching_major = None - for version in remote_versions: - if version.major == major_version: - matching_major = version - break - if not matching_major or matching_major < salt_version: - major_link = create_repo_path.parent.parent / str(major_version) - ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") - major_link.symlink_to(f"minor/{salt_version}") - if not remote_versions or remote_versions[0] < salt_version: - latest_link = create_repo_path.parent.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(f"minor/{salt_version}") - else: - latest_link = create_repo_path.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(create_repo_path.name) - - ctx.info("Done") - - -_rpm_distro_info = { - "amazon": ["2"], - "redhat": ["7", "8", "9"], - "fedora": ["36", "37", "38"], -} - - -@create.command( - name="rpm", - arguments={ - "salt_version": { - "help": ( - "The salt version for which to 
build the repository configuration files. " - "If not passed, it will be discovered by running 'python3 salt/version.py'." - ), - "required": True, - }, - "distro": { - "help": "The debian based distribution to build the repository for", - "choices": list(_rpm_distro_info), - "required": True, - }, - "distro_version": { - "help": "The distro version.", - "required": True, - }, - "distro_arch": { - "help": "The distribution architecture", - "choices": ("x86_64", "aarch64", "arm64"), - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build": { - "help": "Developement repository target", - }, - "rc_build": { - "help": "Release Candidate repository target", - }, - }, -) -def rpm( - ctx: Context, - salt_version: str = None, - distro: str = None, - distro_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - distro_arch: str = "amd64", - nightly_build: bool = False, - rc_build: bool = False, -): - """ - Create the redhat repository. - """ - if TYPE_CHECKING: - assert salt_version is not None - assert distro is not None - assert distro_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - display_name = f"{distro.capitalize()} {distro_version}" - if distro_version not in _rpm_distro_info[distro]: - ctx.error(f"Support for {display_name} is missing.") - ctx.exit(1) - - if distro_arch == "aarch64": - ctx.info(f"The {distro_arch} arch is an alias for 'arm64'. 
Adjusting.") - distro_arch = "arm64" - - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_repo_path( - repo_path, - salt_version, - distro, - distro_version=distro_version, - distro_arch=distro_arch, - rc_build=rc_build, - nightly_build=nightly_build, - ) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - for fpath in incoming.iterdir(): - if ".src" in fpath.suffixes: - dpath = create_repo_path / "SRPMS" / fpath.name - else: - dpath = create_repo_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - if fpath.suffix == ".rpm": - ctx.info(f"Running 'rpmsign' on {dpath} ...") - ctx.run( - "rpmsign", - "--key-id", - key_id, - "--addsign", - "--digest-algo=sha256", - str(dpath), - ) - - createrepo = shutil.which("createrepo") - if createrepo is None: - container = "ghcr.io/saltstack/salt-ci-containers/packaging:centosstream-9" - ctx.info(f"Using docker container '{container}' to call 'createrepo'...") - uid = ctx.run("id", "-u", capture=True).stdout.strip().decode() - gid = ctx.run("id", "-g", capture=True).stdout.strip().decode() - ctx.run( - "docker", - "run", - "--rm", - "-v", - f"{create_repo_path.resolve()}:/code", - "-u", - f"{uid}:{gid}", - "-w", - "/code", - container, - "createrepo", - ".", - ) - else: - ctx.run("createrepo", ".", cwd=create_repo_path) - - def _create_repo_file(create_repo_path, url_suffix): - ctx.info(f"Creating '{repo_file_path.relative_to(repo_path)}' file ...") - if nightly_build: - base_url = "salt-dev/" - repo_file_contents = "[salt-nightly-repo]" - elif rc_build: - base_url = "salt_rc/" - repo_file_contents = "[salt-rc-repo]" - else: - base_url = "" - repo_file_contents = "[salt-repo]" - base_url += f"salt/py3/{distro}/{url_suffix}" - if distro == "amazon": - distro_name = "Amazon Linux" - elif distro == "redhat": - distro_name = "RHEL/CentOS" - else: - distro_name = distro.capitalize() - - if int(distro_version) < 
8: - failovermethod = "\n failovermethod=priority" - else: - failovermethod = "" - - repo_file_contents += textwrap.dedent( - f""" - name=Salt repo for {distro_name} {distro_version} PY3 - baseurl=https://repo.saltproject.io/{base_url} - skip_if_unavailable=True{failovermethod} - priority=10 - enabled=1 - enabled_metadata=1 - gpgcheck=1 - gpgkey={base_url}/{tools.utils.GPG_KEY_FILENAME}.pub - """ - ) - create_repo_path.write_text(repo_file_contents) - - if nightly_build: - repo_file_path = create_repo_path.parent / "nightly.repo" - else: - repo_file_path = create_repo_path.parent / f"{create_repo_path.name}.repo" - - _create_repo_file(repo_file_path, salt_version) - - if nightly_build is False and rc_build is False: - remote_versions = _get_remote_versions( - tools.utils.STAGING_BUCKET_NAME, - create_repo_path.parent.relative_to(repo_path), - ) - major_version = Version(salt_version).major - matching_major = None - for version in remote_versions: - if version.major == major_version: - matching_major = version - break - if not matching_major or matching_major < salt_version: - major_link = create_repo_path.parent.parent / str(major_version) - ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") - major_link.symlink_to(f"minor/{salt_version}") - repo_file_path = create_repo_path.parent.parent / f"{major_version}.repo" - _create_repo_file(repo_file_path, str(major_version)) - if not remote_versions or remote_versions[0] < salt_version: - latest_link = create_repo_path.parent.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(f"minor/{salt_version}") - repo_file_path = create_repo_path.parent.parent / "latest.repo" - _create_repo_file(repo_file_path, "latest") - else: - latest_link = create_repo_path.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(create_repo_path.name) - repo_file_path = 
create_repo_path.parent.parent / "latest.repo" - _create_repo_file(repo_file_path, "latest") - - ctx.info("Done") - - -@create.command( - name="windows", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build": { - "help": "Developement repository target", - }, - "rc_build": { - "help": "Release Candidate repository target", - }, - }, -) -def windows( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build: bool = False, - rc_build: bool = False, -): - """ - Create the windows repository. - """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - _create_onedir_based_repo( - ctx, - salt_version=salt_version, - nightly_build=nightly_build, - rc_build=rc_build, - repo_path=repo_path, - incoming=incoming, - key_id=key_id, - distro="windows", - pkg_suffixes=(".msi", ".exe"), - ) - ctx.info("Done") - - -@create.command( - name="macos", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." 
- ), - "required": True, - }, - "nightly_build": { - "help": "Developement repository target", - }, - "rc_build": { - "help": "Release Candidate repository target", - }, - }, -) -def macos( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build: bool = False, - rc_build: bool = False, -): - """ - Create the windows repository. - """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - _create_onedir_based_repo( - ctx, - salt_version=salt_version, - nightly_build=nightly_build, - rc_build=rc_build, - repo_path=repo_path, - incoming=incoming, - key_id=key_id, - distro="macos", - pkg_suffixes=(".pkg",), - ) - ctx.info("Done") - - -@create.command( - name="onedir", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build": { - "help": "Developement repository target", - }, - "rc_build": { - "help": "Release Candidate repository target", - }, - }, -) -def onedir( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build: bool = False, - rc_build: bool = False, -): - """ - Create the onedir repository. 
- """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - _create_onedir_based_repo( - ctx, - salt_version=salt_version, - nightly_build=nightly_build, - rc_build=rc_build, - repo_path=repo_path, - incoming=incoming, - key_id=key_id, - distro="onedir", - pkg_suffixes=(".xz", ".zip"), - ) - ctx.info("Done") - - -@create.command( - name="src", - arguments={ - "salt_version": { - "help": "The salt version for which to build the repository", - "required": True, - }, - "repo_path": { - "help": "Path where the repository shall be created.", - "required": True, - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "incoming": { - "help": ( - "The path to the directory containing the files that should added to " - "the repository." - ), - "required": True, - }, - "nightly_build": { - "help": "Developement repository target", - }, - "rc_build": { - "help": "Release Candidate repository target", - }, - }, -) -def src( - ctx: Context, - salt_version: str = None, - incoming: pathlib.Path = None, - repo_path: pathlib.Path = None, - key_id: str = None, - nightly_build: bool = False, - rc_build: bool = False, -): - """ - Create the onedir repository. 
- """ - if TYPE_CHECKING: - assert salt_version is not None - assert incoming is not None - assert repo_path is not None - assert key_id is not None - - ctx.info("Creating repository directory structure ...") - create_repo_path = repo_path / "salt" / "py3" / "src" / salt_version - create_repo_path.mkdir(exist_ok=True, parents=True) - hashes_base_path = create_repo_path / f"salt-{salt_version}" - for fpath in incoming.iterdir(): - if fpath.suffix not in (".gz",): - continue - ctx.info(f"* Processing {fpath} ...") - dpath = create_repo_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - for hash_name in ("blake2b", "sha512", "sha3_512"): - ctx.info(f" * Calculating {hash_name} ...") - hexdigest = _get_file_checksum(fpath, hash_name) - with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - with open(f"{dpath}.{hash_name}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - - for fpath in create_repo_path.iterdir(): - tools.utils.gpg_sign(ctx, key_id, fpath) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - ctx.info("Done") - - -@publish.command( - arguments={ - "repo_path": { - "help": "Local path for the repository that shall be published.", - }, - } -) -def nightly(ctx: Context, repo_path: pathlib.Path): - """ - Publish to the nightly bucket. - """ - _publish_repo(ctx, repo_path=repo_path, nightly_build=True) - - -@publish.command( - arguments={ - "repo_path": { - "help": "Local path for the repository that shall be published.", - }, - "rc_build": { - "help": "Release Candidate repository target", - }, - } -) -def staging(ctx: Context, repo_path: pathlib.Path, rc_build: bool = False): - """ - Publish to the staging bucket. 
- """ - _publish_repo(ctx, repo_path=repo_path, rc_build=rc_build, stage=True) - - -@repo.command(name="backup-previous-releases") -def backup_previous_releases(ctx: Context): - """ - Backup previous releases. - """ - files_in_backup: dict[str, datetime] = {} - files_to_backup: list[tuple[str, datetime]] = [] - - ctx.info("Grabbing remote listing of files in backup ...") - for entry in _get_repo_detailed_file_list( - bucket_name=tools.utils.BACKUP_BUCKET_NAME, - ): - files_in_backup[entry["Key"]] = entry["LastModified"] - - ctx.info("Grabbing remote listing of files to backup ...") - for entry in _get_repo_detailed_file_list( - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - ): - files_to_backup.append((entry["Key"], entry["LastModified"])) - - s3 = boto3.client("s3") - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Back up previous releases", total=len(files_to_backup) - ) - for fpath, last_modified in files_to_backup: - try: - last_modified_backup = files_in_backup.get(fpath) - if last_modified_backup and last_modified_backup >= last_modified: - ctx.info(f" * Skipping unmodified {fpath}") - continue - - ctx.info(f" * Backup {fpath}") - s3.copy_object( - Bucket=tools.utils.BACKUP_BUCKET_NAME, - Key=fpath, - CopySource={ - "Bucket": tools.utils.RELEASE_BUCKET_NAME, - "Key": fpath, - }, - MetadataDirective="COPY", - TaggingDirective="COPY", - ServerSideEncryption="aws:kms", - ) - except ClientError as exc: - if "PreconditionFailed" not in str(exc): - log.exception(f"Failed to copy {fpath}") - finally: - progress.update(task, advance=1) - ctx.info("Done") - - -@publish.command( - arguments={ - "salt_version": { - "help": "The salt version to release.", - }, - "rc_build": { - "help": "Release Candidate repository target", - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - } -) -def release( - ctx: Context, salt_version: str, key_id: str = None, rc_build: bool = False -): - """ - Publish to the 
release bucket. - """ - if TYPE_CHECKING: - assert key_id is not None - - if rc_build: - bucket_folder = "salt_rc/salt/py3" - else: - bucket_folder = "salt/py3" - - files_to_copy: list[str] - files_to_delete: list[str] = [] - files_to_duplicate: list[tuple[str, str]] = [] - - ctx.info("Grabbing remote file listing of files to copy...") - - glob_match = f"{bucket_folder}/**/minor/{salt_version}/**" - files_to_copy = _get_repo_file_list( - bucket_name=tools.utils.STAGING_BUCKET_NAME, - bucket_folder=bucket_folder, - glob_match=glob_match, - ) - glob_match = f"{bucket_folder}/**/src/{salt_version}/**" - files_to_copy.extend( - _get_repo_file_list( - bucket_name=tools.utils.STAGING_BUCKET_NAME, - bucket_folder=bucket_folder, - glob_match=glob_match, - ) - ) - - if not files_to_copy: - ctx.error(f"Could not find any files related to the '{salt_version}' release.") - ctx.exit(1) - - onedir_listing: dict[str, list[str]] = {} - s3 = boto3.client("s3") - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Copying files between buckets", total=len(files_to_copy) - ) - for fpath in files_to_copy: - if fpath.startswith(f"{bucket_folder}/windows/"): - if "windows" not in onedir_listing: - onedir_listing["windows"] = [] - onedir_listing["windows"].append(fpath) - elif fpath.startswith(f"{bucket_folder}/macos/"): - if "macos" not in onedir_listing: - onedir_listing["macos"] = [] - onedir_listing["macos"].append(fpath) - elif fpath.startswith(f"{bucket_folder}/onedir/"): - if "onedir" not in onedir_listing: - onedir_listing["onedir"] = [] - onedir_listing["onedir"].append(fpath) - else: - if "package" not in onedir_listing: - onedir_listing["package"] = [] - onedir_listing["package"].append(fpath) - ctx.info(f" * Copying {fpath}") - try: - s3.copy_object( - Bucket=tools.utils.RELEASE_BUCKET_NAME, - Key=fpath, - CopySource={ - "Bucket": tools.utils.STAGING_BUCKET_NAME, - "Key": fpath, - }, - MetadataDirective="COPY", - TaggingDirective="COPY", - 
ServerSideEncryption="aws:kms", - ) - except ClientError: - log.exception(f"Failed to copy {fpath}") - finally: - progress.update(task, advance=1) - - # Now let's get the onedir based repositories where we need to update several repo.json - update_latest = False - update_minor = False - major_version = packaging.version.parse(salt_version).major - with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: - repo_path = pathlib.Path(tsd) - for distro in ("windows", "macos", "onedir"): - - create_repo_path = _create_repo_path( - repo_path, - salt_version, - distro, - rc_build=rc_build, - ) - repo_json_path = create_repo_path.parent.parent / "repo.json" - - release_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - repo_path=repo_path, - repo_json_path=repo_json_path, - ) - minor_repo_json_path = create_repo_path.parent / "repo.json" - - staging_minor_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=tools.utils.STAGING_BUCKET_NAME, - repo_path=repo_path, - repo_json_path=minor_repo_json_path, - ) - release_minor_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - repo_path=repo_path, - repo_json_path=minor_repo_json_path, - ) - - release_json = staging_minor_repo_json[salt_version] - - major_version = Version(salt_version).major - versions = _parse_versions(*list(release_minor_repo_json)) - ctx.info( - f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in versions)}" - ) - minor_versions = [v for v in versions if v.major == major_version] - ctx.info( - f"Collected versions(Matching major: {major_version}) from {minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in minor_versions)}" - ) - if not versions: - latest_version = Version(salt_version) - else: - latest_version = versions[0] - if not minor_versions: - latest_minor_version = Version(salt_version) - else: - 
latest_minor_version = minor_versions[0] - - ctx.info(f"Release Version: {salt_version}") - ctx.info(f"Latest Repo Version: {latest_version}") - ctx.info(f"Latest Release Minor Version: {latest_minor_version}") - - # Add the minor version - release_minor_repo_json[salt_version] = release_json - - if latest_version <= salt_version: - update_latest = True - release_repo_json["latest"] = release_json - glob_match = f"{bucket_folder}/{distro}/**/latest/**" - files_to_delete.extend( - _get_repo_file_list( - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - bucket_folder=bucket_folder, - glob_match=glob_match, - ) - ) - for fpath in onedir_listing[distro]: - files_to_duplicate.append( - (fpath, fpath.replace(f"minor/{salt_version}", "latest")) - ) - - if latest_minor_version <= salt_version: - update_minor = True - release_minor_repo_json["latest"] = release_json - glob_match = f"{bucket_folder}/{distro}/**/{major_version}/**" - files_to_delete.extend( - _get_repo_file_list( - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - bucket_folder=bucket_folder, - glob_match=glob_match, - ) - ) - for fpath in onedir_listing[distro]: - files_to_duplicate.append( - ( - fpath, - fpath.replace(f"minor/{salt_version}", str(major_version)), - ) - ) - - ctx.info(f"Writing {minor_repo_json_path} ...") - minor_repo_json_path.write_text( - json.dumps(release_minor_repo_json, sort_keys=True) - ) - ctx.info(f"Writing {repo_json_path} ...") - repo_json_path.write_text(json.dumps(release_repo_json, sort_keys=True)) - - # Now lets handle latest and minor updates for non one dir based repositories - onedir_based_paths = ( - f"{bucket_folder}/windows/", - f"{bucket_folder}/macos/", - f"{bucket_folder}/onedir/", - ) - if update_latest: - glob_match = f"{bucket_folder}/**/latest/**" - for fpath in _get_repo_file_list( - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - bucket_folder=bucket_folder, - glob_match=glob_match, - ): - if fpath.startswith(onedir_based_paths): - continue - 
files_to_delete.append(fpath) - - for fpath in onedir_listing["package"]: - files_to_duplicate.append( - (fpath, fpath.replace(f"minor/{salt_version}", "latest")) - ) - - if update_minor: - glob_match = f"{bucket_folder}/**/{major_version}/**" - for fpath in _get_repo_file_list( - bucket_name=tools.utils.RELEASE_BUCKET_NAME, - bucket_folder=bucket_folder, - glob_match=glob_match, - ): - if fpath.startswith(onedir_based_paths): - continue - files_to_delete.append(fpath) - - for fpath in onedir_listing["package"]: - files_to_duplicate.append( - (fpath, fpath.replace(f"minor/{salt_version}", str(major_version))) - ) - - if files_to_delete: - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Deleting directories to override.", total=len(files_to_delete) - ) - try: - s3.delete_objects( - Bucket=tools.utils.RELEASE_BUCKET_NAME, - Delete={ - "Objects": [ - {"Key": path for path in files_to_delete}, - ] - }, - ) - except ClientError: - log.exception("Failed to delete remote files") - finally: - progress.update(task, advance=1) - - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Copying files between buckets", total=len(files_to_duplicate) - ) - for src, dst in files_to_duplicate: - ctx.info(f" * Copying {src}\n -> {dst}") - try: - s3.copy_object( - Bucket=tools.utils.RELEASE_BUCKET_NAME, - Key=dst, - CopySource={ - "Bucket": tools.utils.STAGING_BUCKET_NAME, - "Key": src, - }, - MetadataDirective="COPY", - TaggingDirective="COPY", - ServerSideEncryption="aws:kms", - ) - except ClientError: - log.exception(f"Failed to copy {fpath}") - finally: - progress.update(task, advance=1) - - for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): - for path in filenames: - upload_path = pathlib.Path(dirpath, path) - relpath = upload_path.relative_to(repo_path) - size = upload_path.stat().st_size - ctx.info(f" {relpath}") - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = 
progress.add_task(description="Uploading...", total=size) - s3.upload_file( - str(upload_path), - tools.utils.RELEASE_BUCKET_NAME, - str(relpath), - Callback=tools.utils.UpdateProgress(progress, task), - ) - - -@publish.command( - arguments={ - "salt_version": { - "help": "The salt version to release.", - }, - "rc_build": { - "help": "Release Candidate repository target", - }, - "key_id": { - "help": "The GnuPG key ID used to sign.", - "required": True, - }, - "repository": { - "help": ( - "The full repository name, ie, 'saltstack/salt' on GitHub " - "to run the checks against." - ) - }, - } -) -def github( - ctx: Context, - salt_version: str, - key_id: str = None, - rc_build: bool = False, - repository: str = "saltstack/salt", -): - """ - Publish the release on GitHub releases. - """ - if TYPE_CHECKING: - assert key_id is not None - - s3 = boto3.client("s3") - - # Let's download the release artifacts stored in staging - artifacts_path = pathlib.Path.cwd() / "release-artifacts" - artifacts_path.mkdir(exist_ok=True) - release_artifacts_listing: dict[pathlib.Path, int] = {} - continuation_token = None - while True: - kwargs: dict[str, str] = {} - if continuation_token: - kwargs["ContinuationToken"] = continuation_token - ret = s3.list_objects_v2( - Bucket=tools.utils.STAGING_BUCKET_NAME, - Prefix=f"release-artifacts/{salt_version}", - FetchOwner=False, - **kwargs, - ) - contents = ret.pop("Contents", None) - if contents is None: - break - for entry in contents: - entry_path = pathlib.Path(entry["Key"]) - release_artifacts_listing[entry_path] = entry["Size"] - if not ret["IsTruncated"]: - break - continuation_token = ret["NextContinuationToken"] - - for entry_path, size in release_artifacts_listing.items(): - ctx.info(f" * {entry_path.name}") - local_path = artifacts_path / entry_path.name - with local_path.open("wb") as wfh: - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Downloading...", total=size) - 
s3.download_fileobj( - Bucket=tools.utils.STAGING_BUCKET_NAME, - Key=str(entry_path), - Fileobj=wfh, - Callback=tools.utils.UpdateProgress(progress, task), - ) - - for artifact in artifacts_path.iterdir(): - if artifact.suffix in (".patch", ".asc", ".gpg", ".pub"): - continue - tools.utils.gpg_sign(ctx, key_id, artifact) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, artifacts_path) - - release_message = f"""\ - # Welcome to Salt v{salt_version} - - | :exclamation: ATTENTION | - |:-------------------------------------------------------------------------------------------------------------------------| - | The archives generated by GitHub(`Source code(zip)`, `Source code(tar.gz)`) will not report Salt's version properly. | - | Please use the tarball generated by The Salt Project Team(`salt-{salt_version}.tar.gz`). - """ - release_message_path = artifacts_path / "gh-release-body.md" - release_message_path.write_text(textwrap.dedent(release_message).strip()) - - github_output = os.environ.get("GITHUB_OUTPUT") - if github_output is None: - ctx.warn("The 'GITHUB_OUTPUT' variable is not set. 
Stop processing.") - ctx.exit(0) - - if TYPE_CHECKING: - assert github_output is not None - - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"release-messsage-file={release_message_path.resolve()}\n") - - releases = _get_salt_releases(ctx, repository) - if Version(salt_version) >= releases[-1]: - make_latest = True - else: - make_latest = False - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"make-latest={json.dumps(make_latest)}\n") - - artifacts_to_upload = [] - for artifact in artifacts_path.iterdir(): - if artifact.suffix == ".patch": - continue - if artifact.name == release_message_path.name: - continue - artifacts_to_upload.append(str(artifact.resolve())) - - with open(github_output, "a", encoding="utf-8") as wfh: - wfh.write(f"release-artifacts={','.join(artifacts_to_upload)}\n") - ctx.exit(0) - - -@repo.command( - name="confirm-unreleased", - arguments={ - "salt_version": { - "help": "The salt version to check", - }, - "repository": { - "help": ( - "The full repository name, ie, 'saltstack/salt' on GitHub " - "to run the checks against." - ) - }, - }, -) -def confirm_unreleased( - ctx: Context, salt_version: str, repository: str = "saltstack/salt" -): - """ - Confirm that the passed version is not yet tagged and/or released. 
- """ - releases = _get_salt_releases(ctx, repository) - if Version(salt_version) in releases: - ctx.error(f"There's already a '{salt_version}' tag or github release.") - ctx.exit(1) - ctx.info(f"Could not find a release for Salt Version '{salt_version}'") - ctx.exit(0) - - -def _get_salt_releases(ctx: Context, repository: str) -> list[Version]: - """ - Return a list of salt versions - """ - versions = set() - with ctx.web as web: - web.headers.update({"Accept": "application/vnd.github+json"}) - ret = web.get(f"https://api.github.com/repos/{repository}/tags") - if ret.status_code != 200: - ctx.error( - f"Failed to get the tags for repository {repository!r}: {ret.reason}" - ) - ctx.exit(1) - for tag in ret.json(): - name = tag["name"] - if name.startswith("v"): - name = name[1:] - if "-" in name: - # We're not going to parse dash tags - continue - if "docs" in name: - # We're not going to consider doc tags - continue - versions.add(Version(name)) - - # Now let's go through the github releases - ret = web.get(f"https://api.github.com/repos/{repository}/releases") - if ret.status_code != 200: - ctx.error( - f"Failed to get the releases for repository {repository!r}: {ret.reason}" - ) - ctx.exit(1) - for release in ret.json(): - name = release["name"] - if name.startswith("v"): - name = name[1:] - if not name: - print(123, release) - if name and "-" not in name and "docs" not in name: - # We're not going to parse dash or docs releases - versions.add(Version(name)) - name = release["tag_name"] - if "-" not in name and "docs" not in name: - # We're not going to parse dash or docs releases - versions.add(Version(name)) - return sorted(versions) - - -def _get_repo_detailed_file_list( - bucket_name: str, - bucket_folder: str = "", - glob_match: str = "**", -) -> list[dict[str, Any]]: - s3 = boto3.client("s3") - listing: list[dict[str, Any]] = [] - continuation_token = None - while True: - kwargs: dict[str, str] = {} - if continuation_token: - kwargs["ContinuationToken"] = 
continuation_token - ret = s3.list_objects_v2( - Bucket=bucket_name, - Prefix=bucket_folder, - FetchOwner=False, - **kwargs, - ) - contents = ret.pop("Contents", None) - if contents is None: - break - for entry in contents: - if fnmatch.fnmatch(entry["Key"], glob_match): - listing.append(entry) - if not ret["IsTruncated"]: - break - continuation_token = ret["NextContinuationToken"] - return listing - - -def _get_repo_file_list( - bucket_name: str, bucket_folder: str, glob_match: str -) -> list[str]: - return [ - entry["Key"] - for entry in _get_repo_detailed_file_list( - bucket_name, bucket_folder, glob_match=glob_match - ) - ] - - -def _get_remote_versions(bucket_name: str, remote_path: str): - remote_path = str(remote_path) - if not remote_path.endswith("/"): - remote_path += "/" - - s3 = boto3.client("s3") - ret = s3.list_objects( - Bucket=bucket_name, - Delimiter="/", - Prefix=remote_path, - ) - if "CommonPrefixes" not in ret: - return [] - versions = [] - for entry in ret["CommonPrefixes"]: - _, version = entry["Prefix"].rstrip("/").rsplit("/", 1) - if version == "latest": - continue - versions.append(Version(version)) - versions.sort(reverse=True) - return versions - - -def _create_onedir_based_repo( - ctx: Context, - salt_version: str, - nightly_build: bool, - rc_build: bool, - repo_path: pathlib.Path, - incoming: pathlib.Path, - key_id: str, - distro: str, - pkg_suffixes: tuple[str, ...], -): - ctx.info("Creating repository directory structure ...") - create_repo_path = _create_repo_path( - repo_path, salt_version, distro, rc_build=rc_build, nightly_build=nightly_build - ) - if nightly_build is False: - repo_json_path = create_repo_path.parent.parent / "repo.json" - else: - repo_json_path = create_repo_path.parent / "repo.json" - - if nightly_build: - bucket_name = tools.utils.NIGHTLY_BUCKET_NAME - else: - bucket_name = tools.utils.STAGING_BUCKET_NAME - - release_json = {} - - copy_exclusions = ( - ".blake2b", - ".sha512", - ".sha3_512", - ".BLAKE2B", - 
".SHA512", - ".SHA3_512", - ".json", - ) - hashes_base_path = create_repo_path / f"salt-{salt_version}" - for fpath in incoming.iterdir(): - if fpath.suffix in copy_exclusions: - continue - ctx.info(f"* Processing {fpath} ...") - dpath = create_repo_path / fpath.name - ctx.info(f"Copying {fpath} to {dpath} ...") - shutil.copyfile(fpath, dpath) - if "-amd64" in dpath.name.lower(): - arch = "amd64" - elif "-x86_64" in dpath.name.lower(): - arch = "x86_64" - elif "-x86" in dpath.name.lower(): - arch = "x86" - elif "-aarch64" in dpath.name.lower(): - arch = "aarch64" - else: - ctx.error( - f"Cannot pickup the right architecture from the filename '{dpath.name}'." - ) - ctx.exit(1) - if distro == "onedir": - if "-onedir-linux-" in dpath.name.lower(): - release_os = "linux" - elif "-onedir-darwin-" in dpath.name.lower(): - release_os = "macos" - elif "-onedir-windows-" in dpath.name.lower(): - release_os = "windows" - else: - ctx.error( - f"Cannot pickup the right OS from the filename '{dpath.name}'." 
- ) - ctx.exit(1) - else: - release_os = distro - release_json[dpath.name] = { - "name": dpath.name, - "version": salt_version, - "os": release_os, - "arch": arch, - } - for hash_name in ("blake2b", "sha512", "sha3_512"): - ctx.info(f" * Calculating {hash_name} ...") - hexdigest = _get_file_checksum(fpath, hash_name) - release_json[dpath.name][hash_name.upper()] = hexdigest - with open(f"{hashes_base_path}_{hash_name.upper()}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - with open(f"{dpath}.{hash_name}", "a+") as wfh: - wfh.write(f"{hexdigest} {dpath.name}\n") - - for fpath in create_repo_path.iterdir(): - if fpath.suffix in pkg_suffixes: - continue - tools.utils.gpg_sign(ctx, key_id, fpath) - - # Export the GPG key in use - tools.utils.export_gpg_key(ctx, key_id, create_repo_path) - - repo_json = _get_repo_json_file_contents( - ctx, bucket_name=bucket_name, repo_path=repo_path, repo_json_path=repo_json_path - ) - if nightly_build is True: - latest_link = create_repo_path.parent / "latest" - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - latest_link.symlink_to(create_repo_path.name) - - ctx.info(f"Writing {repo_json_path} ...") - repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) - return - - major_version = Version(salt_version).major - minor_repo_json_path = create_repo_path.parent / "repo.json" - minor_repo_json = _get_repo_json_file_contents( - ctx, - bucket_name=bucket_name, - repo_path=repo_path, - repo_json_path=minor_repo_json_path, - ) - minor_repo_json[salt_version] = release_json - versions = _parse_versions(*list(minor_repo_json)) - ctx.info( - f"Collected versions from {minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in versions)}" - ) - minor_versions = [v for v in versions if v.major == major_version] - ctx.info( - f"Collected versions(Matching major: {major_version}) from {minor_repo_json_path.relative_to(repo_path)}: " - f"{', '.join(str(vs) for vs in 
minor_versions)}" - ) - if not versions: - latest_version = Version(salt_version) - else: - latest_version = versions[0] - if not minor_versions: - latest_minor_version = Version(salt_version) - else: - latest_minor_version = minor_versions[0] - - ctx.info(f"Release Version: {salt_version}") - ctx.info(f"Latest Repo Version: {latest_version}") - ctx.info(f"Latest Release Minor Version: {latest_minor_version}") - - latest_link = create_repo_path.parent.parent / "latest" - if latest_version <= salt_version: - repo_json["latest"] = release_json - ctx.info(f"Creating '{latest_link.relative_to(repo_path)}' symlink ...") - if latest_link.exists(): - latest_link.unlink() - latest_link.symlink_to(f"minor/{salt_version}") - else: - ctx.info( - f"Not creating the '{latest_link.relative_to(repo_path)}' symlink " - f"since {latest_version} > {salt_version}" - ) - - major_link = create_repo_path.parent.parent / str(major_version) - if latest_minor_version <= salt_version: - minor_repo_json["latest"] = release_json - # This is the latest minor, update the major in the top level repo.json - # to this version - repo_json[str(major_version)] = release_json - ctx.info(f"Creating '{major_link.relative_to(repo_path)}' symlink ...") - if major_link.exists(): - major_link.unlink() - major_link.symlink_to(f"minor/{salt_version}") - else: - ctx.info( - f"Not creating the '{major_link.relative_to(repo_path)}' symlink " - f"since {latest_minor_version} > {salt_version}" - ) - - ctx.info(f"Writing {minor_repo_json_path} ...") - minor_repo_json_path.write_text(json.dumps(minor_repo_json, sort_keys=True)) - - ctx.info(f"Writing {repo_json_path} ...") - repo_json_path.write_text(json.dumps(repo_json, sort_keys=True)) - - -def _get_repo_json_file_contents( - ctx: Context, - bucket_name: str, - repo_path: pathlib.Path, - repo_json_path: pathlib.Path, -) -> dict[str, Any]: - s3 = boto3.client("s3") - repo_json: dict[str, Any] = {} - try: - ret = s3.head_object( - Bucket=bucket_name, 
Key=str(repo_json_path.relative_to(repo_path)) - ) - ctx.info( - f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file from bucket {bucket_name}" - ) - size = ret["ContentLength"] - with repo_json_path.open("wb") as wfh: - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Downloading...", total=size) - s3.download_fileobj( - Bucket=bucket_name, - Key=str(repo_json_path.relative_to(repo_path)), - Fileobj=wfh, - Callback=tools.utils.UpdateProgress(progress, task), - ) - with repo_json_path.open() as rfh: - repo_json = json.load(rfh) - except ClientError as exc: - if "Error" not in exc.response: - raise - if exc.response["Error"]["Code"] != "404": - raise - return repo_json - - -def _get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str: - - with fpath.open("rb") as rfh: - try: - digest = hashlib.file_digest(rfh, hash_name) # type: ignore[attr-defined] - except AttributeError: - # Python < 3.11 - buf = bytearray(2**18) # Reusable buffer to reduce allocations. - view = memoryview(buf) - digest = getattr(hashlib, hash_name)() - while True: - size = rfh.readinto(buf) - if size == 0: - break # EOF - digest.update(view[:size]) - hexdigest: str = digest.hexdigest() - return hexdigest - - -def _publish_repo( - ctx: Context, - repo_path: pathlib.Path, - nightly_build: bool = False, - rc_build: bool = False, - stage: bool = False, -): - """ - Publish packaging repositories. 
- """ - if nightly_build: - bucket_name = tools.utils.NIGHTLY_BUCKET_NAME - elif stage: - bucket_name = tools.utils.STAGING_BUCKET_NAME - else: - bucket_name = tools.utils.RELEASE_BUCKET_NAME - - ctx.info("Preparing upload ...") - s3 = boto3.client("s3") - to_delete_paths: dict[pathlib.Path, list[dict[str, str]]] = {} - to_upload_paths: list[pathlib.Path] = [] - for dirpath, dirnames, filenames in os.walk(repo_path, followlinks=True): - for dirname in dirnames: - path = pathlib.Path(dirpath, dirname) - if not path.is_symlink(): - continue - # This is a symlink, then we need to delete all files under - # that directory in S3 because S3 does not understand symlinks - # and we would end up adding files to that folder instead of - # replacing it. - try: - relpath = path.relative_to(repo_path) - ret = s3.list_objects( - Bucket=bucket_name, - Prefix=str(relpath), - ) - if "Contents" not in ret: - continue - objects = [] - for entry in ret["Contents"]: - objects.append({"Key": entry["Key"]}) - to_delete_paths[path] = objects - except ClientError as exc: - if "Error" not in exc.response: - raise - if exc.response["Error"]["Code"] != "404": - raise - - for fpath in filenames: - path = pathlib.Path(dirpath, fpath) - to_upload_paths.append(path) - - with tools.utils.create_progress_bar() as progress: - task = progress.add_task( - "Deleting directories to override.", total=len(to_delete_paths) - ) - for base, objects in to_delete_paths.items(): - relpath = base.relative_to(repo_path) - bucket_uri = f"s3://{bucket_name}/{relpath}" - progress.update(task, description=f"Deleting {bucket_uri}") - try: - ret = s3.delete_objects( - Bucket=bucket_name, - Delete={"Objects": objects}, - ) - except ClientError: - log.exception(f"Failed to delete {bucket_uri}") - finally: - progress.update(task, advance=1) - - try: - ctx.info("Uploading repository ...") - for upload_path in to_upload_paths: - relpath = upload_path.relative_to(repo_path) - size = upload_path.stat().st_size - ctx.info(f" 
{relpath}") - with tools.utils.create_progress_bar(file_progress=True) as progress: - task = progress.add_task(description="Uploading...", total=size) - s3.upload_file( - str(upload_path), - bucket_name, - str(relpath), - Callback=tools.utils.UpdateProgress(progress, task), - ) - except KeyboardInterrupt: - pass - - -def _create_repo_path( - repo_path: pathlib.Path, - salt_version: str, - distro: str, - distro_version: str | None = None, # pylint: disable=bad-whitespace - distro_arch: str | None = None, # pylint: disable=bad-whitespace - rc_build: bool = False, - nightly_build: bool = False, -): - create_repo_path = repo_path - if nightly_build: - create_repo_path = create_repo_path / "salt-dev" - elif rc_build: - create_repo_path = create_repo_path / "salt_rc" - create_repo_path = create_repo_path / "salt" / "py3" / distro - if distro_version: - create_repo_path = create_repo_path / distro_version - if distro_arch: - create_repo_path = create_repo_path / distro_arch - if nightly_build is False: - create_repo_path = create_repo_path / "minor" / salt_version - else: - create_repo_path = create_repo_path / datetime.utcnow().strftime("%Y-%m-%d") - create_repo_path.mkdir(exist_ok=True, parents=True) - return create_repo_path - - -def _parse_versions(*versions: str) -> list[Version]: - _versions = [] - for version in set(versions): - if version == "latest": - continue - _versions.append(Version(version)) - if _versions: - _versions.sort(reverse=True) - return _versions diff --git a/tools/pre_commit.py b/tools/pre_commit.py deleted file mode 100644 index f79e85b4a571..000000000000 --- a/tools/pre_commit.py +++ /dev/null @@ -1,165 +0,0 @@ -""" -These commands are used by pre-commit. 
-""" -# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated -from __future__ import annotations - -import logging -import shutil -from typing import TYPE_CHECKING, cast - -from jinja2 import Environment, FileSystemLoader -from ptscripts import Context, command_group - -import tools.utils - -log = logging.getLogger(__name__) - -WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows" -TEMPLATES = WORKFLOWS / "templates" - -# Define the command group -cgroup = command_group( - name="pre-commit", help="Pre-Commit Related Commands", description=__doc__ -) - - -class NeedsTracker: - def __init__(self): - self._needs = [] - - def append(self, need): - if need not in self._needs: - self._needs.append(need) - - def iter(self, consume=False): - if consume is False: - for need in self._needs: - yield need - return - while self._needs: - need = self._needs.pop(0) - yield need - - def __bool__(self): - return bool(self._needs) - - -@cgroup.command( - name="generate-workflows", -) -def generate_workflows(ctx: Context): - """ - Generate GitHub Actions Workflows - """ - workflows = { - "CI": { - "template": "ci.yml", - }, - "Nightly": { - "template": "nightly.yml", - }, - "Stage Release": { - "slug": "staging", - "template": "staging.yml", - "includes": { - "test-pkg-uploads": False, - }, - }, - "Scheduled": { - "template": "scheduled.yml", - }, - "Check Workflow Run": { - "template": "check-workflow-run.yml", - }, - "Release": { - "template": "release.yml", - "includes": { - "pre-commit": False, - "lint": False, - "pkg-tests": False, - "salt-tests": False, - "test-pkg-uploads": False, - }, - }, - } - env = Environment( - block_start_string="<%", - block_end_string="%>", - variable_start_string="<{", - variable_end_string="}>", - extensions=[ - "jinja2.ext.do", - ], - loader=FileSystemLoader(str(TEMPLATES)), - ) - for workflow_name, details in workflows.items(): - if TYPE_CHECKING: - assert isinstance(details, dict) - template: str = cast(str, 
details["template"]) - includes: dict[str, bool] = cast(dict, details.get("includes") or {}) - workflow_path = WORKFLOWS / template - template_path = TEMPLATES / f"{template}.jinja" - ctx.info( - f"Generating '{workflow_path.relative_to(tools.utils.REPO_ROOT)}' from " - f"template '{template_path.relative_to(tools.utils.REPO_ROOT)}' ..." - ) - context = { - "template": template_path.relative_to(tools.utils.REPO_ROOT), - "workflow_name": workflow_name, - "workflow_slug": ( - details.get("slug") or workflow_name.lower().replace(" ", "-") - ), - "includes": includes, - "conclusion_needs": NeedsTracker(), - "test_salt_needs": NeedsTracker(), - "test_salt_pkg_needs": NeedsTracker(), - "test_repo_needs": NeedsTracker(), - "prepare_workflow_needs": NeedsTracker(), - "build_repo_needs": NeedsTracker(), - } - if workflow_name == "Check Workflow Run": - check_workflow_exclusions = { - "Release", - workflow_name, - } - check_workflows = [ - wf for wf in sorted(workflows) if wf not in check_workflow_exclusions - ] - context["check_workflows"] = check_workflows - loaded_template = env.get_template(template_path.name) - rendered_template = loaded_template.render(**context) - workflow_path.write_text(rendered_template.rstrip() + "\n") - - -@cgroup.command( - name="actionlint", - arguments={ - "files": { - "help": "Files to run actionlint against", - "nargs": "*", - }, - "no_color": { - "help": "Disable colors in output", - }, - }, -) -def actionlint(ctx: Context, files: list[str], no_color: bool = False): - """ - Run `actionlint` - """ - actionlint = shutil.which("actionlint") - if not actionlint: - ctx.warn("Could not find the 'actionlint' binary") - ctx.exit(0) - cmdline = [actionlint] - if no_color is False: - cmdline.append("-color") - shellcheck = shutil.which("shellcheck") - if shellcheck: - cmdline.append(f"-shellcheck={shellcheck}") - pyflakes = shutil.which("pyflakes") - if pyflakes: - cmdline.append(f"-pyflakes={pyflakes}") - ret = ctx.run(*cmdline, *files, check=False) 
- ctx.exit(ret.returncode) diff --git a/tools/precommit/__init__.py b/tools/precommit/__init__.py new file mode 100644 index 000000000000..c10eadeb4797 --- /dev/null +++ b/tools/precommit/__init__.py @@ -0,0 +1,49 @@ +""" +These commands, and sub-commands, are used by pre-commit. +""" +from ptscripts import command_group + +import tools.utils + +# Define the command group +cgroup = command_group( + name="pre-commit", help="Pre-Commit Related Commands", description=__doc__ +) + +SALT_BASE_PATH = tools.utils.REPO_ROOT / "salt" + +SALT_INTERNAL_LOADERS_PATHS = ( + # This is a 1:1 copy of SALT_INTERNAL_LOADERS_PATHS found in salt/loader/__init__.py + str(SALT_BASE_PATH / "auth"), + str(SALT_BASE_PATH / "beacons"), + str(SALT_BASE_PATH / "cache"), + str(SALT_BASE_PATH / "client" / "ssh" / "wrapper"), + str(SALT_BASE_PATH / "cloud" / "clouds"), + str(SALT_BASE_PATH / "engines"), + str(SALT_BASE_PATH / "executors"), + str(SALT_BASE_PATH / "fileserver"), + str(SALT_BASE_PATH / "grains"), + str(SALT_BASE_PATH / "log_handlers"), + str(SALT_BASE_PATH / "matchers"), + str(SALT_BASE_PATH / "metaproxy"), + str(SALT_BASE_PATH / "modules"), + str(SALT_BASE_PATH / "netapi"), + str(SALT_BASE_PATH / "output"), + str(SALT_BASE_PATH / "pillar"), + str(SALT_BASE_PATH / "proxy"), + str(SALT_BASE_PATH / "queues"), + str(SALT_BASE_PATH / "renderers"), + str(SALT_BASE_PATH / "returners"), + str(SALT_BASE_PATH / "roster"), + str(SALT_BASE_PATH / "runners"), + str(SALT_BASE_PATH / "sdb"), + str(SALT_BASE_PATH / "serializers"), + str(SALT_BASE_PATH / "spm" / "pkgdb"), + str(SALT_BASE_PATH / "spm" / "pkgfiles"), + str(SALT_BASE_PATH / "states"), + str(SALT_BASE_PATH / "thorium"), + str(SALT_BASE_PATH / "tokens"), + str(SALT_BASE_PATH / "tops"), + str(SALT_BASE_PATH / "utils"), + str(SALT_BASE_PATH / "wheel"), +) diff --git a/tools/precommit/changelog.py b/tools/precommit/changelog.py new file mode 100644 index 000000000000..5e108af5f115 --- /dev/null +++ b/tools/precommit/changelog.py @@ -0,0 
+1,146 @@ +""" +These commands are used to validate changelog entries +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import logging +import pathlib +import re +import sys + +from ptscripts import Context, command_group + +import tools.utils + +log = logging.getLogger(__name__) + +CHANGELOG_LIKE_RE = re.compile(r"([\d]+)\.([a-z]+)$") +CHANGELOG_TYPES = ( + "removed", + "deprecated", + "changed", + "fixed", + "added", + "security", +) +CHANGELOG_ENTRY_RE = re.compile( + r"([\d]+|(CVE|cve)-[\d]{{4}}-[\d]+)\.({})(\.md)?$".format("|".join(CHANGELOG_TYPES)) +) + +# Define the command group +changelog = command_group( + name="changelog", + help="Changelog tools", + description=__doc__, + venv_config={ + "requirements_files": [ + tools.utils.REPO_ROOT + / "requirements" + / "static" + / "ci" + / "py{}.{}".format(*sys.version_info) + / "changelog.txt" + ], + }, + parent="pre-commit", +) + + +@changelog.command( + name="pre-commit-checks", + arguments={ + "files": { + "nargs": "*", + } + }, +) +def check_changelog_entries(ctx: Context, files: list[pathlib.Path]): + """ + Run pre-commit checks on changelog snippets. 
+ """ + docs_path = tools.utils.REPO_ROOT / "doc" + tests_integration_files_path = ( + tools.utils.REPO_ROOT / "tests" / "integration" / "files" + ) + changelog_entries_path = tools.utils.REPO_ROOT / "changelog" + exitcode = 0 + for entry in files: + path = pathlib.Path(entry).resolve() + # Is it under changelog/ + try: + path.relative_to(changelog_entries_path) + if path.name in (".keep", ".template.jinja"): + # This is the file we use so git doesn't delete the changelog/ directory + continue + # Is it named properly + if not CHANGELOG_ENTRY_RE.match(path.name): + ctx.error( + "The changelog entry '{}' should have one of the following extensions: {}.".format( + path.relative_to(tools.utils.REPO_ROOT), + ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), + ), + ) + exitcode = 1 + continue + if path.suffix != ".md": + ctx.error( + f"Please rename '{path.relative_to(tools.utils.REPO_ROOT)}' to " + f"'{path.relative_to(tools.utils.REPO_ROOT)}.md'" + ) + exitcode = 1 + continue + except ValueError: + # No, carry on + pass + # Does it look like a changelog entry + if CHANGELOG_LIKE_RE.match(path.name) and not CHANGELOG_ENTRY_RE.match( + path.name + ): + try: + # Is this under doc/ + path.relative_to(docs_path) + # Yes, carry on + continue + except ValueError: + # No, resume the check + pass + try: + # Is this under tests/integration/files + path.relative_to(tests_integration_files_path) + # Yes, carry on + continue + except ValueError: + # No, resume the check + pass + ctx.error( + "The changelog entry '{}' should have one of the following extensions: {}.".format( + path.relative_to(tools.utils.REPO_ROOT), + ", ".join(f"{ext}.md" for ext in CHANGELOG_TYPES), + ) + ) + exitcode = 1 + continue + # Is it a changelog entry + if not CHANGELOG_ENTRY_RE.match(path.name): + # No? Carry on + continue + # Is the changelog entry in the right path? 
+ try: + path.relative_to(changelog_entries_path) + except ValueError: + exitcode = 1 + ctx.error( + "The changelog entry '{}' should be placed under '{}/', not '{}'".format( + path.name, + changelog_entries_path.relative_to(tools.utils.REPO_ROOT), + path.relative_to(tools.utils.REPO_ROOT).parent, + ) + ) + if path.suffix != ".md": + ctx.error( + f"Please rename '{path.relative_to(tools.utils.REPO_ROOT)}' to " + f"'{path.relative_to(tools.utils.REPO_ROOT)}.md'" + ) + exitcode = 1 + ctx.exit(exitcode) diff --git a/tasks/docs.py b/tools/precommit/docs.py similarity index 71% rename from tasks/docs.py rename to tools/precommit/docs.py index 323d14a0a1f2..a549a6cecf34 100644 --- a/tasks/docs.py +++ b/tools/precommit/docs.py @@ -1,9 +1,8 @@ """ - tasks.docstrings - ~~~~~~~~~~~~~~~~ - - Check salt code base for for missing or wrong docstrings +Check salt code base for for missing or wrong docs """ +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations import ast import collections @@ -11,21 +10,18 @@ import pathlib import re -from invoke import task # pylint: disable=3rd-party-module-not-gated - -from tasks import utils +from ptscripts import Context, command_group -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -DOCS_DIR = CODE_DIR / "doc" -SALT_CODE_DIR = CODE_DIR / "salt" +import tools.utils -os.chdir(str(CODE_DIR)) +DOCS_DIR = tools.utils.REPO_ROOT / "doc" +SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt" -python_module_to_doc_path = {} -doc_path_to_python_module = {} +PYTHON_MODULE_TO_DOC_PATH = {} +DOC_PATH_TO_PYTHON_MODULE = {} -check_paths = ( +CHECK_PATHS = ( "salt/auth", "salt/beacons", "salt/cache", @@ -52,12 +48,14 @@ "salt/tops", "salt/wheel", ) -exclude_paths = ( +EXCLUDE_PATHS = ( "salt/cloud/cli.py", "salt/cloud/exceptions.py", "salt/cloud/libcloudfuncs.py", ) +cgroup = command_group(name="docs", help=__doc__, parent="pre-commit") + def build_path_cache(): """ @@ -65,13 +63,13 @@ def 
build_path_cache(): """ for path in SALT_CODE_DIR.rglob("*.py"): - path = path.resolve().relative_to(CODE_DIR) + path = path.resolve().relative_to(tools.utils.REPO_ROOT) strpath = str(path) if strpath.endswith("__init__.py"): continue - if not strpath.startswith(check_paths): + if not strpath.startswith(CHECK_PATHS): continue - if strpath.startswith(exclude_paths): + if strpath.startswith(EXCLUDE_PATHS): continue parts = list(path.parts) @@ -113,32 +111,21 @@ def build_path_cache(): / "all" / str(path).replace(".py", ".rst").replace(os.sep, ".") ) - stub_path = stub_path.relative_to(CODE_DIR) - python_module_to_doc_path[path] = stub_path + stub_path = stub_path.relative_to(tools.utils.REPO_ROOT) + PYTHON_MODULE_TO_DOC_PATH[path] = stub_path if path.exists(): - doc_path_to_python_module[stub_path] = path + DOC_PATH_TO_PYTHON_MODULE[stub_path] = path build_path_cache() def build_file_list(files, extension): - # Unfortunately invoke does not support nargs. - # We migth have been passed --files="foo.py bar.py" - # Turn that into a list of paths - _files = [] - for path in files: - if not path: - continue - for spath in path.split(): - if not spath.endswith(extension): - continue - _files.append(spath) - if not _files: - _files = CODE_DIR.rglob("*{}".format(extension)) + if not files: + _files = tools.utils.REPO_ROOT.rglob("*{}".format(extension)) else: - _files = [pathlib.Path(fname).resolve() for fname in _files] - _files = [path.relative_to(CODE_DIR) for path in _files] + _files = [fpath.resolve() for fpath in files if fpath.suffix == extension] + _files = [path.relative_to(tools.utils.REPO_ROOT) for path in _files] return _files @@ -148,9 +135,9 @@ def build_python_module_paths(files): strpath = str(path) if strpath.endswith("__init__.py"): continue - if not strpath.startswith(check_paths): + if not strpath.startswith(CHECK_PATHS): continue - if strpath.startswith(exclude_paths): + if strpath.startswith(EXCLUDE_PATHS): continue _files.append(path) return _files @@ 
-160,8 +147,7 @@ def build_docs_paths(files): return build_file_list(files, ".rst") -@task(iterable=["files"], positional=["files"]) -def check_inline_markup(ctx, files): +def check_inline_markup(ctx: Context, files: list[pathlib.Path]) -> int: """ Check docstring for :doc: usage @@ -174,9 +160,6 @@ def check_inline_markup(ctx, files): https://github.com/saltstack/salt/issues/12788 """ - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - files = build_python_module_paths(files) exitcode = 0 @@ -188,18 +171,14 @@ def check_inline_markup(ctx, files): if not docstring: continue if ":doc:" in docstring: - utils.error( - "The {} function in {} contains ':doc:' usage", funcdef.name, path + ctx.error( + f"The {funcdef.name} function in {path} contains ':doc:' usage" ) exitcode += 1 return exitcode -@task(iterable=["files"]) -def check_stubs(ctx, files): - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - +def check_stubs(ctx: Context, files: list[pathlib.Path]) -> int: files = build_python_module_paths(files) exitcode = 0 @@ -207,21 +186,20 @@ def check_stubs(ctx, files): strpath = str(path) if strpath.endswith("__init__.py"): continue - if not strpath.startswith(check_paths): + if not strpath.startswith(CHECK_PATHS): continue - if strpath.startswith(exclude_paths): + if strpath.startswith(EXCLUDE_PATHS): continue - stub_path = python_module_to_doc_path[path] + stub_path = PYTHON_MODULE_TO_DOC_PATH[path] if not stub_path.exists(): exitcode += 1 - utils.error( - "The module at {} does not have a sphinx stub at {}", path, stub_path + ctx.error( + f"The module at {path} does not have a sphinx stub at {stub_path}" ) return exitcode -@task(iterable=["files"]) -def check_virtual(ctx, files): +def check_virtual(ctx: Context, files: list[pathlib.Path]) -> int: """ Check if .rst files for each module contains the text ".. 
_virtual" indicating it is a virtual doc page, and, in case a module exists by @@ -235,22 +213,16 @@ def check_virtual(ctx, files): try: contents = path.read_text() except Exception as exc: # pylint: disable=broad-except - utils.error( - "Error while processing '{}': {}".format( - path, - exc, - ) - ) + ctx.error(f"Error while processing '{path}': {exc}") exitcode += 1 continue if ".. _virtual-" in contents: try: - python_module = doc_path_to_python_module[path] - utils.error( - "The doc file at {} indicates that it's virtual, yet, there's a" - " python module at {} that will shaddow it.", - path, - python_module, + python_module = DOC_PATH_TO_PYTHON_MODULE[path] + ctx.error( + f"The doc file at {path} indicates that it's virtual, yet, " + f"there's a python module at {python_module} that will " + "shaddow it.", ) exitcode += 1 except KeyError: @@ -259,8 +231,7 @@ def check_virtual(ctx, files): return exitcode -@task(iterable=["files"]) -def check_module_indexes(ctx, files): +def check_module_indexes(ctx: Context, files: list[pathlib.Path]) -> int: exitcode = 0 files = build_docs_paths(files) for path in files: @@ -288,9 +259,8 @@ def check_module_indexes(ctx, files): ) if module_index != sorted(module_index): exitcode += 1 - utils.error( - "The autosummary mods in {} are not properly sorted. Please sort them.", - path, + ctx.error( + f"The autosummary mods in {path} are not properly sorted. 
Please sort them.", ) module_index_duplicates = [ @@ -298,8 +268,8 @@ def check_module_indexes(ctx, files): ] if module_index_duplicates: exitcode += 1 - utils.error( - "Module index {} contains duplicates: {}", path, module_index_duplicates + ctx.error( + f"Module index {path} contains duplicates: {module_index_duplicates}" ) # Let's check if all python modules are included in the index path_parts = list(path.parts) @@ -320,7 +290,7 @@ def check_module_indexes(ctx, files): package = "log_handlers" path_parts = [] python_package = SALT_CODE_DIR.joinpath(package, *path_parts).relative_to( - CODE_DIR + tools.utils.REPO_ROOT ) modules = set() for module in python_package.rglob("*.py"): @@ -358,26 +328,26 @@ def check_module_indexes(ctx, files): missing_modules_in_index = set(modules) - set(module_index) if missing_modules_in_index: exitcode += 1 - utils.error( - "The module index at {} is missing the following modules: {}", - path, - ", ".join(missing_modules_in_index), + ctx.error( + f"The module index at {path} is missing the following modules: " + f"{', '.join(missing_modules_in_index)}" ) extra_modules_in_index = set(module_index) - set(modules) if extra_modules_in_index: exitcode += 1 - utils.error( - "The module index at {} has extra modules(non existing): {}", - path, - ", ".join(extra_modules_in_index), + ctx.error( + f"The module index at {path} has extra modules(non existing): " + f"{', '.join(extra_modules_in_index)}" ) return exitcode -@task(iterable=["files"]) -def check_stray(ctx, files): +def check_stray(ctx: Context, files: list[pathlib.Path]) -> int: exitcode = 0 - exclude_paths = ( + exclude_pathlib_paths: tuple[pathlib.Path, ...] + exclude_paths: tuple[str, ...] 
+ + exclude_pathlib_paths = ( DOCS_DIR / "_inc", DOCS_DIR / "ref" / "cli" / "_includes", DOCS_DIR / "ref" / "cli", @@ -412,41 +382,50 @@ def check_stray(ctx, files): DOCS_DIR / "ref" / "states" / "writing.rst", DOCS_DIR / "topics", ) - exclude_paths = tuple(str(p.relative_to(CODE_DIR)) for p in exclude_paths) + exclude_paths = tuple( + str(p.relative_to(tools.utils.REPO_ROOT)) for p in exclude_pathlib_paths + ) files = build_docs_paths(files) for path in files: - if not str(path).startswith(str((DOCS_DIR / "ref").relative_to(CODE_DIR))): + if not str(path).startswith( + str((DOCS_DIR / "ref").relative_to(tools.utils.REPO_ROOT)) + ): continue if str(path).startswith(exclude_paths): continue if path.name in ("index.rst", "glossary.rst", "faq.rst", "README.rst"): continue - try: - python_module = doc_path_to_python_module[path] - except KeyError: + if path not in DOC_PATH_TO_PYTHON_MODULE: contents = path.read_text() if ".. _virtual-" in contents: continue exitcode += 1 - utils.error( - "The doc at {} doesn't have a corresponding python module and is" - " considered a stray doc. Please remove it.", - path, + ctx.error( + f"The doc at {path} doesn't have a corresponding python module " + "and is considered a stray doc. Please remove it." 
) return exitcode -@task(iterable=["files"]) -def check(ctx, files): +@cgroup.command( + name="check", + arguments={ + "files": { + "help": "List of files to check", + "nargs": "*", + } + }, +) +def check(ctx: Context, files: list[pathlib.Path]) -> None: exitcode = 0 - utils.info("Checking inline :doc: markup") + ctx.info("Checking inline :doc: markup") exitcode += check_inline_markup(ctx, files) - utils.info("Checking python module stubs") + ctx.info("Checking python module stubs") exitcode += check_stubs(ctx, files) - utils.info("Checking virtual modules") + ctx.info("Checking virtual modules") exitcode += check_virtual(ctx, files) - utils.info("Checking stray docs") + ctx.info("Checking stray docs") exitcode += check_stray(ctx, files) - utils.info("Checking doc module indexes") + ctx.info("Checking doc module indexes") exitcode += check_module_indexes(ctx, files) - utils.exit_invoke(exitcode) + ctx.exit(exitcode) diff --git a/tasks/docstrings.py b/tools/precommit/docstrings.py similarity index 87% rename from tasks/docstrings.py rename to tools/precommit/docstrings.py index 3aed5c7fa87b..29a7e0eb4e0b 100644 --- a/tasks/docstrings.py +++ b/tools/precommit/docstrings.py @@ -1,28 +1,27 @@ """ - tasks.docstrings - ~~~~~~~~~~~~~~~~ - - Docstrings related tasks +Check salt code base for for missing or wrong docstrings. 
""" -# pylint: disable=resource-leakage +# Skip mypy checks since it will follow into Salt which doesn't yet have proper types defined +# mypy: ignore-errors +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations import ast import os import pathlib import re +import subprocess import sys from typing import TYPE_CHECKING -from invoke import task # pylint: disable=3rd-party-module-not-gated +from ptscripts import Context, command_group -from salt.loader import SALT_INTERNAL_LOADERS_PATHS -from salt.version import SaltStackVersion -from tasks import utils +import tools.utils +from tools.precommit import SALT_INTERNAL_LOADERS_PATHS -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -SALT_CODE_DIR = CODE_DIR / "salt" +SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt" SALT_MODULES_PATH = SALT_CODE_DIR / "modules" -THIS_FILE = pathlib.Path(__file__).relative_to(CODE_DIR) +THIS_FILE = pathlib.Path(__file__).relative_to(tools.utils.REPO_ROOT) MISSING_DOCSTRINGS = { "salt/auth/django.py": ["is_connection_usable"], @@ -141,7 +140,6 @@ "salt/pillar/gpg.py": ["ext_pillar"], "salt/pillar/makostack.py": ["ext_pillar"], "salt/pillar/nacl.py": ["ext_pillar"], - "salt/pillar/stack.py": ["ext_pillar"], "salt/proxy/cisconso.py": ["init"], "salt/proxy/esxi.py": ["is_connected_via_vcenter"], "salt/proxy/fx2.py": ["host"], @@ -297,7 +295,6 @@ "iter_entry_points", ], "salt/utils/error.py": ["pack_exception"], - "salt/utils/etcd_util.py": ["get_conn", "tree"], "salt/utils/find.py": ["path_depth"], "salt/utils/gzip_util.py": ["open_fileobj", "uncompress", "open"], "salt/utils/icinga2.py": ["get_certs_path"], @@ -308,7 +305,6 @@ "regex_escape", ], "salt/utils/listdiffer.py": ["list_diff"], - "salt/utils/master.py": ["get_master_key", "ping_all_connected_minions"], "salt/utils/namecheap.py": [ "atts_to_dict", "get_opts", @@ -332,7 +328,6 @@ ], "salt/utils/openstack/swift.py": ["mkdirs", "check_swift"], "salt/utils/pkg/__init__.py": 
["split_comparison"], - "salt/utils/process.py": ["systemd_notify_call", "default_signals"], "salt/utils/profile.py": ["activate_profile", "output_profile"], "salt/utils/pyobjects.py": ["need_salt"], "salt/utils/reclass.py": [ @@ -360,13 +355,6 @@ "salt/utils/ssh.py": ["key_is_encrypted"], "salt/utils/stringio.py": ["is_writable", "is_stringio", "is_readable"], "salt/utils/stringutils.py": ["random"], - "salt/utils/templates.py": [ - "wrap_tmpl_func", - "render_mako_tmpl", - "render_jinja_tmpl", - "render_wempy_tmpl", - ], - "salt/utils/verify.py": ["verify_logs_filter"], "salt/utils/virtualbox.py": [ "machine_get_machinestate_str", "machine_get_machinestate_tuple", @@ -380,13 +368,10 @@ ], "salt/utils/yamlloader.py": ["load"], "salt/utils/yamlloader_old.py": ["load"], - "salt/utils/zeromq.py": ["check_ipc_path_max_len"], } MISSING_EXAMPLES = { "salt/modules/acme.py": ["has", "renew_by", "needs_renewal"], - "salt/modules/ansiblegate.py": ["help", "list_"], "salt/modules/apkpkg.py": ["purge"], - "salt/modules/aptpkg.py": ["expand_repo_def"], "salt/modules/arista_pyeapi.py": ["get_connection"], "salt/modules/artifactory.py": [ "get_latest_release", @@ -475,7 +460,6 @@ "salt/modules/boto_ssm.py": ["get_parameter", "delete_parameter", "put_parameter"], "salt/modules/capirca_acl.py": ["get_filter_pillar", "get_term_pillar"], "salt/modules/ceph.py": ["zap"], - "salt/modules/chroot.py": ["exist"], "salt/modules/ciscoconfparse_mod.py": [ "find_objects", "find_objects_wo_child", @@ -489,7 +473,6 @@ "set_data_value", "apply_rollback", ], - "salt/modules/cp.py": ["envs", "recv", "recv_chunked"], "salt/modules/cryptdev.py": ["active"], "salt/modules/datadog_api.py": ["post_event"], "salt/modules/defaults.py": ["deepcopy", "update"], @@ -608,7 +591,6 @@ "salt/modules/napalm_probes.py": ["delete_probes", "schedule_probes", "set_probes"], "salt/modules/netbox.py": ["get_", "filter_", "slugify"], "salt/modules/netmiko_mod.py": ["call", "multi_call", "get_connection"], - 
"salt/modules/network.py": ["fqdns"], "salt/modules/neutronng.py": [ "get_openstack_cloud", "compare_changes", @@ -763,21 +745,13 @@ "register_vm", "get_vm_config", "get_vm_config_file", - "list_licenses", "compare_vm_configs", "get_advanced_configs", "delete_advanced_configs", - "create_vmfs_datastore", "get_vm", ], "salt/modules/win_pkg.py": ["get_package_info"], "salt/modules/win_timezone.py": ["zone_compare"], - "salt/modules/zabbix.py": [ - "substitute_params", - "get_zabbix_id_mapper", - "get_object_id_by_params", - "compare_params", - ], "salt/modules/zk_concurrency.py": [ "lock", "party_members", @@ -827,8 +801,17 @@ Whatever approach you decide to take, just drop a comment in the PR letting us know! """ +cgroup = command_group(name="docstrings", help=__doc__, parent="pre-commit") -def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: str): + +def annotate( + ctx: Context, + kind: str, + fpath: pathlib.Path, + start_lineno: int, + end_lineno: int, + message: str, +) -> None: if kind not in ("warning", "error"): raise RuntimeError("The annotation kind can only be one of 'warning', 'error'.") if os.environ.get("GH_ACTIONS_ANNOTATE") is None: @@ -836,7 +819,7 @@ def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: github_output = os.environ.get("GITHUB_OUTPUT") if github_output is None: - utils.warn("The 'GITHUB_OUTPUT' variable is not set. Not adding annotations.") + ctx.warn("The 'GITHUB_OUTPUT' variable is not set. 
Not adding annotations.") return if TYPE_CHECKING: @@ -846,40 +829,51 @@ def annotate(kind: str, fpath: str, start_lineno: int, end_lineno: int, message: message.rstrip().replace("%", "%25").replace("\r", "%0D").replace("\n", "%0A") ) # Print it to stdout so that the GitHub runner pick's it up and adds the annotation - print( + ctx.print( f"::{kind} file={fpath},line={start_lineno},endLine={end_lineno}::{message}", - file=sys.stdout, - flush=True, + soft_wrap=True, ) -@task(iterable=["files"], positional=["files"]) -def check(ctx, files, check_proper_formatting=False, error_on_known_failures=False): +@cgroup.command( + name="check", + arguments={ + "files": { + "help": "List of files to check", + "nargs": "*", + }, + "suppress_warnings": { + "help": "Supress warning messages on known issues", + }, + "check_proper_formatting": { + "help": "Run formatting checks on docstrings", + }, + "error_on_known_failures": { + "help": "Raise an error on known failures", + }, + }, +) +def check_docstrings( + ctx: Context, + files: list[pathlib.Path], + suppress_warnings: bool = False, + check_proper_formatting: bool = False, + error_on_known_failures: bool = False, +) -> None: """ Check salt's docstrings """ - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - - # Unfortunately invoke does not support nargs. 
- # We migth have been passed --files="foo.py bar.py" - # Turn that into a list of paths - _files = [] - for path in files: - if not path: - continue - _files.extend(path.split()) - if not _files: - _files = SALT_CODE_DIR.rglob("*.py") + if not files: + _files = list(SALT_CODE_DIR.rglob("*.py")) else: - _files = [pathlib.Path(fname) for fname in _files] - - _files = [path.resolve() for path in _files] + _files = [fpath.resolve() for fpath in files if fpath.suffix == ".py"] errors = 0 exitcode = 0 warnings = 0 for path in _files: + if str(path).startswith(str(tools.utils.REPO_ROOT / "salt" / "ext")): + continue contents = path.read_text() try: module = ast.parse(path.read_text(), filename=str(path)) @@ -889,10 +883,11 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal if error: errors += 1 exitcode = 1 - utils.error( - "The module '{}' does not provide a proper `{}` version: {!r} is not valid.", - path.relative_to(CODE_DIR), - *error, + ctx.error( + "The module '{}' does not provide a proper `{}` version: {!r} is not valid.".format( + path.relative_to(tools.utils.REPO_ROOT), + *error, + ) ) for funcdef in [ @@ -904,17 +899,19 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal if error: errors += 1 exitcode = 1 - utils.error( - "The module '{}' does not provide a proper `{}` version: {!r} is not valid.", - path.relative_to(CODE_DIR), - *error, + ctx.error( + "The module '{}' does not provide a proper `{}` version: {!r} is not valid.".format( + path, + *error, + ) ) annotate( + ctx, "error", - path.relative_to(CODE_DIR), + path, funcdef.lineno, funcdef.body[0].lineno, - "Version {1:r!} is not valid for {0!r}".format(*error), + "Version {1!r} is not valid for {0!r}".format(*error), ) if not str(path).startswith(SALT_INTERNAL_LOADERS_PATHS): @@ -922,7 +919,7 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal continue funcname = funcdef.name - relpath = 
str(path.relative_to(CODE_DIR)) + relpath = str(path.relative_to(tools.utils.REPO_ROOT)) # We're dealing with a salt loader module if funcname.startswith("_"): @@ -935,14 +932,14 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal and error_on_known_failures is False ): warnings += 1 - utils.warn( - "The function '{}' on '{}' does not have a docstring", - funcname, - relpath, - ) + if suppress_warnings is False: + ctx.warn( + f"The function '{funcname}' on '{relpath}' does not have a docstring" + ) annotate( + ctx, "warning", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing docstring", @@ -950,14 +947,13 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal continue errors += 1 exitcode = 1 - utils.error( - "The function '{}' on '{}' does not have a docstring", - funcname, - relpath, + ctx.error( + f"The function '{funcname}' on '{relpath}' does not have a docstring" ) annotate( + ctx, "error", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing docstring", @@ -966,14 +962,12 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal elif funcname in MISSING_DOCSTRINGS.get(relpath, ()): # This was previously a know function with a missing docstring. # Warn about it so that it get's removed from this list - warnings += 1 - utils.warn( - "The function '{}' on '{}' was previously known to not have a docstring, " - "which is no longer the case. Please remove it from 'MISSING_DOCSTRINGS' ." - "in '{}'", - funcname, - relpath, - THIS_FILE, + errors += 1 + exitcode = 1 + ctx.error( + f"The function '{funcname}' on '{relpath}' was previously known to not " + "have a docstring, which is no longer the case. 
Please remove it from " + f"'MISSING_DOCSTRINGS' in '{THIS_FILE}'" ) try: @@ -993,14 +987,15 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal and error_on_known_failures is False ): warnings += 1 - utils.warn( - "The function '{}' on '{}' does not have a 'CLI Example:' in its docstring", - funcname, - relpath, - ) + if suppress_warnings is False: + ctx.warn( + f"The function '{funcname}' on '{relpath}' does not have a " + "'CLI Example:' in its docstring" + ) annotate( + ctx, "warning", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing 'CLI Example:' in docstring", @@ -1008,14 +1003,13 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal continue errors += 1 exitcode = 1 - utils.error( - "The function '{}' on '{}' does not have a 'CLI Example:' in its docstring", - funcname, - relpath, + ctx.error( + f"The function '{funcname}' on '{relpath}' does not have a 'CLI Example:' in its docstring" ) annotate( + ctx, "error", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Missing 'CLI Example:' in docstring", @@ -1024,14 +1018,12 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal elif funcname in MISSING_EXAMPLES.get(relpath, ()): # This was previously a know function with a missing CLI example # Warn about it so that it get's removed from this list - warnings += 1 - utils.warn( - "The function '{}' on '{}' was previously known to not have a CLI Example, " - "which is no longer the case. Please remove it from 'MISSING_EXAMPLES'. " - "in '{}'", - funcname, - relpath, - THIS_FILE, + errors += 1 + exitcode = 1 + ctx.error( + f"The function '{funcname}' on '{relpath}' was previously known to not " + "have a CLI Example, which is no longer the case. 
Please remove it from " + f"'MISSING_EXAMPLES' in '{THIS_FILE}'" ) if check_proper_formatting is False: @@ -1042,20 +1034,22 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal if _check_cli_example_proper_formatting(docstring) is False: errors += 1 exitcode = 1 - utils.error( + ctx.error( "The function {!r} on '{}' does not have a proper 'CLI Example:' section in " "its docstring. The proper format is:\n" "CLI Example:\n" "\n" ".. code-block:: bash\n" "\n" - " salt '*' \n", - funcdef.name, - path.relative_to(CODE_DIR), + " salt '*' \n".format( + funcdef.name, + path.relative_to(tools.utils.REPO_ROOT), + ) ) annotate( + ctx, "warning", - path.relative_to(CODE_DIR), + path.relative_to(tools.utils.REPO_ROOT), funcdef.lineno, funcdef.body[0].lineno, "Wrong format in 'CLI Example:' in docstring.\n" @@ -1072,15 +1066,15 @@ def check(ctx, files, check_proper_formatting=False, error_on_known_failures=Fal path.write_text(contents) if warnings: - utils.warn("Found {} warnings", warnings) + ctx.warn(f"Found {warnings} warnings") if exitcode: - utils.error("Found {} errors", errors) + ctx.error(f"Found {errors} errors") if os.environ.get("GH_ACTIONS_ANNOTATE") and (warnings or errors): github_step_summary = os.environ.get("GITHUB_STEP_SUMMARY") if github_step_summary: with open(github_step_summary, "w", encoding="utf-8") as wfh: wfh.write(SUMMARY) - utils.exit_invoke(exitcode) + ctx.exit(exitcode) CHECK_VALID_VERSION_RE = re.compile( @@ -1095,9 +1089,10 @@ def _check_valid_versions_on_docstrings(docstring): versions = [vs.strip() for vs in version.split(",")] bad_versions = [] for vs in versions: - try: - SaltStackVersion.parse(vs) - except ValueError: + ret = subprocess.run( + [sys.executable, str(SALT_CODE_DIR / "version.py"), vs], check=False + ) + if ret.returncode: bad_versions.append(vs) if bad_versions: return vtype, ", ".join(bad_versions) diff --git a/tools/precommit/filemap.py b/tools/precommit/filemap.py new file mode 100644 index 
000000000000..96a662fa7e7e --- /dev/null +++ b/tools/precommit/filemap.py @@ -0,0 +1,91 @@ +""" +`tests/filename_map.yml` validity checks +""" +import pathlib +import re + +import yaml +from ptscripts import Context, command_group + +import tools.utils + +FILENAME_MAP_PATH = tools.utils.REPO_ROOT / "tests" / "filename_map.yml" + +cgroup = command_group(name="filemap", help=__doc__, parent="pre-commit") + + +def _match_to_test_file(match: str) -> pathlib.Path: + tests_path = tools.utils.REPO_ROOT / "tests" + parts = match.split(".") + parts[-1] += ".py" + return tests_path.joinpath(*parts).relative_to(tools.utils.REPO_ROOT) + + +def _check_matches(ctx: Context, rule: str, matches: list[str]) -> int: + errors = 0 + for match in matches: + filematch = _match_to_test_file(match) + if not filematch.exists(): + ctx.error( + f"The match '{match}' for rule '{rule}' points to a non " + f"existing test module path: {filematch}" + ) + errors += 1 + return errors + + +@cgroup.command( + name="check", +) +def check(ctx: Context) -> None: + exitcode = 0 + excludes = ("tools/", "templates/", ".nox/") + full_filelist = [ + path.relative_to(tools.utils.REPO_ROOT) + for path in tools.utils.REPO_ROOT.rglob("*.py") + ] + filelist = [ + str(path) for path in full_filelist if not str(path).startswith(excludes) + ] + filename_map = yaml.safe_load(FILENAME_MAP_PATH.read_text()) + for rule, matches in filename_map.items(): + if rule == "*": + exitcode += _check_matches(ctx, rule, matches) + elif "|" in rule: + # This is regex + for filepath in filelist: + if re.match(rule, filepath): + # Found at least one match, stop looking + break + else: + ctx.error( + f"Could not find a matching file in the salt repo for the rule '{rule}'" + ) + exitcode += 1 + continue + exitcode += _check_matches(ctx, rule, matches) + elif "*" in rule or "\\" in rule: + # Glob matching + process_matches = True + for filerule in tools.utils.REPO_ROOT.glob(rule): + if not filerule.exists(): + ctx.error( + f"The rule 
'{rule}' points to a non existing path: {filerule}" + ) + exitcode += 1 + process_matches = False + if process_matches: + exitcode += _check_matches(ctx, rule, matches) + else: + # Direct file paths as rules + filerule = pathlib.Path(rule) + if not filerule.exists(): + ctx.error( + f"The rule '{rule}' points to a non existing path: {filerule}" + ) + exitcode += 1 + continue + exitcode += _check_matches(ctx, rule, matches) + if exitcode: + ctx.error(f"Found {exitcode} errors") + ctx.exit(exitcode) diff --git a/tasks/loader.py b/tools/precommit/loader.py similarity index 58% rename from tasks/loader.py rename to tools/precommit/loader.py index d65e5e28591a..bbec9c00f926 100644 --- a/tasks/loader.py +++ b/tools/precommit/loader.py @@ -1,24 +1,35 @@ """ - tasks.loader - ~~~~~~~~~~~~ - - Salt loader checks +Salt loader checks """ import ast import pathlib -from invoke import task # pylint: disable=3rd-party-module-not-gated +from ptscripts import Context, command_group + +import tools.utils +from tools.precommit import SALT_INTERNAL_LOADERS_PATHS -from salt.loader import SALT_INTERNAL_LOADERS_PATHS -from tasks import utils +SALT_CODE_DIR = tools.utils.REPO_ROOT / "salt" -CODE_DIR = pathlib.Path(__file__).resolve().parent.parent -SALT_CODE_DIR = CODE_DIR / "salt" +cgroup = command_group(name="salt-loaders", help=__doc__, parent="pre-commit") -@task(iterable=["files"], positional=["files"]) -def check_virtual(ctx, files, enforce_virtualname=False): +@cgroup.command( + name="check-virtual", + arguments={ + "files": { + "help": "List of files to check", + "nargs": "*", + }, + "enforce_virtualname": { + "help": "Enforce the usage of `__virtualname__`", + }, + }, +) +def check_virtual( + ctx: Context, files: list[pathlib.Path], enforce_virtualname: bool = False +) -> None: """ Check Salt loader modules for a defined `__virtualname__` attribute and `__virtual__` function. 
@@ -26,23 +37,10 @@ def check_virtual(ctx, files, enforce_virtualname=False): https://github.com/saltstack/salt/blob/27ae8260983b11fe6e32a18e777d550be9fe1dc2/tests/unit/test_virtualname.py """ - # CD into Salt's repo root directory - ctx.cd(CODE_DIR) - - # Unfortunately invoke does not support nargs. - # We migth have been passed --files="foo.py bar.py" - # Turn that into a list of paths - _files = [] - for path in files: - if not path: - continue - _files.extend(path.split()) - if not _files: - _files = SALT_CODE_DIR.rglob("*.py") + if not files: + _files = list(SALT_CODE_DIR.rglob("*.py")) else: - _files = [pathlib.Path(fname) for fname in _files] - - _files = [path.resolve() for path in _files] + _files = [fpath.resolve() for fpath in files if fpath.suffix == ".py"] errors = 0 exitcode = 0 @@ -78,14 +76,15 @@ def check_virtual(ctx, files, enforce_virtualname=False): continue if target.id == "__virtualname__": found_virtualname_attr = True - if node.value.s not in path.name: + if node.value.s not in path.name: # type: ignore[attr-defined] errors += 1 exitcode = 1 - utils.error( + ctx.error( 'The value of the __virtualname__ attribute, "{}"' - " is not part of {}", - node.value.s, - path.name, + " is not part of {}".format( + node.value.s, # type: ignore[attr-defined] + path.name, + ) ) if found_virtualname_attr: break @@ -93,11 +92,10 @@ def check_virtual(ctx, files, enforce_virtualname=False): if not found_virtualname_attr and enforce_virtualname: errors += 1 exitcode = 1 - utils.error( - "The salt loader module {} defines a __virtual__() function but does" - " not define a __virtualname__ attribute", - path.relative_to(CODE_DIR), + ctx.error( + f"The salt loader module {path.relative_to(tools.utils.REPO_ROOT)} defines " + "a __virtual__() function but does not define a __virtualname__ attribute" ) if exitcode: - utils.error("Found {} errors", errors) - utils.exit_invoke(exitcode) + ctx.error(f"Found {errors} errors") + ctx.exit(exitcode) diff --git 
a/tools/precommit/workflows.py b/tools/precommit/workflows.py new file mode 100644 index 000000000000..4a75dba21483 --- /dev/null +++ b/tools/precommit/workflows.py @@ -0,0 +1,366 @@ +""" +These commands are used for our GitHub Actions workflows. +""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import logging +import shutil +from typing import TYPE_CHECKING, cast + +import yaml +from jinja2 import Environment, FileSystemLoader +from ptscripts import Context, command_group + +import tools.utils + +log = logging.getLogger(__name__) + +WORKFLOWS = tools.utils.REPO_ROOT / ".github" / "workflows" +TEMPLATES = WORKFLOWS / "templates" + + +# Define the command group +cgroup = command_group( + name="workflows", + help="Pre-Commit GH Actions Workflows Related Commands", + description=__doc__, + parent="pre-commit", +) + + +class NeedsTracker: + def __init__(self): + self._needs = [] + + def append(self, need): + if need not in self._needs: + self._needs.append(need) + + def iter(self, consume=False): + if consume is False: + for need in self._needs: + yield need + return + while self._needs: + need = self._needs.pop(0) + yield need + + def __bool__(self): + return bool(self._needs) + + +@cgroup.command( + name="generate-workflows", +) +def generate_workflows(ctx: Context): + """ + Generate GitHub Actions Workflows + """ + workflows = { + "CI": { + "template": "ci.yml", + }, + "Nightly": { + "template": "nightly.yml", + }, + "Stage Release": { + "slug": "staging", + "template": "staging.yml", + "includes": { + "test-pkg-downloads": True, + }, + }, + "Scheduled": { + "template": "scheduled.yml", + }, + "Release": { + "template": "release.yml", + "includes": { + "pre-commit": False, + "lint": False, + "pkg-tests": False, + "salt-tests": False, + "test-pkg-downloads": True, + }, + }, + "Test Package Downloads": { + "template": "test-package-downloads-action.yml", + }, + } + test_salt_listing = { + "linux": [ 
+ ("almalinux-8", "Alma Linux 8", "x86_64", "no-fips"), + ("almalinux-9", "Alma Linux 9", "x86_64", "no-fips"), + ("amazonlinux-2", "Amazon Linux 2", "x86_64", "no-fips"), + ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64", "no-fips"), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "no-fips"), + ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "aarch64", "no-fips"), + ("archlinux-lts", "Arch Linux LTS", "x86_64", "no-fips"), + ("centos-7", "CentOS 7", "x86_64", "no-fips"), + ("centosstream-8", "CentOS Stream 8", "x86_64", "no-fips"), + ("centosstream-9", "CentOS Stream 9", "x86_64", "no-fips"), + ("debian-10", "Debian 10", "x86_64", "no-fips"), + ("debian-11", "Debian 11", "x86_64", "no-fips"), + ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "no-fips"), + ("debian-12", "Debian 12", "x86_64", "no-fips"), + ("debian-12-arm64", "Debian 12 Arm64", "aarch64", "no-fips"), + ("fedora-37", "Fedora 37", "x86_64", "no-fips"), + ("fedora-38", "Fedora 38", "x86_64", "no-fips"), + ("opensuse-15", "Opensuse 15", "x86_64", "no-fips"), + ("photonos-3", "Photon OS 3", "x86_64", "no-fips"), + ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64", "no-fips"), + ("photonos-4", "Photon OS 4", "x86_64", "fips"), + ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "fips"), + ("photonos-5", "Photon OS 5", "x86_64", "fips"), + ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64", "fips"), + ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "no-fips"), + ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "no-fips"), + ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "no-fips"), + ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "no-fips"), + ], + "macos": [ + ("macos-12", "macOS 12", "x86_64"), + ("macos-13", "macOS 13", "x86_64"), + ("macos-13-xlarge", "macOS 13 Arm64", "aarch64"), + ], + "windows": [ + ("windows-2016", "Windows 2016", "amd64"), + ("windows-2019", "Windows 2019", "amd64"), + ("windows-2022", "Windows 2022", "amd64"), + ], + } + + 
test_salt_pkg_listing = { + "linux": [ + ("amazonlinux-2", "Amazon Linux 2", "x86_64", "rpm", "no-fips"), + ( + "amazonlinux-2-arm64", + "Amazon Linux 2 Arm64", + "aarch64", + "rpm", + "no-fips", + ), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64", "rpm", "no-fips"), + ( + "amazonlinux-2023-arm64", + "Amazon Linux 2023 Arm64", + "aarch64", + "rpm", + "no-fips", + ), + ("centos-7", "CentOS 7", "x86_64", "rpm", "no-fips"), + ("centosstream-8", "CentOS Stream 8", "x86_64", "rpm", "no-fips"), + ("centosstream-9", "CentOS Stream 9", "x86_64", "rpm", "no-fips"), + ( + "centosstream-9-arm64", + "CentOS Stream 9 Arm64", + "aarch64", + "rpm", + "no-fips", + ), + ("debian-10", "Debian 10", "x86_64", "deb", "no-fips"), + ("debian-11", "Debian 11", "x86_64", "deb", "no-fips"), + ("debian-11-arm64", "Debian 11 Arm64", "aarch64", "deb", "no-fips"), + ("debian-12", "Debian 12", "x86_64", "deb", "no-fips"), + ("debian-12-arm64", "Debian 12 Arm64", "aarch64", "deb", "no-fips"), + ("photonos-3", "Photon OS 3", "x86_64", "rpm", "no-fips"), + ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64", "rpm", "no-fips"), + ("photonos-4", "Photon OS 4", "x86_64", "rpm", "fips"), + ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64", "rpm", "fips"), + ("photonos-5", "Photon OS 5", "x86_64", "rpm", "fips"), + ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64", "rpm", "fips"), + ("ubuntu-20.04", "Ubuntu 20.04", "x86_64", "deb", "no-fips"), + ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64", "deb", "no-fips"), + ("ubuntu-22.04", "Ubuntu 22.04", "x86_64", "deb", "no-fips"), + ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64", "deb", "no-fips"), + ], + "macos": [ + ("macos-12", "macOS 12", "x86_64"), + ("macos-13", "macOS 13", "x86_64"), + ("macos-13-xlarge", "macOS 13 Arm64", "aarch64"), + ], + "windows": [ + ("windows-2016", "Windows 2016", "amd64"), + ("windows-2019", "Windows 2019", "amd64"), + ("windows-2022", "Windows 2022", "amd64"), + ], + } + + build_ci_deps_listing = { + 
"linux": [ + ("almalinux-8", "Alma Linux 8", "x86_64"), + ("almalinux-8-arm64", "Alma Linux 8 Arm64", "aarch64"), + ("almalinux-9", "Alma Linux 9", "x86_64"), + ("almalinux-9-arm64", "Alma Linux 9 Arm64", "aarch64"), + ("amazonlinux-2", "Amazon Linux 2", "x86_64"), + ("amazonlinux-2-arm64", "Amazon Linux 2 Arm64", "aarch64"), + ("amazonlinux-2023", "Amazon Linux 2023", "x86_64"), + ("amazonlinux-2023-arm64", "Amazon Linux 2023 Arm64", "aarch64"), + ("archlinux-lts", "Arch Linux LTS", "x86_64"), + ("centos-7", "CentOS 7", "x86_64"), + ("centos-7-arm64", "CentOS 7 Arm64", "aarch64"), + ("centosstream-8", "CentOS Stream 8", "x86_64"), + ("centosstream-8-arm64", "CentOS Stream 8 Arm64", "aarch64"), + ("centosstream-9", "CentOS Stream 9", "x86_64"), + ("centosstream-9-arm64", "CentOS Stream 9 Arm64", "aarch64"), + ("debian-10", "Debian 10", "x86_64"), + ("debian-11", "Debian 11", "x86_64"), + ("debian-11-arm64", "Debian 11 Arm64", "aarch64"), + ("debian-12", "Debian 12", "x86_64"), + ("debian-12-arm64", "Debian 12 Arm64", "aarch64"), + ("fedora-37", "Fedora 37", "x86_64"), + ("fedora-37-arm64", "Fedora 37 Arm64", "aarch64"), + ("fedora-38", "Fedora 38", "x86_64"), + ("fedora-38-arm64", "Fedora 38 Arm64", "aarch64"), + ("opensuse-15", "Opensuse 15", "x86_64"), + ("photonos-3", "Photon OS 3", "x86_64"), + ("photonos-3-arm64", "Photon OS 3 Arm64", "aarch64"), + ("photonos-4", "Photon OS 4", "x86_64"), + ("photonos-4-arm64", "Photon OS 4 Arm64", "aarch64"), + ("photonos-5", "Photon OS 5", "x86_64"), + ("photonos-5-arm64", "Photon OS 5 Arm64", "aarch64"), + ("ubuntu-20.04", "Ubuntu 20.04", "x86_64"), + ("ubuntu-20.04-arm64", "Ubuntu 20.04 Arm64", "aarch64"), + ("ubuntu-22.04", "Ubuntu 22.04", "x86_64"), + ("ubuntu-22.04-arm64", "Ubuntu 22.04 Arm64", "aarch64"), + ], + "macos": [ + ("macos-12", "macOS 12", "x86_64"), + ("macos-13", "macOS 13", "x86_64"), + ("macos-13-xlarge", "macOS 13 Arm64", "aarch64"), + ], + "windows": [ + ("windows-2016", "Windows 2016", "amd64"), + 
("windows-2019", "Windows 2019", "amd64"), + ("windows-2022", "Windows 2022", "amd64"), + ], + } + test_salt_pkg_downloads_listing: dict[str, list[tuple[str, str, str]]] = { + "linux": [], + "macos": [], + "windows": [], + } + rpm_slugs = [ + "almalinux", + "amazonlinux", + "centos", + "centosstream", + "fedora", + "photon", + ] + for slug, display_name, arch in build_ci_deps_listing["linux"]: + if slug in ("archlinux-lts", "opensuse-15"): + continue + test_salt_pkg_downloads_listing["linux"].append((slug, arch, "package")) + # Account for old arm64 repo paths + if arch == "aarch64": + for test_slug in rpm_slugs: + if slug.startswith(test_slug): + test_salt_pkg_downloads_listing["linux"].append( + (slug, "arm64", "package") + ) + break + for slug, display_name, arch in build_ci_deps_listing["linux"][-2:]: + if slug in ("archlinux-lts", "opensuse-15"): + continue + test_salt_pkg_downloads_listing["linux"].append((slug, arch, "onedir")) + for slug, display_name, arch in build_ci_deps_listing["macos"]: + if arch == "aarch64": + arch = "arm64" + test_salt_pkg_downloads_listing["macos"].append((slug, arch, "package")) + for slug, display_name, arch in build_ci_deps_listing["macos"][-1:]: + test_salt_pkg_downloads_listing["macos"].append((slug, arch, "onedir")) + for slug, display_name, arch in build_ci_deps_listing["windows"][-1:]: + for pkg_type in ("nsis", "msi", "onedir"): + test_salt_pkg_downloads_listing["windows"].append((slug, arch, pkg_type)) + + test_salt_pkg_downloads_needs_slugs = set() + for platform in test_salt_pkg_downloads_listing: + for slug, _, _ in test_salt_pkg_downloads_listing[platform]: + test_salt_pkg_downloads_needs_slugs.add(f"{slug.replace('.', '')}-ci-deps") + + env = Environment( + block_start_string="<%", + block_end_string="%>", + variable_start_string="<{", + variable_end_string="}>", + extensions=[ + "jinja2.ext.do", + ], + loader=FileSystemLoader(str(TEMPLATES)), + ) + for workflow_name, details in workflows.items(): + if TYPE_CHECKING: 
+ assert isinstance(details, dict) + template: str = cast(str, details["template"]) + includes: dict[str, bool] = cast(dict, details.get("includes") or {}) + workflow_path = WORKFLOWS / template + template_path = TEMPLATES / f"{template}.jinja" + ctx.info( + f"Generating '{workflow_path.relative_to(tools.utils.REPO_ROOT)}' from " + f"template '{template_path.relative_to(tools.utils.REPO_ROOT)}' ..." + ) + context = { + "template": template_path.relative_to(tools.utils.REPO_ROOT), + "workflow_name": workflow_name, + "workflow_slug": ( + details.get("slug") or workflow_name.lower().replace(" ", "-") + ), + "includes": includes, + "conclusion_needs": NeedsTracker(), + "test_salt_needs": NeedsTracker(), + "test_salt_pkg_needs": NeedsTracker(), + "test_repo_needs": NeedsTracker(), + "prepare_workflow_needs": NeedsTracker(), + "build_repo_needs": NeedsTracker(), + "test_salt_listing": test_salt_listing, + "test_salt_pkg_listing": test_salt_pkg_listing, + "build_ci_deps_listing": build_ci_deps_listing, + "test_salt_pkg_downloads_listing": test_salt_pkg_downloads_listing, + "test_salt_pkg_downloads_needs_slugs": sorted( + test_salt_pkg_downloads_needs_slugs + ), + } + shared_context_file = ( + tools.utils.REPO_ROOT / "cicd" / "shared-gh-workflows-context.yml" + ) + shared_context = yaml.safe_load(shared_context_file.read_text()) + for key, value in shared_context.items(): + context[key] = value + loaded_template = env.get_template(template_path.name) + rendered_template = loaded_template.render(**context) + workflow_path.write_text(rendered_template.rstrip() + "\n") + + +@cgroup.command( + name="actionlint", + arguments={ + "files": { + "help": "Files to run actionlint against", + "nargs": "*", + }, + "no_color": { + "help": "Disable colors in output", + }, + }, +) +def actionlint(ctx: Context, files: list[str], no_color: bool = False): + """ + Run `actionlint` + """ + actionlint = shutil.which("actionlint") + if not actionlint: + ctx.warn("Could not find the 'actionlint' 
binary") + ctx.exit(0) + cmdline = [actionlint] + if no_color is False: + cmdline.append("-color") + shellcheck = shutil.which("shellcheck") + if shellcheck: + cmdline.append(f"-shellcheck={shellcheck}") + pyflakes = shutil.which("pyflakes") + if pyflakes: + cmdline.append(f"-pyflakes={pyflakes}") + ret = ctx.run(*cmdline, *files, check=False) + ctx.exit(ret.returncode) diff --git a/tools/release.py b/tools/release.py index a6f1ea178471..cc17938d4537 100644 --- a/tools/release.py +++ b/tools/release.py @@ -4,25 +4,20 @@ # pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated from __future__ import annotations +import json import logging +import os import pathlib -import sys +import tempfile +import time +import boto3 +import virustotal3.core +from botocore.exceptions import ClientError from ptscripts import Context, command_group import tools.utils - -try: - import boto3 - from botocore.exceptions import ClientError -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - flush=True, - ) - raise +import tools.utils.repo log = logging.getLogger(__name__) @@ -106,7 +101,240 @@ def upload_artifacts(ctx: Context, salt_version: str, artifacts_path: pathlib.Pa str(fpath), tools.utils.STAGING_BUCKET_NAME, upload_path, - Callback=tools.utils.UpdateProgress(progress, task), + Callback=tools.utils.repo.UpdateProgress(progress, task), ) except KeyboardInterrupt: pass + + +@release.command( + name="download-onedir-artifact", + arguments={ + "salt_version": { + "help": "The salt version to release.", + }, + "platform": { + "help": "The onedir platform archive to download.", + "required": True, + "choices": ("linux", "windows", "darwin", "macos"), + }, + "arch": { + "help": "The onedir arch archive to download.", + "required": True, + }, + }, +) +def download_onedir_artifact( + ctx: Context, salt_version: str, platform: str = "linux", arch: str = 
"x86_64" +): + """ + Download onedir artifact from staging bucket. + """ + s3 = boto3.client("s3") + if platform == "macos": + platform = "darwin" + if arch == "arm64": + arch = "aarch64" + arch = arch.lower() + platform = platform.lower() + if platform in ("linux", "darwin") and arch not in ("x86_64", "aarch64"): + ctx.error( + f"The 'arch' value for {platform} must be one of: 'x86_64', 'aarch64', 'arm64'" + ) + ctx.exit(1) + if platform == "windows" and arch not in ("x86", "amd64"): + ctx.error(f"The 'arch' value for {platform} must be one of: 'x86', 'amd64'") + ctx.exit(1) + + archive_name = f"salt-{salt_version}-onedir-{platform}-{arch}.tar.xz" + archive_path = tools.utils.REPO_ROOT / "artifacts" / archive_name + if "rc" in salt_version: + prefix = "salt_rc/salt" + else: + prefix = "salt" + remote_path = f"{prefix}/py3/onedir/minor/{salt_version}/{archive_name}" + archive_path.parent.mkdir() + try: + ret = s3.head_object(Bucket=tools.utils.STAGING_BUCKET_NAME, Key=remote_path) + size = ret["ContentLength"] + with archive_path.open("wb") as wfh: + ctx.info( + f"Downloading s3://{tools.utils.STAGING_BUCKET_NAME}/{remote_path} to {archive_path} ..." 
+ ) + with tools.utils.create_progress_bar(file_progress=True) as progress: + task = progress.add_task( + description="Downloading ...", + total=size, + ) + s3.download_fileobj( + Bucket=tools.utils.STAGING_BUCKET_NAME, + Key=remote_path, + Fileobj=wfh, + Callback=tools.utils.repo.UpdateProgress(progress, task), + ) + except ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Error downloading {remote_path}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {remote_path} in bucket.") + ctx.exit(1) + elif exc.response["Error"]["Code"].startswith("4"): + ctx.error(f"Could not download {remote_path} from bucket: {exc}") + ctx.exit(1) + else: + log.exception(f"Failed to download {remote_path}: {exc}") + ctx.exit(1) + + if not archive_path.exists(): + ctx.error(f"The {archive_path} does not exist") + ctx.exit(1) + if not archive_path.stat().st_size: + ctx.error(f"The {archive_path} size is zero!") + ctx.exit(1) + + +@release.command( + name="upload-virustotal", + arguments={ + "salt_version": { + "help": "The salt version to release.", + }, + }, +) +def upload_virustotal(ctx: Context, salt_version: str): + + # Get a list of files to upload + files_to_copy: list[str] + + if salt_version.startswith("v"): + salt_version = salt_version[1:] + + ctx.info("Grabbing remote file listing of files in staging ...") + s3 = boto3.client("s3") + repo_release_files_path = pathlib.Path( + f"release-artifacts/{salt_version}/.release-files.json" + ) + with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: + local_release_files_path = pathlib.Path(tsd) / repo_release_files_path.name + try: + with local_release_files_path.open("wb") as wfh: + ctx.info(f"Downloading file: {repo_release_files_path}") + s3.download_fileobj( + Bucket=tools.utils.STAGING_BUCKET_NAME, + Key=str(repo_release_files_path.as_posix()), + Fileobj=wfh, + ) + files_to_copy = json.loads(local_release_files_path.read_text()) + except 
ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Error downloading {repo_release_files_path}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {repo_release_files_path} in bucket.") + ctx.exit(1) + if exc.response["Error"]["Code"] == "400": + ctx.error( + f"Could not download {repo_release_files_path} from bucket: {exc}" + ) + ctx.exit(1) + log.exception(f"Error downloading {repo_release_files_path}: {exc}") + ctx.exit(1) + + # If we get approval, we can add RPM and DEB + file_types = [".msi", ".exe", ".pkg"] + files_to_upload = [] + for file in sorted(files_to_copy): + if f"minor/{salt_version}" in file: + if os.path.splitext(file)[1] in file_types: + files_to_upload.append(file) + # These are showing errors for Windows and macOS + # if f"onedir/minor/{salt_version}" in file: + # if file.endswith("tar.xz"): + # files_to_upload.append(file) + + ctx.info("Found the following files to upload:") + for file in files_to_upload: + ctx.info(f"- {os.path.basename(file)}") + + # download each file, then upload to VirusTotal + # This takes around 4 minutes per file + # Maybe we could do this asynchronously + failed_files = {} + for file in files_to_upload: + ctx.info("-" * 80) + download_file = pathlib.Path(file) + with tempfile.TemporaryDirectory(prefix=f"{salt_version}_release_") as tsd: + local_download_file = pathlib.Path(tsd) / download_file.name + try: + with local_download_file.open("wb") as wfh: + ctx.info(f"Downloading from repo: {download_file}") + s3.download_fileobj( + Bucket=tools.utils.STAGING_BUCKET_NAME, + Key=str(download_file.as_posix()), + Fileobj=wfh, + ) + except ClientError as exc: + if "Error" not in exc.response: + log.exception(f"Error downloading {download_file}: {exc}") + ctx.exit(1) + if exc.response["Error"]["Code"] == "404": + ctx.error(f"Could not find {download_file} in bucket.") + ctx.exit(1) + if exc.response["Error"]["Code"] == "400": + ctx.error(f"Could not download 
{download_file} from bucket: {exc}") + ctx.exit(1) + log.exception(f"Error downloading {download_file}: {exc}") + ctx.exit(1) + + # API key should be an environment variable + api_key = os.environ.get("VIRUSTOTAL_API_KEY") + + ctx.info( + f"Uploading to VirusTotal: {os.path.basename(local_download_file)}" + ) + vt = virustotal3.core.Files(api_key) + response = vt.upload(local_download_file) + + # We want the id + analysis_id = response["data"]["id"] + + # Lets check the results + results = virustotal3.core.get_analysis(api_key, analysis_id) + + status = results["data"]["attributes"]["status"] + + ctx.info("Waiting for results from VirusTotal (takes a few minutes)") + while "completed" not in status: + time.sleep(10) + results = virustotal3.core.get_analysis(api_key, analysis_id) + status = results["data"]["attributes"]["status"] + + ctx.info("Results summary:") + stats = results["data"]["attributes"]["stats"] + + failures = False + for field in stats: + ctx.info(f"- {field}: {stats[field]}") + if field in ["malicious", "suspicious"]: + if stats[field] > 0: + failures = True + + sha256 = results["meta"]["file_info"]["sha256"] + + if failures: + ctx.info("ERROR: VirusTotal scan encountered failures") + failed_files[os.path.basename(local_download_file)] = sha256 + + ctx.info("See details here:") + ctx.info(f"- File: {os.path.basename(local_download_file)}") + ctx.info(f"- URL: https://www.virustotal.com/gui/file/{sha256}") + + if failed_files: + # We want to exit with errors if there are failures + ctx.info("-" * 80) + ctx.info("VirusTotal flagged the following files:") + for file in failed_files: + ctx.info(f"- {file}") + ctx.info(f" https://www.virustotal.com/gui/file/{failed_files[file]}") + ctx.exit(1) diff --git a/tools/testsuite/__init__.py b/tools/testsuite/__init__.py new file mode 100644 index 000000000000..d09d29be9792 --- /dev/null +++ b/tools/testsuite/__init__.py @@ -0,0 +1,162 @@ +""" +These commands are related to the test suite. 
+""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import contextlib +import json +import logging +import shutil +import sys +import zipfile +from typing import TYPE_CHECKING + +from ptscripts import Context, command_group + +import tools.utils +import tools.utils.gh +from tools.utils import ExitCode + +with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh: + OS_SLUGS = sorted(json.load(rfh)) + +log = logging.getLogger(__name__) + +# Define the command group +ts = command_group(name="ts", help="Test Suite Related Commands", description=__doc__) + + +@ts.command( + name="setup", + arguments={ + "run_id": { + "help": "The workflow run ID from where to download artifacts from", + "metavar": "RUN_ID_NUMBER", + }, + "branch": { + "help": "The branch from where to look for artifacts.", + "metavar": "BRANCH_NAME", + }, + "pr": { + "help": "The pull-request from where to look for artifacts.", + "metavar": "PR_NUMBER", + }, + "nightly": { + "help": "The nightly build branch from where to look for artifacts.", + "metavar": "BRANCH_NAME", + }, + "platform": { + "help": "The onedir platform artifact to download", + "choices": ("linux", "darwin", "windows"), + "required": True, + }, + "arch": { + "help": "The onedir artifact architecture", + "choices": ("x86_64", "aarch64", "amd64", "x86"), + }, + "slug": { + "help": "The OS slug", + "required": True, + "choices": OS_SLUGS, + }, + "pkg": { + "help": "Also download package test artifacts", + }, + "repository": { + "help": "The repository to query, e.g. saltstack/salt", + }, + }, +) +def setup_testsuite( + ctx: Context, + run_id: int = None, + branch: str = None, + nightly: str = None, + pr: int = None, + platform: str = None, + arch="x86_64", + slug: str = None, + pkg: bool = False, + repository: str = "saltstack/salt", +): + """ + Setup the local test suite. 
+ + Examples: + + * Setup the local checkout for running tests in Photon OS 4, from the artifacts + in a pull request, including the built packages to run package tests: + + tools ts setup --platform linux --slug photonos-4 --pr 64991 --pkg + + * Setup the local checkout for running the tests in Windows 2019, from the + artifacts in the latest nightly build from branch 3006.x + + tools ts setup --platform windows --slug windows-2019 --nightly 3006.x + if TYPE_CHECKING: + assert platform is not None + assert slug is not None + + mutually_exclusive_flags = [ + run_id is not None, + branch is not None, + pr is not None, + nightly is not None, + ] + if not any(mutually_exclusive_flags): + ctx.error("Pass one of '--run-id', '--branch', '--pr' or '--nightly'") + ctx.exit(1) + if len(list(filter(None, mutually_exclusive_flags))) > 1: + ctx.error("Pass only one of '--run-id', '--branch', '--pr' or '--nightly'") + ctx.exit(1) + + if "arm64" in slug: + arch = "aarch64" + + ctx.warn( + "Consider this in preliminary support. There are most likely things to iron out still."
+ ) + + if run_id is None: + run_id = tools.utils.gh.discover_run_id( + ctx, branch=branch, nightly=nightly, pr=pr + ) + + if run_id is None: + run_id = tools.utils.gh.discover_run_id( + ctx, + branch=branch, + nightly=nightly, + pr=pr, + completed_status=False, + ) + if run_id is None: + ctx.error("Unable to find the appropriate workflow run ID") + else: + ctx.warn( + f"Looks like we found run_id {run_id} but it's not yet in the completed state" + ) + ctx.exit(1) + + exitcode = tools.utils.gh.download_onedir_artifact( + ctx, run_id=run_id, platform=platform, arch=arch, repository=repository + ) + if exitcode and exitcode != ExitCode.SOFT_FAIL: + ctx.exit(exitcode) + exitcode = tools.utils.gh.download_nox_artifact( + ctx, run_id=run_id, slug=slug, nox_env="ci-test-onedir", repository=repository + ) + if exitcode and exitcode != ExitCode.SOFT_FAIL: + ctx.exit(exitcode) + if pkg: + exitcode = tools.utils.gh.download_pkgs_artifact( + ctx, + run_id=run_id, + slug=slug, + arch=arch, + repository=repository, + ) + if exitcode and exitcode != ExitCode.SOFT_FAIL: + ctx.exit(exitcode) diff --git a/tools/testsuite/download.py b/tools/testsuite/download.py new file mode 100644 index 000000000000..edd7652125bc --- /dev/null +++ b/tools/testsuite/download.py @@ -0,0 +1,222 @@ +""" +These commands are related to downloading test suite CI artifacts. 
+""" +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated +from __future__ import annotations + +import json +import logging +import pathlib +from typing import TYPE_CHECKING + +from ptscripts import Context, command_group + +import tools.utils +import tools.utils.gh + +with tools.utils.REPO_ROOT.joinpath("cicd", "golden-images.json").open() as rfh: + OS_SLUGS = sorted(json.load(rfh)) + +log = logging.getLogger(__name__) + + +# Define the command group +download = command_group( + name="download", + help="Test Suite CI Artifacts Related Commands", + description=__doc__, + parent="ts", +) + + +@download.command( + name="onedir-artifact", + arguments={ + "run_id": { + "help": "The workflow run ID from where to download artifacts from", + "required": True, + }, + "platform": { + "help": "The onedir platform artifact to download", + "choices": ("linux", "darwin", "windows"), + "required": True, + }, + "arch": { + "help": "The onedir artifact architecture", + "choices": ("x86_64", "aarch64", "amd64", "x86"), + }, + "repository": { + "help": "The repository to query, e.g. saltstack/salt", + }, + }, +) +def download_onedir_artifact( + ctx: Context, + run_id: int = None, + platform: str = None, + arch: str = "x86_64", + repository: str = "saltstack/salt", +): + """ + Download CI onedir artifacts. + """ + if TYPE_CHECKING: + assert run_id is not None + assert platform is not None + + exitcode = tools.utils.gh.download_onedir_artifact( + ctx=ctx, run_id=run_id, platform=platform, arch=arch, repository=repository + ) + ctx.exit(exitcode) + + +@download.command( + name="nox-artifact", + arguments={ + "run_id": { + "help": "The workflow run ID from where to download artifacts from", + "required": True, + }, + "slug": { + "help": "The OS slug", + "required": True, + "choices": OS_SLUGS, + }, + "nox_env": { + "help": "The nox environment name.", + }, + "repository": { + "help": "The repository to query, e.g. 
saltstack/salt", + }, + }, +) +def download_nox_artifact( + ctx: Context, + run_id: int = None, + slug: str = None, + nox_env: str = "ci-test-onedir", + repository: str = "saltstack/salt", +): + """ + Download CI nox artifacts. + """ + if TYPE_CHECKING: + assert run_id is not None + assert slug is not None + + if slug.endswith("arm64"): + slug = slug.replace("-arm64", "") + nox_env += "-aarch64" + + exitcode = tools.utils.gh.download_nox_artifact( + ctx=ctx, run_id=run_id, slug=slug, nox_env=nox_env, repository=repository + ) + ctx.exit(exitcode) + + +@download.command( + name="pkgs-artifact", + arguments={ + "run_id": { + "help": "The workflow run ID from where to download artifacts from", + "required": True, + }, + "slug": { + "help": "The OS slug", + "required": True, + "choices": OS_SLUGS, + }, + "repository": { + "help": "The repository to query, e.g. saltstack/salt", + }, + }, +) +def download_pkgs_artifact( + ctx: Context, + run_id: int = None, + slug: str = None, + repository: str = "saltstack/salt", +): + """ + Download CI built packages artifacts. + """ + if TYPE_CHECKING: + assert run_id is not None + assert slug is not None + + exitcode = tools.utils.gh.download_pkgs_artifact( + ctx=ctx, run_id=run_id, slug=slug, repository=repository + ) + ctx.exit(exitcode) + + +@download.command( + name="artifact", + arguments={ + "artifact_name": { + "help": "The name of the artifact to download", + }, + "dest": { + "help": "The path to the file downloaded", + }, + "run_id": { + "help": "The workflow run ID from where to download artifacts from", + }, + "branch": { + "help": "The branch from where to look for artifacts.", + "metavar": "BRANCH_NAME", + }, + "pr": { + "help": "The pull-request from where to look for artifacts.", + "metavar": "PR_NUMBER", + }, + "nightly": { + "help": "The nightly build branch from where to look for artifacts.", + "metavar": "BRANCH_NAME", + }, + "repository": { + "help": "The repository to query, e.g. 
saltstack/salt", + }, + }, +) +def download_artifact( + ctx: Context, + artifact_name: pathlib.Path, + dest: pathlib.Path, + run_id: int = None, + branch: str = None, + nightly: str = None, + pr: int = None, + repository: str = "saltstack/salt", +): + """ + Download CI artifacts. + """ + if TYPE_CHECKING: + assert artifact_name is not None + assert dest is not None + + if run_id is not None: + actual_run_id = run_id + else: + potential_run_id = tools.utils.gh.discover_run_id( + ctx, branch=branch, nightly=nightly, pr=pr, repository=repository + ) + if potential_run_id is not None: + actual_run_id = potential_run_id + else: + ctx.exit(1, "Could not discover run ID") + + succeeded = tools.utils.gh.download_artifact( + ctx, + dest, + actual_run_id, + repository=repository, + artifact_name=str(artifact_name), + ) + ctx.info(succeeded) + if succeeded: + ctx.info(f"Downloaded {artifact_name} to {dest}") + ctx.exit(0) + else: + ctx.exit(1) diff --git a/tools/utils.py b/tools/utils.py deleted file mode 100644 index 8c7c220e2b47..000000000000 --- a/tools/utils.py +++ /dev/null @@ -1,121 +0,0 @@ -# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated -from __future__ import annotations - -import os -import pathlib - -import packaging.version -from ptscripts import Context -from rich.progress import ( - BarColumn, - Column, - DownloadColumn, - Progress, - TextColumn, - TimeRemainingColumn, - TransferSpeedColumn, -) - -REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent -GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023" -SPB_ENVIRONMENT = os.environ.get("SPB_ENVIRONMENT") or "prod" -NIGHTLY_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-nightly" -STAGING_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-staging" -RELEASE_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-release" -BACKUP_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-backup" - - -class UpdateProgress: - def __init__(self, progress, 
task): - self.progress = progress - self.task = task - - def __call__(self, chunk_size): - self.progress.update(self.task, advance=chunk_size) - - -def create_progress_bar(file_progress: bool = False, **kwargs): - if file_progress: - return Progress( - TextColumn("[progress.description]{task.description}"), - BarColumn(), - DownloadColumn(), - TransferSpeedColumn(), - TextColumn("eta"), - TimeRemainingColumn(), - **kwargs, - ) - return Progress( - TextColumn( - "[progress.description]{task.description}", table_column=Column(ratio=3) - ), - BarColumn(), - expand=True, - **kwargs, - ) - - -def export_gpg_key(ctx: Context, key_id: str, export_path: pathlib.Path): - keyfile_gpg = export_path.joinpath(GPG_KEY_FILENAME).with_suffix(".gpg") - if keyfile_gpg.exists(): - keyfile_gpg.unlink() - ctx.info(f"Exporting GnuPG Key '{key_id}' to {keyfile_gpg} ...") - ctx.run("gpg", "--output", str(keyfile_gpg), "--export", key_id) - keyfile_pub = export_path.joinpath(GPG_KEY_FILENAME).with_suffix(".pub") - if keyfile_pub.exists(): - keyfile_pub.unlink() - ctx.info(f"Exporting GnuPG Key '{key_id}' to {keyfile_pub} ...") - ctx.run("gpg", "--armor", "--output", str(keyfile_pub), "--export", key_id) - - -def gpg_sign(ctx: Context, key_id: str, path: pathlib.Path): - ctx.info(f"GPG Signing '{path}' ...") - signature_fpath = path.parent / f"{path.name}.asc" - if signature_fpath.exists(): - signature_fpath.unlink() - ctx.run( - "gpg", - "--local-user", - key_id, - "--output", - str(signature_fpath), - "--armor", - "--detach-sign", - "--sign", - str(path), - ) - - -class Version(packaging.version.Version): - def __lt__(self, other): - if not isinstance(other, self.__class__): - other = self.__class__(other) - return super().__lt__(other) - - def __le__(self, other): - if not isinstance(other, self.__class__): - other = self.__class__(other) - return super().__le__(other) - - def __eq__(self, other): - if not isinstance(other, self.__class__): - other = self.__class__(other) - return 
super().__eq__(other) - - def __ge__(self, other): - if not isinstance(other, self.__class__): - other = self.__class__(other) - return super().__ge__(other) - - def __gt__(self, other): - if not isinstance(other, self.__class__): - other = self.__class__(other) - return super().__gt__(other) - - def __ne__(self, other): - if not isinstance(other, self.__class__): - other = self.__class__(other) - return super().__ne__(other) - - def __hash__(self): - return hash(str(self)) diff --git a/tools/utils/__init__.py b/tools/utils/__init__.py new file mode 100644 index 000000000000..bebc9c98eb34 --- /dev/null +++ b/tools/utils/__init__.py @@ -0,0 +1,261 @@ +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated,bad-whitespace +from __future__ import annotations + +import fnmatch +import hashlib +import json +import os +import pathlib +import shutil +import sys +import tempfile +import zipfile +from datetime import datetime +from enum import IntEnum +from typing import Any + +import boto3 +import packaging.version +from botocore.exceptions import ClientError +from ptscripts import Context +from rich.progress import ( + BarColumn, + Column, + DownloadColumn, + Progress, + TextColumn, + TimeRemainingColumn, + TransferSpeedColumn, +) + +REPO_ROOT = pathlib.Path(__file__).resolve().parent.parent.parent +GPG_KEY_FILENAME = "SALT-PROJECT-GPG-PUBKEY-2023" +SPB_ENVIRONMENT = os.environ.get("SPB_ENVIRONMENT") or "test" +STAGING_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-staging" +RELEASE_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-release" +BACKUP_BUCKET_NAME = f"salt-project-{SPB_ENVIRONMENT}-salt-artifacts-backup" + + +class ExitCode(IntEnum): + OK = 0 + FAIL = 1 + SOFT_FAIL = 2 + + +def create_progress_bar(file_progress: bool = False, **kwargs): + if file_progress: + return Progress( + TextColumn("[progress.description]{task.description}"), + BarColumn(), + DownloadColumn(), + TransferSpeedColumn(), + TextColumn("eta"), + 
TimeRemainingColumn(), + **kwargs, + ) + return Progress( + TextColumn( + "[progress.description]{task.description}", table_column=Column(ratio=3) + ), + BarColumn(), + expand=True, + **kwargs, + ) + + +def export_gpg_key(ctx: Context, key_id: str, export_path: pathlib.Path): + keyfile_gpg = export_path.joinpath(GPG_KEY_FILENAME).with_suffix(".gpg") + if keyfile_gpg.exists(): + keyfile_gpg.unlink() + ctx.info(f"Exporting GnuPG Key '{key_id}' to {keyfile_gpg} ...") + ctx.run("gpg", "--output", str(keyfile_gpg), "--export", key_id) + keyfile_pub = export_path.joinpath(GPG_KEY_FILENAME).with_suffix(".pub") + if keyfile_pub.exists(): + keyfile_pub.unlink() + ctx.info(f"Exporting GnuPG Key '{key_id}' to {keyfile_pub} ...") + ctx.run("gpg", "--armor", "--output", str(keyfile_pub), "--export", key_id) + + +def gpg_sign(ctx: Context, key_id: str, path: pathlib.Path): + ctx.info(f"GPG Signing '{path}' ...") + signature_fpath = path.parent / f"{path.name}.asc" + if signature_fpath.exists(): + signature_fpath.unlink() + ctx.run( + "gpg", + "--local-user", + key_id, + "--output", + str(signature_fpath), + "--armor", + "--detach-sign", + "--sign", + str(path), + ) + + +class Version(packaging.version.Version): + def __lt__(self, other): + if not isinstance(other, self.__class__): + other = self.__class__(other) + return super().__lt__(other) + + def __le__(self, other): + if not isinstance(other, self.__class__): + other = self.__class__(other) + return super().__le__(other) + + def __eq__(self, other): + if not isinstance(other, self.__class__): + other = self.__class__(other) + return super().__eq__(other) + + def __ge__(self, other): + if not isinstance(other, self.__class__): + other = self.__class__(other) + return super().__ge__(other) + + def __gt__(self, other): + if not isinstance(other, self.__class__): + other = self.__class__(other) + return super().__gt__(other) + + def __ne__(self, other): + if not isinstance(other, self.__class__): + other = self.__class__(other) 
+ return super().__ne__(other) + + def __str__(self): + return super().__str__().replace(".post", "-") + + def __hash__(self): + return hash(str(self)) + + +def get_salt_releases(ctx: Context, repository: str) -> list[Version]: + """ + Return a list of salt versions + """ + # Deferred import + import tools.utils.gh + + ctx.info(f"Collecting salt releases from repository '{repository}'") + + versions = set() + with ctx.web as web: + headers = { + "Accept": "application/vnd.github+json", + } + github_token = tools.utils.gh.get_github_token(ctx) + if github_token is not None: + headers["Authorization"] = f"Bearer {github_token}" + web.headers.update(headers) + ret = web.get(f"https://api.github.com/repos/{repository}/tags") + if ret.status_code != 200: + ctx.error( + f"Failed to get the tags for repository {repository!r}: {ret.reason}" + ) + ctx.exit(1) + for tag in ret.json(): + name = tag["name"] + if name.startswith("v"): + name = name[1:] + if "docs" in name: + # We're not going to consider doc tags + continue + versions.add(Version(name)) + + # Now let's go through the github releases + ret = web.get(f"https://api.github.com/repos/{repository}/releases") + if ret.status_code != 200: + ctx.error( + f"Failed to get the releases for repository {repository!r}: {ret.reason}" + ) + ctx.exit(1) + for release in ret.json(): + name = release["name"] + if name.startswith("v"): + name = name[1:] + if name and "docs" not in name: + # We're not going to parse docs releases + versions.add(Version(name)) + name = release["tag_name"] + if "docs" not in name: + # We're not going to parse docs releases + versions.add(Version(name)) + return sorted(versions) + + +def parse_versions(*versions: str) -> list[Version]: + _versions = [] + for version in set(versions): + if version == "latest": + continue + _versions.append(Version(version)) + if _versions: + _versions.sort(reverse=True) + return _versions + + +def get_file_checksum(fpath: pathlib.Path, hash_name: str) -> str: + with 
fpath.open("rb") as rfh: + try: + digest = hashlib.file_digest(rfh, hash_name) # type: ignore[attr-defined] + except AttributeError: + # Python < 3.11 + buf = bytearray(2**18) # Reusable buffer to reduce allocations. + view = memoryview(buf) + digest = getattr(hashlib, hash_name)() + while True: + size = rfh.readinto(buf) + if size == 0: + break # EOF + digest.update(view[:size]) + hexdigest: str = digest.hexdigest() + return hexdigest + + +def download_file( + ctx: Context, + url: str, + dest: pathlib.Path, + auth: tuple[str, str] | None = None, + headers: dict[str, str] | None = None, +) -> pathlib.Path: + ctx.info(f"Downloading {dest.name!r} @ {url} ...") + curl = shutil.which("curl") + if curl is not None: + command = [curl, "-sS", "-L"] + if headers: + for key, value in headers.items(): + command.extend(["-H", f"{key}: {value}"]) + command.extend(["-o", str(dest), url]) + ret = ctx.run(*command) + if ret.returncode: + ctx.error(f"Failed to download {url}") + ctx.exit(1) + return dest + wget = shutil.which("wget") + if wget is not None: + with ctx.chdir(dest.parent): + command = [wget, "--no-verbose"] + if headers: + for key, value in headers.items(): + command.append(f"--header={key}: {value}") + command.append(url) + ret = ctx.run(*command) + if ret.returncode: + ctx.error(f"Failed to download {url}") + ctx.exit(1) + return dest + # NOTE the stream=True parameter below + with ctx.web as web: + if headers: + web.headers.update(headers) + with web.get(url, stream=True, auth=auth) as r: + r.raise_for_status() + with dest.open("wb") as f: + for chunk in r.iter_content(chunk_size=8192): + if chunk: + f.write(chunk) + return dest diff --git a/tools/utils/gh.py b/tools/utils/gh.py new file mode 100644 index 000000000000..513e2cf2110c --- /dev/null +++ b/tools/utils/gh.py @@ -0,0 +1,403 @@ +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated,bad-whitespace +from __future__ import annotations + +import fnmatch +import os +import pathlib +import 
shutil +import sys +import tempfile +import zipfile +from typing import TYPE_CHECKING + +from ptscripts import Context + +import tools.utils +from tools.utils import ExitCode + + +def download_onedir_artifact( + ctx: Context, + run_id: int = None, + platform: str = None, + arch: str = "x86_64", + repository: str = "saltstack/salt", +) -> int: + """ + Download CI onedir artifacts. + """ + if TYPE_CHECKING: + assert run_id is not None + assert platform is not None + + if platform == "windows": + if arch in ("x64", "x86_64"): + ctx.info(f"Turning passed arch {arch!r} into 'amd64'") + arch = "amd64" + if arch not in ("amd64", "x86"): + ctx.error( + "The allowed values for '--arch' on Windows are 'amd64' and 'x86'" + ) + return ExitCode.FAIL + else: + if arch == "arm64": + ctx.info(f"Turning passed arch {arch!r} into 'aarch64'") + arch = "aarch64" + elif arch == "x64": + ctx.info(f"Turning passed arch {arch!r} into 'x86_64'") + arch = "x86_64" + if arch not in ("x86_64", "aarch64"): + ctx.error( + f"The allowed values for '--arch' on {platform.title()} are 'x86_64', 'aarch64' or 'arm64'" + ) + return ExitCode.FAIL + artifacts_path = tools.utils.REPO_ROOT / "artifacts" + artifacts_path.mkdir(exist_ok=True) + if artifacts_path.joinpath("salt").exists(): + ctx.warn( + "The 'artifacts/salt' directory already exists ... Stopped processing." + ) + return ExitCode.SOFT_FAIL + artifact_name = f"salt-*-onedir-{platform}-{arch}" + if sys.platform.startswith("win"): + artifact_name += ".zip" + else: + artifact_name += ".tar.xz" + ctx.info( + f"Searching for artifact {artifact_name} from run_id {run_id} in repository {repository} ..." 
+ ) + found_artifact_name = download_artifact( + ctx, + dest=artifacts_path, + run_id=run_id, + artifact_name=artifact_name, + repository=repository, + ) + if found_artifact_name is None: + return ExitCode.FAIL + found_artifact_path = artifacts_path / found_artifact_name + checksum_algo = "sha512" + ctx.info(f"Validating {found_artifact_name!r} {checksum_algo} checksum ...") + artifact_expected_checksum = ( + artifacts_path.joinpath(f"{found_artifact_name}.{checksum_algo.upper()}") + .read_text() + .strip() + ) + artifact_checksum = tools.utils.get_file_checksum( + found_artifact_path, checksum_algo + ) + if artifact_expected_checksum != artifact_checksum: + ctx.error(f"The {checksum_algo} checksum does not match") + ctx.error(f"{artifact_checksum!r} != {artifact_expected_checksum!r}") + return ExitCode.FAIL + + ctx.info( + f"Decompressing {found_artifact_name!r} to {artifacts_path.relative_to(tools.utils.REPO_ROOT)}{os.path.sep} ..." + ) + if found_artifact_path.suffix == ".zip": + with zipfile.ZipFile(found_artifact_path) as zfile: + zfile.extractall(path=artifacts_path) + else: + ctx.run("tar", "xf", found_artifact_name, cwd=artifacts_path) + + return ExitCode.OK + + +def download_nox_artifact( + ctx: Context, + run_id: int = None, + slug: str = None, + nox_env: str = "ci-test-onedir", + repository: str = "saltstack/salt", +) -> ExitCode: + """ + Download CI nox artifacts. + """ + if TYPE_CHECKING: + assert run_id is not None + assert slug is not None + + artifacts_path = tools.utils.REPO_ROOT / ".nox" / nox_env + if artifacts_path.exists(): + ctx.error( + f"The '.nox/{nox_env}' directory already exists ... Stopped processing." + ) + return ExitCode.SOFT_FAIL + artifact_name = f"nox-{slug}-{nox_env}" + ctx.info( + f"Searching for artifact {artifact_name} from run_id {run_id} in repository {repository} ..." 
+ ) + found_artifact_name = download_artifact( + ctx, + dest=tools.utils.REPO_ROOT, + run_id=run_id, + artifact_name=artifact_name, + repository=repository, + ) + nox = shutil.which("nox") + if nox is None: + ctx.error("Could not find the 'nox' binary in $PATH") + return ExitCode.FAIL + ret = ctx.run( + nox, "--force-color", "-e", "decompress-dependencies", "--", slug, check=False + ) + if ret.returncode: + ctx.error("Failed to decompress the nox dependencies") + return ExitCode.FAIL + return ExitCode.OK + + +def download_pkgs_artifact( + ctx: Context, + run_id: int = None, + slug: str = None, + arch: str = "x86_64", + repository: str = "saltstack/salt", +) -> ExitCode: + """ + Download CI nox artifacts. + """ + if TYPE_CHECKING: + assert run_id is not None + assert slug is not None + + artifact_name = "salt-*-" + if "windows" in slug: + if arch in ("x64", "x86_64"): + ctx.info(f"Turning passed arch {arch!r} into 'amd64'") + arch = "amd64" + if arch not in ("amd64", "x86"): + ctx.error( + "The allowed values for '--arch' on Windows are 'amd64' and 'x86'" + ) + return ExitCode.FAIL + artifact_name += f"{arch}-MSI" + else: + if arch == "arm64": + ctx.info(f"Turning passed arch {arch!r} into 'aarch64'") + arch = "aarch64" + elif arch == "x64": + ctx.info(f"Turning passed arch {arch!r} into 'x86_64'") + arch = "x86_64" + if arch not in ("x86_64", "aarch64"): + ctx.error( + f"The allowed values for '--arch' for {slug} are 'x86_64', 'aarch64' or 'arm64'" + ) + return ExitCode.FAIL + + if slug.startswith(("debian", "ubuntu")): + artifact_name += f"{arch}-deb" + elif slug.startswith( + ("almalinux", "amazonlinux", "centos", "fedora", "opensuse", "photonos") + ): + artifact_name += f"{arch}-rpm" + else: + ctx.error(f"We do not build packages for {slug}") + return ExitCode.FAIL + + artifacts_path = tools.utils.REPO_ROOT / "artifacts" / "pkg" + artifacts_path.mkdir(exist_ok=True) + + ctx.info( + f"Searching for artifact {artifact_name} from run_id {run_id} in repository 
{repository} ..." + ) + found_artifact_name = download_artifact( + ctx, + dest=artifacts_path, + run_id=run_id, + artifact_name=artifact_name, + repository=repository, + ) + if found_artifact_name is None: + return ExitCode.FAIL + return ExitCode.OK + + +def get_github_token(ctx: Context) -> str | None: + """ + Get the GITHUB_TOKEN to be able to authenticate to the API. + """ + github_token = os.environ.get("GITHUB_TOKEN") + if github_token is not None: + ctx.info("$GITHUB_TOKEN was found on the environ") + return github_token + + gh = shutil.which("gh") + if gh is None: + ctx.info("The 'gh' CLI tool is not available. Can't get a token using it.") + return github_token + + ret = ctx.run(gh, "auth", "token", check=False, capture=True) + if ret.returncode == 0: + ctx.info("Got the GitHub token from the 'gh' CLI tool") + return ret.stdout.decode().strip() or None + ctx.info("Failed to get the GitHub token from the 'gh' CLI tool") + return github_token + + +def download_artifact( + ctx: Context, + dest: pathlib.Path, + run_id: int, + repository: str = "saltstack/salt", + artifact_name: str | None = None, +) -> str | None: + """ + Download CI artifacts. + """ + found_artifact: str | None = None + github_token = get_github_token(ctx) + if github_token is None: + ctx.error("Downloading artifacts requires being authenticated to GitHub.") + ctx.info( + "Either set 'GITHUB_TOKEN' to a valid token, or configure the 'gh' tool such that " + "'gh auth token' returns a token." 
+ ) + return found_artifact + with ctx.web as web: + headers = { + "Accept": "application/vnd.github+json", + "Authorization": f"Bearer {github_token}", + "X-GitHub-Api-Version": "2022-11-28", + } + web.headers.update(headers) + page = 0 + listed_artifacts: set[str] = set() + while True: + if found_artifact is not None: + break + page += 1 + params = { + "per_page": 100, + "page": page, + } + ret = web.get( + f"https://api.github.com/repos/{repository}/actions/runs/{run_id}/artifacts", + params=params, + ) + if ret.status_code != 200: + ctx.error( + f"Failed to get the artifacts for the run ID {run_id} for repository {repository!r}: {ret.reason}" + ) + ctx.exit(1) + data = ret.json() + if data["total_count"] <= len(listed_artifacts): + ctx.info("Already gone through all of the listed artifacts:") + ctx.print(sorted(listed_artifacts)) + break + ctx.debug(f"Processing artifacts listing (page: {page}) ...") + if not data["artifacts"]: + break + for artifact in data["artifacts"]: + listed_artifacts.add(artifact["name"]) + ctx.debug( + f"Checking if {artifact['name']!r} matches {artifact_name!r} " + f"({len(listed_artifacts)}/{data['total_count']}) ..." 
+ ) + if fnmatch.fnmatch(artifact["name"], artifact_name): + found_artifact = artifact["name"] + tempdir_path = pathlib.Path(tempfile.gettempdir()) + download_url = artifact["archive_download_url"] + downloaded_artifact = tools.utils.download_file( + ctx, + download_url, + tempdir_path / f"{artifact['name']}.zip", + headers=headers, + ) + ctx.info(f"Downloaded {downloaded_artifact}") + with zipfile.ZipFile(downloaded_artifact) as zfile: + zfile.extractall(path=dest) + break + if found_artifact is None: + ctx.error(f"Failed to find an artifact by the name of {artifact_name!r}") + return found_artifact + + +def discover_run_id( + ctx: Context, + branch: str = None, + nightly: str = None, + pr: int = None, + repository: str = "saltstack/salt", + completed_status: bool = True, +) -> int | None: + ctx.info(f"Discovering the run_id({branch=}, {nightly=}, {pr=}, {repository=})") + run_id: int | None = None + with ctx.web as web: + headers = { + "Accept": "application/vnd.github+json", + "X-GitHub-Api-Version": "2022-11-28", + } + github_token = get_github_token(ctx) + if github_token is not None: + headers["Authorization"] = f"Bearer {github_token}" + web.headers.update(headers) + + params: dict[str, str | int] = { + "per_page": 100, + } + if completed_status is True: + params["status"] = "completed" + if branch is not None: + ret = web.get( + f"https://api.github.com/repos/{repository}/git/ref/heads/{branch}" + ) + data = ret.json() + if "message" in data: + ctx.error(f"Could not find HEAD commit for branch {branch}") + ctx.exit(1) + params["event"] = "push" + head_sha = data["object"]["sha"] + elif pr is not None: + ret = web.get(f"https://api.github.com/repos/{repository}/pulls/{pr}") + data = ret.json() + params["event"] = "pull_request" + head_sha = data["head"]["sha"] + elif nightly == "master": + ret = web.get( + f"https://api.github.com/repos/{repository}/git/ref/heads/{nightly}" + ) + data = ret.json() + if "message" in data: + ctx.error(f"Could not find HEAD 
commit for branch {nightly}") + ctx.exit(1) + params["event"] = "schedule" + head_sha = data["object"]["sha"] + else: + ret = web.get( + f"https://api.github.com/repos/{repository}/git/ref/heads/{nightly}" + ) + data = ret.json() + if "message" in data: + ctx.error(f"Could not find HEAD commit for branch {nightly}") + ctx.exit(1) + params["event"] = "workflow_dispatch" + head_sha = data["object"]["sha"] + + params["head_sha"] = head_sha + # params.pop("event") + ctx.info(f"Searching for workflow runs for HEAD SHA: {head_sha}") + page = 0 + while True: + if run_id is not None: + break + page += 1 + params["page"] = page + ret = web.get( + f"https://api.github.com/repos/{repository}/actions/runs", params=params + ) + data = ret.json() + ctx.info( + f"Discovered {data['total_count']} workflow runs for HEAD SHA {head_sha}" + ) + # ctx.info(data) + if not data["workflow_runs"]: + break + workflow_runs = data["workflow_runs"] + for workflow_run in workflow_runs: + run_id = workflow_run["id"] + break + + if run_id: + ctx.info(f"Discovered run_id: {run_id}") + return run_id diff --git a/tools/utils/repo.py b/tools/utils/repo.py new file mode 100644 index 000000000000..075f86ab0c9c --- /dev/null +++ b/tools/utils/repo.py @@ -0,0 +1,133 @@ +# pylint: disable=resource-leakage,broad-except,3rd-party-module-not-gated,bad-whitespace +from __future__ import annotations + +import json +import pathlib +import sys +from datetime import datetime +from typing import Any + +from ptscripts import Context + +import tools.utils + +try: + import boto3 + from botocore.exceptions import ClientError +except ImportError: + print( + "\nPlease run 'python -m pip install -r " + "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), + file=sys.stderr, + flush=True, + ) + raise + + +class UpdateProgress: + def __init__(self, progress, task): + self.progress = progress + self.task = task + + def __call__(self, chunk_size): + self.progress.update(self.task, advance=chunk_size) + + 
+def get_repo_json_file_contents( + ctx: Context, + bucket_name: str, + repo_path: pathlib.Path, + repo_json_path: pathlib.Path, +) -> dict[str, Any]: + s3 = boto3.client("s3") + repo_json: dict[str, Any] = {} + try: + ret = s3.head_object( + Bucket=bucket_name, Key=str(repo_json_path.relative_to(repo_path)) + ) + ctx.info( + f"Downloading existing '{repo_json_path.relative_to(repo_path)}' file " + f"from bucket {bucket_name}" + ) + size = ret["ContentLength"] + with repo_json_path.open("wb") as wfh: + with tools.utils.create_progress_bar(file_progress=True) as progress: + task = progress.add_task(description="Downloading...", total=size) + s3.download_fileobj( + Bucket=bucket_name, + Key=str(repo_json_path.relative_to(repo_path)), + Fileobj=wfh, + Callback=UpdateProgress(progress, task), + ) + with repo_json_path.open() as rfh: + repo_json = json.load(rfh) + except ClientError as exc: + if "Error" not in exc.response: + raise + if exc.response["Error"]["Code"] != "404": + raise + ctx.info(f"Could not find {repo_json_path} in bucket {bucket_name}") + if repo_json: + ctx.print(repo_json, soft_wrap=True) + return repo_json + + +def create_top_level_repo_path( + ctx: Context, + repo_path: pathlib.Path, + salt_version: str, + distro: str, + distro_version: str | None = None, # pylint: disable=bad-whitespace + distro_arch: str | None = None, # pylint: disable=bad-whitespace + nightly_build_from: str | None = None, # pylint: disable=bad-whitespace +): + create_repo_path = repo_path + if nightly_build_from: + create_repo_path = ( + create_repo_path + / "salt-dev" + / nightly_build_from + / datetime.utcnow().strftime("%Y-%m-%d") + ) + create_repo_path.mkdir(exist_ok=True, parents=True) + with ctx.chdir(create_repo_path.parent): + latest_nightly_symlink = pathlib.Path("latest") + if not latest_nightly_symlink.exists(): + ctx.info( + f"Creating 'latest' symlink to '{create_repo_path.relative_to(repo_path)}' ..." 
+ ) + latest_nightly_symlink.symlink_to( + create_repo_path.name, target_is_directory=True + ) + elif "rc" in salt_version: + create_repo_path = create_repo_path / "salt_rc" + create_repo_path = create_repo_path / "salt" / "py3" / distro + if distro_version: + create_repo_path = create_repo_path / distro_version + if distro_arch: + create_repo_path = create_repo_path / distro_arch + create_repo_path.mkdir(exist_ok=True, parents=True) + return create_repo_path + + +def create_full_repo_path( + ctx: Context, + repo_path: pathlib.Path, + salt_version: str, + distro: str, + distro_version: str | None = None, # pylint: disable=bad-whitespace + distro_arch: str | None = None, # pylint: disable=bad-whitespace + nightly_build_from: str | None = None, # pylint: disable=bad-whitespace +): + create_repo_path = create_top_level_repo_path( + ctx, + repo_path, + salt_version, + distro, + distro_version, + distro_arch, + nightly_build_from=nightly_build_from, + ) + create_repo_path = create_repo_path / "minor" / salt_version + create_repo_path.mkdir(exist_ok=True, parents=True) + return create_repo_path diff --git a/tools/vm.py b/tools/vm.py index a34c7bf7e5f8..11eed3f99189 100644 --- a/tools/vm.py +++ b/tools/vm.py @@ -21,33 +21,22 @@ from functools import lru_cache from typing import TYPE_CHECKING, cast +import attr +import boto3 +from botocore.exceptions import ClientError from ptscripts import Context, command_group from requests.exceptions import ConnectTimeout +from rich.progress import ( + BarColumn, + Column, + Progress, + TaskProgressColumn, + TextColumn, + TimeRemainingColumn, +) import tools.utils -try: - import attr - import boto3 - from botocore.exceptions import ClientError - from rich.progress import ( - BarColumn, - Column, - Progress, - TaskProgressColumn, - TextColumn, - TimeRemainingColumn, - ) -except ImportError: - print( - "\nPlease run 'python -m pip install -r " - "requirements/static/ci/py{}.{}/tools.txt'\n".format(*sys.version_info), - file=sys.stderr, - 
flush=True, - ) - raise - - if TYPE_CHECKING: # pylint: disable=no-name-in-module from boto3.resources.factory.ec2 import Instance @@ -78,7 +67,7 @@ "choices": list(AMIS), }, "key_name": { - "help": "The SSH key name.", + "help": "The SSH key name. Will default to TOOLS_KEY_NAME in environment", }, "instance_type": { "help": "The instance type to use.", @@ -110,7 +99,7 @@ def create( ctx: Context, name: str, - key_name: str = os.environ.get("RUNNER_NAME"), # type: ignore[assignment] + key_name: str = os.environ.get("RUNNER_NAME") or os.environ.get("TOOLS_KEY_NAME"), # type: ignore[assignment] instance_type: str = None, no_delete: bool = False, no_destroy_on_failure: bool = False, @@ -175,14 +164,20 @@ def create( "help": "The VM Name", "metavar": "VM_NAME", }, + "no_wait": { + "help": ( + "Don't wait for the destroy process to complete. " + "Just confirm it started and exit." + ) + }, } ) -def destroy(ctx: Context, name: str): +def destroy(ctx: Context, name: str, no_wait: bool = False): """ Destroy VM. """ vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) - vm.destroy() + vm.destroy(no_wait=no_wait) @vm.command( @@ -216,14 +211,18 @@ def ssh(ctx: Context, name: str, command: list[str], sudo: bool = False): "help": "The VM Name", "metavar": "VM_NAME", }, + "download": { + "help": "Rsync from the remote target to local salt checkout", + "action": "store_true", + }, } ) -def rsync(ctx: Context, name: str): +def rsync(ctx: Context, name: str, download: bool = False): """ Sync local checkout to VM. """ vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) - vm.upload_checkout() + vm.upload_checkout(download=download) @vm.command( @@ -272,6 +271,17 @@ def rsync(ctx: Context, name: str): "--skip-code-coverage", ], }, + "envvars": { + "action": "append", + "flags": [ + "-E", + "--env", + ], + "help": ( + "Environment variable name to forward when running tests. Example: " + "'-E VAR1 -E VAR2'." 
+ ), + }, } ) def test( @@ -284,6 +294,8 @@ def test( print_tests_selection: bool = False, print_system_info: bool = False, skip_code_coverage: bool = False, + envvars: list[str] = None, + fips: bool = False, ): """ Run test in the VM. @@ -293,6 +305,7 @@ def test( "PRINT_TEST_PLAN_ONLY": "0", "SKIP_INITIAL_ONEDIR_FAILURES": "1", "SKIP_INITIAL_GH_ACTIONS_FAILURES": "1", + "COVERAGE_CONTEXT": name, } if "LANG" in os.environ: env["LANG"] = os.environ["LANG"] @@ -318,6 +331,15 @@ def test( if "photonos" in name: skip_known_failures = os.environ.get("SKIP_INITIAL_PHOTONOS_FAILURES", "1") env["SKIP_INITIAL_PHOTONOS_FAILURES"] = skip_known_failures + if fips: + env["FIPS_TESTRUN"] = "1" + vm.run(["tdnf", "install", "-y", "openssl-fips-provider"], sudo=True) + if envvars: + for key in envvars: + if key not in os.environ: + ctx.warn(f"Environment variable {key!r} not set. Not forwarding") + continue + env[key] = os.environ[key] returncode = vm.run_nox( nox_session=nox_session, session_args=nox_session_args, @@ -352,6 +374,17 @@ def test( "--skip-requirements-install", ], }, + "envvars": { + "action": "append", + "flags": [ + "-E", + "--env", + ], + "help": ( + "Environment variable name to forward when running tests. Example: " + "'-E VAR1 -E VAR2'." + ), + }, } ) def testplan( @@ -360,6 +393,7 @@ def testplan( nox_session_args: list[str] = None, nox_session: str = "ci-test-3", skip_requirements_install: bool = False, + envvars: list[str] = None, ): """ Run test in the VM. @@ -379,6 +413,12 @@ def testplan( if "photonos" in name: skip_known_failures = os.environ.get("SKIP_INITIAL_PHOTONOS_FAILURES", "1") env["SKIP_INITIAL_PHOTONOS_FAILURES"] = skip_known_failures + if envvars: + for key in envvars: + if key not in os.environ: + ctx.warn(f"Environment variable {key!r} not set. 
Not forwarding") + continue + env[key] = os.environ[key] returncode = vm.run_nox( nox_session=nox_session, session_args=nox_session_args, @@ -504,6 +544,24 @@ def combine_coverage(ctx: Context, name: str): ctx.exit(returncode) +@vm.command( + name="create-xml-coverage-reports", + arguments={ + "name": { + "help": "The VM Name", + "metavar": "VM_NAME", + }, + }, +) +def create_xml_coverage_reports(ctx: Context, name: str): + """ + Create XML code coverage reports in the VM. + """ + vm = VM(ctx=ctx, name=name, region_name=ctx.parser.options.region) + returncode = vm.create_xml_coverage_reports() + ctx.exit(returncode) + + @vm.command( name="download-artifacts", arguments={ @@ -521,6 +579,156 @@ def download_artifacts(ctx: Context, name: str): vm.download_artifacts() +@vm.command( + name="sync-cache", + arguments={ + "key_name": { + "help": "The SSH key name. Will default to TOOLS_KEY_NAME in environment" + }, + "delete": { + "help": "Delete the entries in the cache that don't align with ec2", + "action": "store_true", + }, + }, +) +def sync_cache( + ctx: Context, + key_name: str = os.environ.get("RUNNER_NAME") or os.environ.get("TOOLS_KEY_NAME"), # type: ignore[assignment] + delete: bool = False, +): + """ + Sync the cache + """ + ec2_instances = _filter_instances_by_state( + _get_instances_by_key(ctx, key_name), + {"running"}, + ) + + cached_instances = {} + if STATE_DIR.exists(): + for state_path in STATE_DIR.iterdir(): + try: + instance_id = (state_path / "instance-id").read_text() + except FileNotFoundError: + if not delete: + log.info( + f"Would remove {state_path.name} (No valid ID) from cache at {state_path}" + ) + else: + shutil.rmtree(state_path) + log.info( + f"REMOVED {state_path.name} (No valid ID) from cache at {state_path}" + ) + else: + cached_instances[instance_id] = state_path.name + + # Find what instances we are missing in our cached states + to_write = {} + to_remove = cached_instances.copy() + for instance in ec2_instances: + if instance.id not 
in cached_instances: + for tag in instance.tags: + if tag.get("Key") == "vm-name": + to_write[tag.get("Value")] = instance + break + else: + del to_remove[instance.id] + + for cached_id, vm_name in to_remove.items(): + if delete: + shutil.rmtree(STATE_DIR / vm_name) + log.info( + f"REMOVED {vm_name} ({cached_id.strip()}) from cache at {STATE_DIR / vm_name}" + ) + else: + log.info( + f"Would remove {vm_name} ({cached_id.strip()}) from cache at {STATE_DIR / vm_name}" + ) + if not delete and to_remove: + log.info("To force the removal of the above cache entries, pass --delete") + + for name_tag, vm_instance in to_write.items(): + vm_write = VM(ctx=ctx, name=name_tag, region_name=ctx.parser.options.region) + vm_write.instance = vm_instance + vm_write.write_state() + + +@vm.command( + name="list", + arguments={ + "key_name": { + "help": "The SSH key name. Will default to TOOLS_KEY_NAME in environment" + }, + "states": { + "help": "The instance state to filter by.", + "flags": ["-s", "-state"], + "action": "append", + }, + }, +) +def list_vms( + ctx: Context, + key_name: str = os.environ.get("RUNNER_NAME") or os.environ.get("TOOLS_KEY_NAME"), # type: ignore[assignment] + states: set[str] = None, +): + """ + List the vms associated with the given key. 
+ """ + instances = _filter_instances_by_state( + _get_instances_by_key(ctx, key_name), + states, + ) + + for instance in instances: + vm_state = instance.state["Name"] + ip_addr = instance.private_ip_address + ami = instance.image_id + vm_name = None + for tag in instance.tags: + if tag.get("Key") == "vm-name": + vm_name = tag.get("Value") + break + + if vm_name is not None: + sep = "\n " + extra_info = { + "IP": ip_addr, + "AMI": ami, + } + extras = sep + sep.join( + [f"{key}: {value}" for key, value in extra_info.items()] + ) + log.info(f"{vm_name} ({vm_state}){extras}") + + +def _get_instances_by_key(ctx: Context, key_name: str): + if key_name is None: + ctx.exit(1, "We need a key name to filter the instances by.") + ec2 = boto3.resource("ec2", region_name=ctx.parser.options.region) + # First let's get the instances on AWS associated with the key given + filters = [ + {"Name": "key-name", "Values": [key_name]}, + ] + try: + instances = list( + ec2.instances.filter( + Filters=filters, + ) + ) + except ClientError as exc: + if "RequestExpired" not in str(exc): + raise + ctx.error(str(exc)) + ctx.exit(1) + return instances + + +def _filter_instances_by_state(instances: list[Instance], states: set[str] | None): + if states is None: + return instances + return [instance for instance in instances if instance.state["Name"] in states] + + @attr.s(frozen=True, kw_only=True) class AMIConfig: ami: str = attr.ib() @@ -588,6 +796,11 @@ def read_state(self): self.ctx.error(str(exc)) self.ctx.exit(1) instance_id_path.unlink() + except AttributeError: + # This machine no longer exists?! 
+ instance_id_path.unlink() + self.ctx.info("It appears the cached image no longer exists...") + self.ctx.exit(1) if not instance_id_path.exists(): filters = [ {"Name": "tag:vm-name", "Values": [self.name]}, @@ -623,15 +836,23 @@ def write_state(self): def write_ssh_config(self): if self.ssh_config_file.exists(): - return + if ( + f"Hostname {self.instance.private_ip_address}" + in self.ssh_config_file.read_text() + ): + # If what's on config matches, then we're good + return if os.environ.get("CI") is not None: forward_agent = "no" else: forward_agent = "yes" + ciphers = "" + if "photonos" in self.name: + ciphers = "Ciphers=aes256-gcm@openssh.com,aes256-cbc,aes256-ctr,chacha20-poly1305@openssh.com,aes128-ctr,aes192-ctr,aes128-gcm@openssh.com" ssh_config = textwrap.dedent( f"""\ Host {self.name} - Hostname {self.instance.public_ip_address or self.instance.private_ip_address} + Hostname {self.instance.private_ip_address} User {self.config.ssh_username} ControlMaster=no Compression=yes @@ -639,6 +860,8 @@ def write_ssh_config(self): StrictHostKeyChecking=no UserKnownHostsFile=/dev/null ForwardAgent={forward_agent} + PasswordAuthentication=no + {ciphers} """ ) self.ssh_config_file.write_text(ssh_config) @@ -656,7 +879,7 @@ def create( self.get_ec2_resource.cache_clear() if environment is None: - environment = "prod" + environment = tools.utils.SPB_ENVIRONMENT create_timeout = self.config.create_timeout create_timeout_progress = 0 @@ -673,41 +896,50 @@ def create( client = boto3.client("ec2", region_name=self.region_name) # Let's search for the launch template corresponding to this AMI launch_template_name = None + next_token = "" try: - response = response = client.describe_launch_templates( - Filters=[ - { - "Name": "tag:spb:is-golden-image-template", - "Values": ["true"], - }, - { - "Name": "tag:spb:project", - "Values": ["salt-project"], - }, - { - "Name": "tag:spb:environment", - "Values": [environment], - }, - { - "Name": "tag:spb:image-id", - "Values": 
[self.config.ami], - }, - ] - ) - log.debug( - "Search for launch template response:\n%s", pprint.pformat(response) - ) - for details in response.get("LaunchTemplates"): + while True: + response = response = client.describe_launch_templates( + Filters=[ + { + "Name": "tag:spb:is-golden-image-template", + "Values": ["true"], + }, + { + "Name": "tag:spb:project", + "Values": ["salt-project"], + }, + { + "Name": "tag:spb:environment", + "Values": [environment], + }, + { + "Name": "tag:spb:image-id", + "Values": [self.config.ami], + }, + ], + NextToken=next_token, + ) + log.debug( + "Search for launch template response:\n%s", + pprint.pformat(response), + ) + for details in response.get("LaunchTemplates"): + if launch_template_name is not None: + log.warning( + "Multiple launch templates for the same AMI. This is not " + "supposed to happen. Picked the first one listed: %s", + response, + ) + break + launch_template_name = details["LaunchTemplateName"] + if launch_template_name is not None: - log.warning( - "Multiple launch templates for the same AMI. This is not " - "supposed to happen. 
Picked the first one listed: %s", - response, - ) break - launch_template_name = details["LaunchTemplateName"] - if launch_template_name is None: + next_token = response.get("NextToken") + if next_token: + continue self.ctx.error(f"Could not find a launch template for {self.name!r}") self.ctx.exit(1) except ClientError as exc: @@ -738,11 +970,7 @@ def create( if tag["Key"] != "Name": continue private_value = f"-{environment}-vpc-private-" - if started_in_ci and private_value in tag["Value"]: - subnets[subnet.id] = subnet.available_ip_address_count - break - public_value = f"-{environment}-vpc-public-" - if started_in_ci is False and public_value in tag["Value"]: + if private_value in tag["Value"]: subnets[subnet.id] = subnet.available_ip_address_count break if subnets: @@ -756,8 +984,7 @@ def create( log.info("Starting CI configured VM") else: # This is a developer running - log.info("Starting Developer configured VM") - # Get the develpers security group + log.info(f"Starting Developer configured VM In Environment '{environment}'") security_group_filters = [ { "Name": "vpc-id", @@ -767,10 +994,6 @@ def create( "Name": "tag:spb:project", "Values": ["salt-project"], }, - { - "Name": "tag:spb:developer", - "Values": ["true"], - }, ] response = client.describe_security_groups(Filters=security_group_filters) if not response.get("SecurityGroups"): @@ -781,6 +1004,26 @@ def create( self.ctx.exit(1) # Override the launch template network interfaces config security_group_ids = [sg["GroupId"] for sg in response["SecurityGroups"]] + security_group_filters = [ + { + "Name": "vpc-id", + "Values": [vpc.id], + }, + { + "Name": "tag:Name", + "Values": [f"saltproject-{environment}-client-vpn-remote-access"], + }, + ] + response = client.describe_security_groups(Filters=security_group_filters) + if not response.get("SecurityGroups"): + self.ctx.error( + "Could not find the right VPN access security group. 
" + f"Filters:\n{pprint.pformat(security_group_filters)}" + ) + self.ctx.exit(1) + security_group_ids.extend( + [sg["GroupId"] for sg in response["SecurityGroups"]] + ) progress = create_progress_bar() create_task = progress.add_task( @@ -914,7 +1157,7 @@ def create( return error # Wait until we can SSH into the VM - host = self.instance.public_ip_address or self.instance.private_ip_address + host = self.instance.private_ip_address progress = create_progress_bar() connect_task = progress.add_task( @@ -925,6 +1168,7 @@ def create( proc = None checks = 0 last_error = None + connection_refused_or_reset = False while ssh_connection_timeout_progress <= ssh_connection_timeout: start = time.time() if proc is None: @@ -964,6 +1208,11 @@ def create( break proc.wait(timeout=3) stderr = proc.stderr.read().strip() + if connection_refused_or_reset is False and ( + "connection refused" in stderr.lower() + or "connection reset" in stderr.lower() + ): + connection_refused_or_reset = True if stderr: stderr = f" Last Error: {stderr}" last_error = stderr @@ -983,6 +1232,12 @@ def create( description=f"Waiting for SSH to become available at {host} ...{stderr or ''}", ) + if connection_refused_or_reset: + # Since ssh is now running, and we're actually getting a connection + # refused error message, let's try to ssh a little slower in order not + # to get blocked + time.sleep(10) + if checks >= 10 and proc is not None: proc.kill() proc = None @@ -993,21 +1248,27 @@ def create( return error return True - def destroy(self): + def destroy(self, no_wait: bool = False): try: if not self.is_running: log.info(f"{self!r} is not running...") return timeout = self.config.terminate_timeout - timeout_progress = 0 + timeout_progress = 0.0 progress = create_progress_bar() - task = progress.add_task(f"Terminatting {self!r}...", total=timeout) + task = progress.add_task(f"Terminating {self!r}...", total=timeout) self.instance.terminate() try: with progress: while timeout_progress <= timeout: start = 
time.time() time.sleep(1) + if no_wait and not self.is_running: + log.info( + f"{self!r} started the destroy process. " + "Not waiting for completion of that process." + ) + break if self.state == "terminated": progress.update( task, @@ -1033,7 +1294,7 @@ def destroy(self): shutil.rmtree(self.state_dir, ignore_errors=True) self.instance = None - def upload_checkout(self, verbose=True): + def upload_checkout(self, verbose=True, download=False): rsync_flags = [ "--delete", "--no-group", @@ -1041,6 +1302,8 @@ def upload_checkout(self, verbose=True): "--exclude", ".nox/", "--exclude", + ".tools-venvs/", + "--exclude", ".pytest_cache/", "--exclude", f"{STATE_DIR.relative_to(tools.utils.REPO_ROOT)}{os.path.sep}", @@ -1052,7 +1315,7 @@ def upload_checkout(self, verbose=True): "--include", "artifacts/salt", "--include", - "pkg/artifacts/*", + "artifacts/pkg", # But we also want to exclude all other entries under artifacts/ "--exclude", "artifacts/*", @@ -1066,14 +1329,19 @@ def upload_checkout(self, verbose=True): # Remote repo path remote_path = self.upload_path.as_posix() rsync_remote_path = remote_path - if self.is_windows: + if sys.platform == "win32": for drive in ("c:", "C:"): source = source.replace(drive, "/cygdrive/c") - rsync_remote_path = rsync_remote_path.replace(drive, "/cygdrive/c") source = source.replace("\\", "/") + if self.is_windows: + for drive in ("c:", "C:"): + rsync_remote_path = rsync_remote_path.replace(drive, "/cygdrive/c") destination = f"{self.name}:{rsync_remote_path}" description = "Rsync local checkout to VM..." 
- self.rsync(source, destination, description, rsync_flags) + if download: + self.rsync(f"{destination}/*", source, description, rsync_flags) + else: + self.rsync(source, destination, description, rsync_flags) if self.is_windows: # rsync sets very strict file permissions and disables inheritance # we only need to reset permissions so they inherit from the parent @@ -1086,6 +1354,7 @@ def write_and_upload_dot_env(self, env: dict[str, str]): if not env: return write_env = {k: str(v) for (k, v) in env.items()} + write_env["TOOLS_DISTRO_SLUG"] = self.name write_env_filename = ".ci-env" write_env_filepath = tools.utils.REPO_ROOT / ".ci-env" write_env_filepath.write_text(json.dumps(write_env)) @@ -1187,7 +1456,13 @@ def combine_coverage(self): """ Combine the code coverage databases """ - return self.run_nox("combine-coverage", session_args=[self.name]) + return self.run_nox("combine-coverage-onedir") + + def create_xml_coverage_reports(self): + """ + Create XML coverage reports + """ + return self.run_nox("create-xml-coverage-reports-onedir") def compress_dependencies(self): """ @@ -1199,7 +1474,10 @@ def decompress_dependencies(self): """ Decompress nox..tar.* if it exists in the VM """ - return self.run_nox("decompress-dependencies", session_args=[self.name]) + env = {"DELETE_NOX_ARCHIVE": "1"} + return self.run_nox( + "decompress-dependencies", session_args=[self.name], env=env + ) def download_dependencies(self): """ @@ -1250,16 +1528,17 @@ def rsync(self, source, destination, description, rsync_flags: list[str] = None) self.ctx.exit(1, "Could find the 'rsync' binary") if TYPE_CHECKING: assert rsync + ssh_cmd = " ".join( + self.ssh_command_args( + include_vm_target=False, log_command_level=logging.NOTSET + ) + ) cmd: list[str] = [ - rsync, + f'"{rsync}"' if sys.platform == "win32" else rsync, "-az", "--info=none,progress2", "-e", - " ".join( - self.ssh_command_args( - include_vm_target=False, log_command_level=logging.NOTSET - ) - ), + f'"{ssh_cmd}"' if sys.platform 
== "win32" else ssh_cmd, ] if rsync_flags: cmd.extend(rsync_flags) @@ -1272,6 +1551,8 @@ def rsync(self, source, destination, description, rsync_flags: list[str] = None) log.info(f"Running {' '.join(cmd)!r}") # type: ignore[arg-type] progress = create_progress_bar(transient=True) task = progress.add_task(description, total=100) + if sys.platform == "win32": + cmd = [" ".join(cmd)] with progress: proc = subprocess.Popen(cmd, bufsize=1, stdout=subprocess.PIPE, text=True) completed = 0